code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
package eu.ehealth.db.db;
// Generated Oct 1, 2013 11:39:06 AM by Hibernate Tools 3.2.4.GA

/**
 * Persistent entity describing a single piece of entertainment content
 * (e.g. a video or article) stored by the eHealth platform.
 *
 * <p>Plain Hibernate/hbm2java-style POJO: a no-arg constructor, a full-field
 * constructor, and simple accessors for every column. All fields are nullable
 * wrapper types, matching the generated mapping.
 */
public class EntertainmentContent implements java.io.Serializable {

    // Explicit serialVersionUID: the original generated class relied on the
    // JVM-computed default, which changes whenever the class is recompiled
    // with any structural difference and silently breaks deserialization of
    // previously persisted instances.
    private static final long serialVersionUID = 1L;

    private Integer id;        // surrogate primary key, assigned by the database
    private String title;
    private String url;        // location of the content resource
    private String type;       // media type discriminator (free-form string)
    private String category;
    private String text;       // descriptive or body text
    private Boolean enabled;   // whether the content is visible to users

    /** No-arg constructor required by Hibernate. */
    public EntertainmentContent() {
    }

    /**
     * Full-field constructor (id is omitted; it is generated on persist).
     *
     * @param title    display title of the content
     * @param url      location of the content resource
     * @param type     media type discriminator
     * @param category grouping category
     * @param text     descriptive or body text
     * @param enabled  whether the content is active/visible
     */
    public EntertainmentContent(String title, String url, String type,
            String category, String text, Boolean enabled) {
        this.title = title;
        this.url = url;
        this.type = type;
        this.category = category;
        this.text = text;
        this.enabled = enabled;
    }

    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getTitle() {
        return this.title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getUrl() {
        return this.url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getType() {
        return this.type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getCategory() {
        return this.category;
    }

    public void setCategory(String category) {
        this.category = category;
    }

    public String getText() {
        return this.text;
    }

    public void setText(String text) {
        this.text = text;
    }

    public Boolean getEnabled() {
        return this.enabled;
    }

    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }
}
seaclouds-atos/softcare-final-implementation
storage-component/src/main/java/eu/ehealth/db/db/EntertainmentContent.java
Java
apache-2.0
1,754
package org.nd4j.parameterserver.updater; import org.nd4j.aeron.ipc.NDArrayMessage; import org.nd4j.linalg.api.ndarray.INDArray; import java.util.Map; /** * Created by agibsonccc on 12/1/16. */ public class TimeDelayedParameterUpdater extends BaseParameterUpdater { private long syncTime; private long lastSynced; /** * Returns the number of required * updates for a new pass * * @return the number of required updates for a new pass */ @Override public int requiredUpdatesForPass() { return 0; } /** * Returns the current status of this parameter server * updater * * @return */ @Override public Map<String, Number> status() { return null; } /** * Serialize this updater as json * * @return */ @Override public String toJson() { return null; } /** * Reset internal counters * such as number of updates accumulated. */ @Override public void reset() { } /** * Returns true if * the updater has accumulated enough ndarrays to * replicate to the workers * * @return true if replication should happen,false otherwise */ @Override public boolean shouldReplicate() { long now = System.currentTimeMillis(); long diff = Math.abs(now - lastSynced); return diff > syncTime; } /** * Do an update based on the ndarray message. * * @param message */ @Override public void update(NDArrayMessage message) { } /** * Updates result * based on arr along a particular * {@link INDArray#tensorAlongDimension(int, int...)} * * @param arr the array to update * @param result the result ndarray to update * @param idx the index to update * @param dimensions the dimensions to update */ @Override public void partialUpdate(INDArray arr, INDArray result, long idx, int... dimensions) { } /** * Updates result * based on arr * * @param arr the array to update * @param result the result ndarray to update */ @Override public void update(INDArray arr, INDArray result) { } }
smarthi/nd4j
nd4j-parameter-server-parent/nd4j-parameter-server/src/main/java/org/nd4j/parameterserver/updater/TimeDelayedParameterUpdater.java
Java
apache-2.0
2,281
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for Keras metrics functions.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import json from absl.testing import parameterized import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.keras import metrics from tensorflow.python.keras.utils import metrics_utils from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test @test_util.run_all_in_graph_and_eager_modes class FalsePositivesTest(test.TestCase): def test_config(self): fp_obj = metrics.FalsePositives(name='my_fp', thresholds=[0.4, 0.9]) self.assertEqual(fp_obj.name, 'my_fp') self.assertEqual(len(fp_obj.variables), 1) self.assertEqual(fp_obj.thresholds, [0.4, 0.9]) # Check save and restore config fp_obj2 = metrics.FalsePositives.from_config(fp_obj.get_config()) self.assertEqual(fp_obj2.name, 'my_fp') self.assertEqual(len(fp_obj2.variables), 1) self.assertEqual(fp_obj2.thresholds, [0.4, 0.9]) def test_unweighted(self): fp_obj = metrics.FalsePositives() self.evaluate(variables.variables_initializer(fp_obj.variables)) 
y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) update_op = fp_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = fp_obj.result() self.assertAllClose(7., result) def test_weighted(self): fp_obj = metrics.FalsePositives() self.evaluate(variables.variables_initializer(fp_obj.variables)) y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) sample_weight = constant_op.constant((1., 1.5, 2., 2.5)) result = fp_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose(14., self.evaluate(result)) def test_unweighted_with_thresholds(self): fp_obj = metrics.FalsePositives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(fp_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) update_op = fp_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = fp_obj.result() self.assertAllClose([7., 4., 2.], result) def test_weighted_with_thresholds(self): fp_obj = metrics.FalsePositives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(fp_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) sample_weight = ((1.0, 2.0, 3.0, 5.0), (7.0, 11.0, 13.0, 17.0), (19.0, 23.0, 29.0, 31.0), (5.0, 15.0, 10.0, 0)) result = fp_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose([125., 42., 12.], self.evaluate(result)) def test_threshold_limit(self): with 
self.assertRaisesRegexp( ValueError, r'Threshold values must be in \[0, 1\]. Invalid values: \[-1, 2\]'): metrics.FalsePositives(thresholds=[-1, 0.5, 2]) with self.assertRaisesRegexp( ValueError, r'Threshold values must be in \[0, 1\]. Invalid values: \[None\]'): metrics.FalsePositives(thresholds=[None]) @test_util.run_all_in_graph_and_eager_modes class FalseNegativesTest(test.TestCase): def test_config(self): fn_obj = metrics.FalseNegatives(name='my_fn', thresholds=[0.4, 0.9]) self.assertEqual(fn_obj.name, 'my_fn') self.assertEqual(len(fn_obj.variables), 1) self.assertEqual(fn_obj.thresholds, [0.4, 0.9]) # Check save and restore config fn_obj2 = metrics.FalseNegatives.from_config(fn_obj.get_config()) self.assertEqual(fn_obj2.name, 'my_fn') self.assertEqual(len(fn_obj2.variables), 1) self.assertEqual(fn_obj2.thresholds, [0.4, 0.9]) def test_unweighted(self): fn_obj = metrics.FalseNegatives() self.evaluate(variables.variables_initializer(fn_obj.variables)) y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) update_op = fn_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = fn_obj.result() self.assertAllClose(3., result) def test_weighted(self): fn_obj = metrics.FalseNegatives() self.evaluate(variables.variables_initializer(fn_obj.variables)) y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) sample_weight = constant_op.constant((1., 1.5, 2., 2.5)) result = fn_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose(5., self.evaluate(result)) def test_unweighted_with_thresholds(self): fn_obj = metrics.FalseNegatives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(fn_obj.variables)) y_pred = constant_op.constant(((0.9, 
0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) update_op = fn_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = fn_obj.result() self.assertAllClose([1., 4., 6.], result) def test_weighted_with_thresholds(self): fn_obj = metrics.FalseNegatives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(fn_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) sample_weight = ((3.0,), (5.0,), (7.0,), (4.0,)) result = fn_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose([4., 16., 23.], self.evaluate(result)) @test_util.run_all_in_graph_and_eager_modes class TrueNegativesTest(test.TestCase): def test_config(self): tn_obj = metrics.TrueNegatives(name='my_tn', thresholds=[0.4, 0.9]) self.assertEqual(tn_obj.name, 'my_tn') self.assertEqual(len(tn_obj.variables), 1) self.assertEqual(tn_obj.thresholds, [0.4, 0.9]) # Check save and restore config tn_obj2 = metrics.TrueNegatives.from_config(tn_obj.get_config()) self.assertEqual(tn_obj2.name, 'my_tn') self.assertEqual(len(tn_obj2.variables), 1) self.assertEqual(tn_obj2.thresholds, [0.4, 0.9]) def test_unweighted(self): tn_obj = metrics.TrueNegatives() self.evaluate(variables.variables_initializer(tn_obj.variables)) y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) update_op = tn_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = tn_obj.result() self.assertAllClose(3., result) def test_weighted(self): tn_obj = metrics.TrueNegatives() self.evaluate(variables.variables_initializer(tn_obj.variables)) y_true = constant_op.constant(((0, 1, 
0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) sample_weight = constant_op.constant((1., 1.5, 2., 2.5)) result = tn_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose(4., self.evaluate(result)) def test_unweighted_with_thresholds(self): tn_obj = metrics.TrueNegatives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(tn_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) update_op = tn_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = tn_obj.result() self.assertAllClose([2., 5., 7.], result) def test_weighted_with_thresholds(self): tn_obj = metrics.TrueNegatives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(tn_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) sample_weight = ((0.0, 2.0, 3.0, 5.0),) result = tn_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose([5., 15., 23.], self.evaluate(result)) @test_util.run_all_in_graph_and_eager_modes class TruePositivesTest(test.TestCase): def test_config(self): tp_obj = metrics.TruePositives(name='my_tp', thresholds=[0.4, 0.9]) self.assertEqual(tp_obj.name, 'my_tp') self.assertEqual(len(tp_obj.variables), 1) self.assertEqual(tp_obj.thresholds, [0.4, 0.9]) # Check save and restore config tp_obj2 = metrics.TruePositives.from_config(tp_obj.get_config()) self.assertEqual(tp_obj2.name, 'my_tp') self.assertEqual(len(tp_obj2.variables), 1) self.assertEqual(tp_obj2.thresholds, [0.4, 0.9]) def test_unweighted(self): tp_obj = metrics.TruePositives() 
self.evaluate(variables.variables_initializer(tp_obj.variables)) y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) update_op = tp_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = tp_obj.result() self.assertAllClose(7., result) def test_weighted(self): tp_obj = metrics.TruePositives() self.evaluate(variables.variables_initializer(tp_obj.variables)) y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1), (1, 1, 1, 1, 0), (0, 0, 0, 0, 1))) y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1), (0, 1, 0, 1, 0), (1, 1, 1, 1, 1))) sample_weight = constant_op.constant((1., 1.5, 2., 2.5)) result = tp_obj(y_true, y_pred, sample_weight=sample_weight) self.assertAllClose(12., self.evaluate(result)) def test_unweighted_with_thresholds(self): tp_obj = metrics.TruePositives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(tp_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) update_op = tp_obj.update_state(y_true, y_pred) self.evaluate(update_op) result = tp_obj.result() self.assertAllClose([6., 3., 1.], result) def test_weighted_with_thresholds(self): tp_obj = metrics.TruePositives(thresholds=[0.15, 0.5, 0.85]) self.evaluate(variables.variables_initializer(tp_obj.variables)) y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3))) y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0), (1, 1, 1, 1))) result = tp_obj(y_true, y_pred, sample_weight=37.) 
self.assertAllClose([222., 111., 37.], self.evaluate(result)) @test_util.run_all_in_graph_and_eager_modes class PrecisionTest(test.TestCase): def test_config(self): p_obj = metrics.Precision( name='my_precision', thresholds=[0.4, 0.9], top_k=15, class_id=12) self.assertEqual(p_obj.name, 'my_precision') self.assertEqual(len(p_obj.variables), 2) self.assertEqual([v.name for v in p_obj.variables], ['true_positives:0', 'false_positives:0']) self.assertEqual(p_obj.thresholds, [0.4, 0.9]) self.assertEqual(p_obj.top_k, 15) self.assertEqual(p_obj.class_id, 12) # Check save and restore config p_obj2 = metrics.Precision.from_config(p_obj.get_config()) self.assertEqual(p_obj2.name, 'my_precision') self.assertEqual(len(p_obj2.variables), 2) self.assertEqual(p_obj2.thresholds, [0.4, 0.9]) self.assertEqual(p_obj2.top_k, 15) self.assertEqual(p_obj2.class_id, 12) def test_value_is_idempotent(self): p_obj = metrics.Precision(thresholds=[0.3, 0.72]) y_pred = random_ops.random_uniform(shape=(10, 3)) y_true = random_ops.random_uniform(shape=(10, 3)) update_op = p_obj.update_state(y_true, y_pred) self.evaluate(variables.variables_initializer(p_obj.variables)) # Run several updates. for _ in range(10): self.evaluate(update_op) # Then verify idempotency. 
initial_precision = self.evaluate(p_obj.result()) for _ in range(10): self.assertArrayNear(initial_precision, self.evaluate(p_obj.result()), 1e-3) def test_unweighted(self): p_obj = metrics.Precision() y_pred = constant_op.constant([1, 0, 1, 0], shape=(1, 4)) y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4)) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) def test_unweighted_all_incorrect(self): p_obj = metrics.Precision(thresholds=[0.5]) inputs = np.random.randint(0, 2, size=(100, 1)) y_pred = constant_op.constant(inputs) y_true = constant_op.constant(1 - inputs) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(0, self.evaluate(result)) def test_weighted(self): p_obj = metrics.Precision() y_pred = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]]) y_true = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]]) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj( y_true, y_pred, sample_weight=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]])) weighted_tp = 3.0 + 4.0 weighted_positives = (1.0 + 3.0) + (4.0 + 2.0) expected_precision = weighted_tp / weighted_positives self.assertAlmostEqual(expected_precision, self.evaluate(result)) def test_div_by_zero(self): p_obj = metrics.Precision() y_pred = constant_op.constant([0, 0, 0, 0]) y_true = constant_op.constant([0, 0, 0, 0]) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj(y_true, y_pred) self.assertEqual(0, self.evaluate(result)) def test_unweighted_with_threshold(self): p_obj = metrics.Precision(thresholds=[0.5, 0.7]) y_pred = constant_op.constant([1, 0, 0.6, 0], shape=(1, 4)) y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4)) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj(y_true, y_pred) self.assertArrayNear([0.5, 0.], self.evaluate(result), 0) def 
test_weighted_with_threshold(self): p_obj = metrics.Precision(thresholds=[0.5, 1.]) y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2)) y_pred = constant_op.constant([[1, 0], [0.6, 0]], shape=(2, 2), dtype=dtypes.float32) weights = constant_op.constant([[4, 0], [3, 1]], shape=(2, 2), dtype=dtypes.float32) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj(y_true, y_pred, sample_weight=weights) weighted_tp = 0 + 3. weighted_positives = (0 + 3.) + (4. + 0.) expected_precision = weighted_tp / weighted_positives self.assertArrayNear([expected_precision, 0], self.evaluate(result), 1e-3) def test_multiple_updates(self): p_obj = metrics.Precision(thresholds=[0.5, 1.]) y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2)) y_pred = constant_op.constant([[1, 0], [0.6, 0]], shape=(2, 2), dtype=dtypes.float32) weights = constant_op.constant([[4, 0], [3, 1]], shape=(2, 2), dtype=dtypes.float32) self.evaluate(variables.variables_initializer(p_obj.variables)) update_op = p_obj.update_state(y_true, y_pred, sample_weight=weights) for _ in range(2): self.evaluate(update_op) weighted_tp = (0 + 3.) + (0 + 3.) weighted_positives = ((0 + 3.) + (4. + 0.)) + ((0 + 3.) + (4. + 0.)) expected_precision = weighted_tp / weighted_positives self.assertArrayNear([expected_precision, 0], self.evaluate(p_obj.result()), 1e-3) def test_unweighted_top_k(self): p_obj = metrics.Precision(top_k=3) y_pred = constant_op.constant([0.2, 0.1, 0.5, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) self.evaluate(variables.variables_initializer(p_obj.variables)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(1. 
/ 3, self.evaluate(result)) def test_weighted_top_k(self): p_obj = metrics.Precision(top_k=3) y_pred1 = constant_op.constant([0.2, 0.1, 0.4, 0, 0.2], shape=(1, 5)) y_true1 = constant_op.constant([0, 1, 1, 0, 1], shape=(1, 5)) self.evaluate(variables.variables_initializer(p_obj.variables)) self.evaluate( p_obj( y_true1, y_pred1, sample_weight=constant_op.constant([[1, 4, 2, 3, 5]]))) y_pred2 = constant_op.constant([0.2, 0.6, 0.4, 0.2, 0.2], shape=(1, 5)) y_true2 = constant_op.constant([1, 0, 1, 1, 1], shape=(1, 5)) result = p_obj(y_true2, y_pred2, sample_weight=constant_op.constant(3)) tp = (2 + 5) + (3 + 3) predicted_positives = (1 + 2 + 5) + (3 + 3 + 3) expected_precision = tp / predicted_positives self.assertAlmostEqual(expected_precision, self.evaluate(result)) def test_unweighted_class_id(self): p_obj = metrics.Precision(class_id=2) self.evaluate(variables.variables_initializer(p_obj.variables)) y_pred = constant_op.constant([0.2, 0.1, 0.6, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(p_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(p_obj.false_positives)) y_pred = constant_op.constant([0.2, 0.1, 0, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(p_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(p_obj.false_positives)) y_pred = constant_op.constant([0.2, 0.1, 0.6, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 0, 0, 0], shape=(1, 5)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(p_obj.true_positives)) self.assertAlmostEqual(1, self.evaluate(p_obj.false_positives)) def test_unweighted_top_k_and_class_id(self): p_obj = 
metrics.Precision(class_id=2, top_k=2) self.evaluate(variables.variables_initializer(p_obj.variables)) y_pred = constant_op.constant([0.2, 0.6, 0.3, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(p_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(p_obj.false_positives)) y_pred = constant_op.constant([1, 1, 0.9, 1, 1], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(p_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(p_obj.false_positives)) def test_unweighted_top_k_and_threshold(self): p_obj = metrics.Precision(thresholds=.7, top_k=2) self.evaluate(variables.variables_initializer(p_obj.variables)) y_pred = constant_op.constant([0.2, 0.8, 0.6, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 1], shape=(1, 5)) result = p_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(p_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(p_obj.false_positives)) @test_util.run_all_in_graph_and_eager_modes class RecallTest(test.TestCase): def test_config(self): r_obj = metrics.Recall( name='my_recall', thresholds=[0.4, 0.9], top_k=15, class_id=12) self.assertEqual(r_obj.name, 'my_recall') self.assertEqual(len(r_obj.variables), 2) self.assertEqual([v.name for v in r_obj.variables], ['true_positives:0', 'false_negatives:0']) self.assertEqual(r_obj.thresholds, [0.4, 0.9]) self.assertEqual(r_obj.top_k, 15) self.assertEqual(r_obj.class_id, 12) # Check save and restore config r_obj2 = metrics.Recall.from_config(r_obj.get_config()) self.assertEqual(r_obj2.name, 'my_recall') self.assertEqual(len(r_obj2.variables), 2) self.assertEqual(r_obj2.thresholds, [0.4, 0.9]) 
self.assertEqual(r_obj2.top_k, 15) self.assertEqual(r_obj2.class_id, 12) def test_value_is_idempotent(self): r_obj = metrics.Recall(thresholds=[0.3, 0.72]) y_pred = random_ops.random_uniform(shape=(10, 3)) y_true = random_ops.random_uniform(shape=(10, 3)) update_op = r_obj.update_state(y_true, y_pred) self.evaluate(variables.variables_initializer(r_obj.variables)) # Run several updates. for _ in range(10): self.evaluate(update_op) # Then verify idempotency. initial_recall = self.evaluate(r_obj.result()) for _ in range(10): self.assertArrayNear(initial_recall, self.evaluate(r_obj.result()), 1e-3) def test_unweighted(self): r_obj = metrics.Recall() y_pred = constant_op.constant([1, 0, 1, 0], shape=(1, 4)) y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4)) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) def test_unweighted_all_incorrect(self): r_obj = metrics.Recall(thresholds=[0.5]) inputs = np.random.randint(0, 2, size=(100, 1)) y_pred = constant_op.constant(inputs) y_true = constant_op.constant(1 - inputs) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0, self.evaluate(result)) def test_weighted(self): r_obj = metrics.Recall() y_pred = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]]) y_true = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]]) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj( y_true, y_pred, sample_weight=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]])) weighted_tp = 3.0 + 1.0 weighted_t = (2.0 + 3.0) + (4.0 + 1.0) expected_recall = weighted_tp / weighted_t self.assertAlmostEqual(expected_recall, self.evaluate(result)) def test_div_by_zero(self): r_obj = metrics.Recall() y_pred = constant_op.constant([0, 0, 0, 0]) y_true = constant_op.constant([0, 0, 0, 0]) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj(y_true, 
y_pred) self.assertEqual(0, self.evaluate(result)) def test_unweighted_with_threshold(self): r_obj = metrics.Recall(thresholds=[0.5, 0.7]) y_pred = constant_op.constant([1, 0, 0.6, 0], shape=(1, 4)) y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4)) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj(y_true, y_pred) self.assertArrayNear([0.5, 0.], self.evaluate(result), 0) def test_weighted_with_threshold(self): r_obj = metrics.Recall(thresholds=[0.5, 1.]) y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2)) y_pred = constant_op.constant([[1, 0], [0.6, 0]], shape=(2, 2), dtype=dtypes.float32) weights = constant_op.constant([[1, 4], [3, 2]], shape=(2, 2), dtype=dtypes.float32) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj(y_true, y_pred, sample_weight=weights) weighted_tp = 0 + 3. weighted_positives = (0 + 3.) + (4. + 0.) expected_recall = weighted_tp / weighted_positives self.assertArrayNear([expected_recall, 0], self.evaluate(result), 1e-3) def test_multiple_updates(self): r_obj = metrics.Recall(thresholds=[0.5, 1.]) y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2)) y_pred = constant_op.constant([[1, 0], [0.6, 0]], shape=(2, 2), dtype=dtypes.float32) weights = constant_op.constant([[1, 4], [3, 2]], shape=(2, 2), dtype=dtypes.float32) self.evaluate(variables.variables_initializer(r_obj.variables)) update_op = r_obj.update_state(y_true, y_pred, sample_weight=weights) for _ in range(2): self.evaluate(update_op) weighted_tp = (0 + 3.) + (0 + 3.) weighted_positives = ((0 + 3.) + (4. + 0.)) + ((0 + 3.) + (4. 
+ 0.)) expected_recall = weighted_tp / weighted_positives self.assertArrayNear([expected_recall, 0], self.evaluate(r_obj.result()), 1e-3) def test_unweighted_top_k(self): r_obj = metrics.Recall(top_k=3) y_pred = constant_op.constant([0.2, 0.1, 0.5, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) self.evaluate(variables.variables_initializer(r_obj.variables)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) def test_weighted_top_k(self): r_obj = metrics.Recall(top_k=3) y_pred1 = constant_op.constant([0.2, 0.1, 0.4, 0, 0.2], shape=(1, 5)) y_true1 = constant_op.constant([0, 1, 1, 0, 1], shape=(1, 5)) self.evaluate(variables.variables_initializer(r_obj.variables)) self.evaluate( r_obj( y_true1, y_pred1, sample_weight=constant_op.constant([[1, 4, 2, 3, 5]]))) y_pred2 = constant_op.constant([0.2, 0.6, 0.4, 0.2, 0.2], shape=(1, 5)) y_true2 = constant_op.constant([1, 0, 1, 1, 1], shape=(1, 5)) result = r_obj(y_true2, y_pred2, sample_weight=constant_op.constant(3)) tp = (2 + 5) + (3 + 3) positives = (4 + 2 + 5) + (3 + 3 + 3 + 3) expected_recall = tp / positives self.assertAlmostEqual(expected_recall, self.evaluate(result)) def test_unweighted_class_id(self): r_obj = metrics.Recall(class_id=2) self.evaluate(variables.variables_initializer(r_obj.variables)) y_pred = constant_op.constant([0.2, 0.1, 0.6, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(r_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(r_obj.false_negatives)) y_pred = constant_op.constant([0.2, 0.1, 0, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(r_obj.true_positives)) self.assertAlmostEqual(1, 
self.evaluate(r_obj.false_negatives)) y_pred = constant_op.constant([0.2, 0.1, 0.6, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 0, 0, 0], shape=(1, 5)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(r_obj.true_positives)) self.assertAlmostEqual(1, self.evaluate(r_obj.false_negatives)) def test_unweighted_top_k_and_class_id(self): r_obj = metrics.Recall(class_id=2, top_k=2) self.evaluate(variables.variables_initializer(r_obj.variables)) y_pred = constant_op.constant([0.2, 0.6, 0.3, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(r_obj.true_positives)) self.assertAlmostEqual(0, self.evaluate(r_obj.false_negatives)) y_pred = constant_op.constant([1, 1, 0.9, 1, 1], shape=(1, 5)) y_true = constant_op.constant([0, 1, 1, 0, 0], shape=(1, 5)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0.5, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(r_obj.true_positives)) self.assertAlmostEqual(1, self.evaluate(r_obj.false_negatives)) def test_unweighted_top_k_and_threshold(self): r_obj = metrics.Recall(thresholds=.7, top_k=2) self.evaluate(variables.variables_initializer(r_obj.variables)) y_pred = constant_op.constant([0.2, 0.8, 0.6, 0, 0.2], shape=(1, 5)) y_true = constant_op.constant([1, 1, 1, 0, 1], shape=(1, 5)) result = r_obj(y_true, y_pred) self.assertAlmostEqual(0.25, self.evaluate(result)) self.assertAlmostEqual(1, self.evaluate(r_obj.true_positives)) self.assertAlmostEqual(3, self.evaluate(r_obj.false_negatives)) @test_util.run_all_in_graph_and_eager_modes class SensitivityAtSpecificityTest(test.TestCase, parameterized.TestCase): def test_config(self): s_obj = metrics.SensitivityAtSpecificity( 0.4, num_thresholds=100, name='sensitivity_at_specificity_1') self.assertEqual(s_obj.name, 
'sensitivity_at_specificity_1') self.assertLen(s_obj.variables, 4) self.assertEqual(s_obj.specificity, 0.4) self.assertEqual(s_obj.num_thresholds, 100) # Check save and restore config s_obj2 = metrics.SensitivityAtSpecificity.from_config(s_obj.get_config()) self.assertEqual(s_obj2.name, 'sensitivity_at_specificity_1') self.assertLen(s_obj2.variables, 4) self.assertEqual(s_obj2.specificity, 0.4) self.assertEqual(s_obj2.num_thresholds, 100) def test_value_is_idempotent(self): s_obj = metrics.SensitivityAtSpecificity(0.7) y_pred = random_ops.random_uniform((10, 3), maxval=1, dtype=dtypes.float32, seed=1) y_true = random_ops.random_uniform((10, 3), maxval=2, dtype=dtypes.int64, seed=1) update_op = s_obj.update_state(y_true, y_pred) self.evaluate(variables.variables_initializer(s_obj.variables)) # Run several updates. for _ in range(10): self.evaluate(update_op) # Then verify idempotency. initial_sensitivity = self.evaluate(s_obj.result()) for _ in range(10): self.assertAlmostEqual(initial_sensitivity, self.evaluate(s_obj.result()), 1e-3) def test_unweighted_all_correct(self): s_obj = metrics.SensitivityAtSpecificity(0.7) inputs = np.random.randint(0, 2, size=(100, 1)) y_pred = constant_op.constant(inputs, dtype=dtypes.float32) y_true = constant_op.constant(inputs) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) def test_unweighted_high_specificity(self): s_obj = metrics.SensitivityAtSpecificity(0.8) pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9] label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] y_pred = constant_op.constant(pred_values, dtype=dtypes.float32) y_true = constant_op.constant(label_values) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred) self.assertAlmostEqual(0.8, self.evaluate(result)) def test_unweighted_low_specificity(self): s_obj = metrics.SensitivityAtSpecificity(0.4) pred_values = [0.0, 0.1, 
0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26] label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] y_pred = constant_op.constant(pred_values, dtype=dtypes.float32) y_true = constant_op.constant(label_values) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred) self.assertAlmostEqual(0.6, self.evaluate(result)) @parameterized.parameters([dtypes.bool, dtypes.int32, dtypes.float32]) def test_weighted(self, label_dtype): s_obj = metrics.SensitivityAtSpecificity(0.4) pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26] label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] weight_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] y_pred = constant_op.constant(pred_values, dtype=dtypes.float32) y_true = math_ops.cast(label_values, dtype=label_dtype) weights = constant_op.constant(weight_values) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred, sample_weight=weights) self.assertAlmostEqual(0.675, self.evaluate(result)) def test_invalid_specificity(self): with self.assertRaisesRegexp( ValueError, r'`specificity` must be in the range \[0, 1\].'): metrics.SensitivityAtSpecificity(-1) def test_invalid_num_thresholds(self): with self.assertRaisesRegexp(ValueError, '`num_thresholds` must be > 0.'): metrics.SensitivityAtSpecificity(0.4, num_thresholds=-1) @test_util.run_all_in_graph_and_eager_modes class SpecificityAtSensitivityTest(test.TestCase, parameterized.TestCase): def test_config(self): s_obj = metrics.SpecificityAtSensitivity( 0.4, num_thresholds=100, name='specificity_at_sensitivity_1') self.assertEqual(s_obj.name, 'specificity_at_sensitivity_1') self.assertLen(s_obj.variables, 4) self.assertEqual(s_obj.sensitivity, 0.4) self.assertEqual(s_obj.num_thresholds, 100) # Check save and restore config s_obj2 = metrics.SpecificityAtSensitivity.from_config(s_obj.get_config()) self.assertEqual(s_obj2.name, 'specificity_at_sensitivity_1') self.assertLen(s_obj2.variables, 4) 
self.assertEqual(s_obj2.sensitivity, 0.4) self.assertEqual(s_obj2.num_thresholds, 100) def test_value_is_idempotent(self): s_obj = metrics.SpecificityAtSensitivity(0.7) y_pred = random_ops.random_uniform((10, 3), maxval=1, dtype=dtypes.float32, seed=1) y_true = random_ops.random_uniform((10, 3), maxval=2, dtype=dtypes.int64, seed=1) update_op = s_obj.update_state(y_true, y_pred) self.evaluate(variables.variables_initializer(s_obj.variables)) # Run several updates. for _ in range(10): self.evaluate(update_op) # Then verify idempotency. initial_specificity = self.evaluate(s_obj.result()) for _ in range(10): self.assertAlmostEqual(initial_specificity, self.evaluate(s_obj.result()), 1e-3) def test_unweighted_all_correct(self): s_obj = metrics.SpecificityAtSensitivity(0.7) inputs = np.random.randint(0, 2, size=(100, 1)) y_pred = constant_op.constant(inputs, dtype=dtypes.float32) y_true = constant_op.constant(inputs) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred) self.assertAlmostEqual(1, self.evaluate(result)) def test_unweighted_high_sensitivity(self): s_obj = metrics.SpecificityAtSensitivity(0.8) pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9] label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] y_pred = constant_op.constant(pred_values, dtype=dtypes.float32) y_true = constant_op.constant(label_values) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred) self.assertAlmostEqual(0.4, self.evaluate(result)) def test_unweighted_low_sensitivity(self): s_obj = metrics.SpecificityAtSensitivity(0.4) pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26] label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] y_pred = constant_op.constant(pred_values, dtype=dtypes.float32) y_true = constant_op.constant(label_values) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred) self.assertAlmostEqual(0.6, self.evaluate(result)) 
@parameterized.parameters([dtypes.bool, dtypes.int32, dtypes.float32]) def test_weighted(self, label_dtype): s_obj = metrics.SpecificityAtSensitivity(0.4) pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26] label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] weight_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] y_pred = constant_op.constant(pred_values, dtype=dtypes.float32) y_true = math_ops.cast(label_values, dtype=label_dtype) weights = constant_op.constant(weight_values) self.evaluate(variables.variables_initializer(s_obj.variables)) result = s_obj(y_true, y_pred, sample_weight=weights) self.assertAlmostEqual(0.4, self.evaluate(result)) def test_invalid_sensitivity(self): with self.assertRaisesRegexp( ValueError, r'`sensitivity` must be in the range \[0, 1\].'): metrics.SpecificityAtSensitivity(-1) def test_invalid_num_thresholds(self): with self.assertRaisesRegexp(ValueError, '`num_thresholds` must be > 0.'): metrics.SpecificityAtSensitivity(0.4, num_thresholds=-1) @test_util.run_all_in_graph_and_eager_modes class AUCTest(test.TestCase): def setup(self): self.num_thresholds = 3 self.y_pred = constant_op.constant([0, 0.5, 0.3, 0.9], dtype=dtypes.float32) self.y_true = constant_op.constant([0, 0, 1, 1]) self.sample_weight = [1, 2, 3, 4] # threshold values are [0 - 1e-7, 0.5, 1 + 1e-7] # y_pred when threshold = 0 - 1e-7 : [1, 1, 1, 1] # y_pred when threshold = 0.5 : [0, 0, 0, 1] # y_pred when threshold = 1 + 1e-7 : [0, 0, 0, 0] # without sample_weight: # tp = np.sum([[0, 0, 1, 1], [0, 0, 0, 1], [0, 0, 0, 0]], axis=1) # fp = np.sum([[1, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], axis=1) # fn = np.sum([[0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 1, 1]], axis=1) # tn = np.sum([[0, 0, 0, 0], [1, 1, 0, 0], [1, 1, 0, 0]], axis=1) # tp = [2, 1, 0], fp = [2, 0, 0], fn = [0, 1, 2], tn = [0, 2, 2] # with sample_weight: # tp = np.sum([[0, 0, 3, 4], [0, 0, 0, 4], [0, 0, 0, 0]], axis=1) # fp = np.sum([[1, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], axis=1) # fn = np.sum([[0, 0, 0, 0], 
[0, 0, 3, 0], [0, 0, 3, 4]], axis=1) # tn = np.sum([[0, 0, 0, 0], [1, 2, 0, 0], [1, 2, 0, 0]], axis=1) # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] def test_config(self): auc_obj = metrics.AUC( num_thresholds=100, curve='PR', summation_method='majoring', name='auc_1') self.assertEqual(auc_obj.name, 'auc_1') self.assertEqual(len(auc_obj.variables), 4) self.assertEqual(auc_obj.num_thresholds, 100) self.assertEqual(auc_obj.curve, metrics_utils.AUCCurve.PR) self.assertEqual(auc_obj.summation_method, metrics_utils.AUCSummationMethod.MAJORING) old_config = auc_obj.get_config() self.assertDictEqual(old_config, json.loads(json.dumps(old_config))) # Check save and restore config. auc_obj2 = metrics.AUC.from_config(auc_obj.get_config()) self.assertEqual(auc_obj2.name, 'auc_1') self.assertEqual(len(auc_obj2.variables), 4) self.assertEqual(auc_obj2.num_thresholds, 100) self.assertEqual(auc_obj2.curve, metrics_utils.AUCCurve.PR) self.assertEqual(auc_obj2.summation_method, metrics_utils.AUCSummationMethod.MAJORING) new_config = auc_obj2.get_config() self.assertDictEqual(old_config, new_config) self.assertAllClose(auc_obj.thresholds, auc_obj2.thresholds) def test_config_manual_thresholds(self): auc_obj = metrics.AUC( num_thresholds=None, curve='PR', summation_method='majoring', name='auc_1', thresholds=[0.3, 0.5]) self.assertEqual(auc_obj.name, 'auc_1') self.assertEqual(len(auc_obj.variables), 4) self.assertEqual(auc_obj.num_thresholds, 4) self.assertAllClose(auc_obj.thresholds, [0.0, 0.3, 0.5, 1.0]) self.assertEqual(auc_obj.curve, metrics_utils.AUCCurve.PR) self.assertEqual(auc_obj.summation_method, metrics_utils.AUCSummationMethod.MAJORING) old_config = auc_obj.get_config() self.assertDictEqual(old_config, json.loads(json.dumps(old_config))) # Check save and restore config. 
auc_obj2 = metrics.AUC.from_config(auc_obj.get_config()) self.assertEqual(auc_obj2.name, 'auc_1') self.assertEqual(len(auc_obj2.variables), 4) self.assertEqual(auc_obj2.num_thresholds, 4) self.assertEqual(auc_obj2.curve, metrics_utils.AUCCurve.PR) self.assertEqual(auc_obj2.summation_method, metrics_utils.AUCSummationMethod.MAJORING) new_config = auc_obj2.get_config() self.assertDictEqual(old_config, new_config) self.assertAllClose(auc_obj.thresholds, auc_obj2.thresholds) def test_value_is_idempotent(self): self.setup() auc_obj = metrics.AUC(num_thresholds=3) self.evaluate(variables.variables_initializer(auc_obj.variables)) # Run several updates. update_op = auc_obj.update_state(self.y_true, self.y_pred) for _ in range(10): self.evaluate(update_op) # Then verify idempotency. initial_auc = self.evaluate(auc_obj.result()) for _ in range(10): self.assertAllClose(initial_auc, self.evaluate(auc_obj.result()), 1e-3) def test_unweighted_all_correct(self): self.setup() auc_obj = metrics.AUC() self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_true) self.assertEqual(self.evaluate(result), 1) def test_unweighted(self): self.setup() auc_obj = metrics.AUC(num_thresholds=self.num_thresholds) self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred) # tp = [2, 1, 0], fp = [2, 0, 0], fn = [0, 1, 2], tn = [0, 2, 2] # recall = [2/2, 1/(1+1), 0] = [1, 0.5, 0] # fp_rate = [2/2, 0, 0] = [1, 0, 0] # heights = [(1 + 0.5)/2, (0.5 + 0)/2] = [0.75, 0.25] # widths = [(1 - 0), (0 - 0)] = [1, 0] expected_result = (0.75 * 1 + 0.25 * 0) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_manual_thresholds(self): self.setup() # Verify that when specified, thresholds are used instead of num_thresholds. 
auc_obj = metrics.AUC(num_thresholds=2, thresholds=[0.5]) self.assertEqual(auc_obj.num_thresholds, 3) self.assertAllClose(auc_obj.thresholds, [0.0, 0.5, 1.0]) self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred) # tp = [2, 1, 0], fp = [2, 0, 0], fn = [0, 1, 2], tn = [0, 2, 2] # recall = [2/2, 1/(1+1), 0] = [1, 0.5, 0] # fp_rate = [2/2, 0, 0] = [1, 0, 0] # heights = [(1 + 0.5)/2, (0.5 + 0)/2] = [0.75, 0.25] # widths = [(1 - 0), (0 - 0)] = [1, 0] expected_result = (0.75 * 1 + 0.25 * 0) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_weighted_roc_interpolation(self): self.setup() auc_obj = metrics.AUC(num_thresholds=self.num_thresholds) self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred, sample_weight=self.sample_weight) # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] # recall = [7/7, 4/(4+3), 0] = [1, 0.571, 0] # fp_rate = [3/3, 0, 0] = [1, 0, 0] # heights = [(1 + 0.571)/2, (0.571 + 0)/2] = [0.7855, 0.2855] # widths = [(1 - 0), (0 - 0)] = [1, 0] expected_result = (0.7855 * 1 + 0.2855 * 0) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_weighted_roc_majoring(self): self.setup() auc_obj = metrics.AUC( num_thresholds=self.num_thresholds, summation_method='majoring') self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred, sample_weight=self.sample_weight) # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] # recall = [7/7, 4/(4+3), 0] = [1, 0.571, 0] # fp_rate = [3/3, 0, 0] = [1, 0, 0] # heights = [max(1, 0.571), max(0.571, 0)] = [1, 0.571] # widths = [(1 - 0), (0 - 0)] = [1, 0] expected_result = (1 * 1 + 0.571 * 0) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_weighted_roc_minoring(self): self.setup() auc_obj = metrics.AUC( num_thresholds=self.num_thresholds, summation_method='minoring') 
self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred, sample_weight=self.sample_weight) # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] # recall = [7/7, 4/(4+3), 0] = [1, 0.571, 0] # fp_rate = [3/3, 0, 0] = [1, 0, 0] # heights = [min(1, 0.571), min(0.571, 0)] = [0.571, 0] # widths = [(1 - 0), (0 - 0)] = [1, 0] expected_result = (0.571 * 1 + 0 * 0) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_weighted_pr_majoring(self): self.setup() auc_obj = metrics.AUC( num_thresholds=self.num_thresholds, curve='PR', summation_method='majoring') self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred, sample_weight=self.sample_weight) # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] # precision = [7/(7+3), 4/4, 0] = [0.7, 1, 0] # recall = [7/7, 4/(4+3), 0] = [1, 0.571, 0] # heights = [max(0.7, 1), max(1, 0)] = [1, 1] # widths = [(1 - 0.571), (0.571 - 0)] = [0.429, 0.571] expected_result = (1 * 0.429 + 1 * 0.571) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_weighted_pr_minoring(self): self.setup() auc_obj = metrics.AUC( num_thresholds=self.num_thresholds, curve='PR', summation_method='minoring') self.evaluate(variables.variables_initializer(auc_obj.variables)) result = auc_obj(self.y_true, self.y_pred, sample_weight=self.sample_weight) # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] # precision = [7/(7+3), 4/4, 0] = [0.7, 1, 0] # recall = [7/7, 4/(4+3), 0] = [1, 0.571, 0] # heights = [min(0.7, 1), min(1, 0)] = [0.7, 0] # widths = [(1 - 0.571), (0.571 - 0)] = [0.429, 0.571] expected_result = (0.7 * 0.429 + 0 * 0.571) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_weighted_pr_interpolation(self): self.setup() auc_obj = metrics.AUC(num_thresholds=self.num_thresholds, curve='PR') self.evaluate(variables.variables_initializer(auc_obj.variables)) 
result = auc_obj(self.y_true, self.y_pred, sample_weight=self.sample_weight) # auc = (slope / Total Pos) * [dTP - intercept * log(Pb/Pa)] # tp = [7, 4, 0], fp = [3, 0, 0], fn = [0, 3, 7], tn = [0, 3, 3] # P = tp + fp = [10, 4, 0] # dTP = [7-4, 4-0] = [3, 4] # dP = [10-4, 4-0] = [6, 4] # slope = dTP/dP = [0.5, 1] # intercept = (TPa+(slope*Pa) = [(4 - 0.5*4), (0 - 1*0)] = [2, 0] # (Pb/Pa) = (Pb/Pa) if Pb > 0 AND Pa > 0 else 1 = [10/4, 4/0] = [2.5, 1] # auc * TotalPos = [(0.5 * (3 + 2 * log(2.5))), (1 * (4 + 0))] # = [2.416, 4] # auc = [2.416, 4]/(tp[1:]+fn[1:]) expected_result = (2.416/7 + 4/7) self.assertAllClose(self.evaluate(result), expected_result, 1e-3) def test_invalid_num_thresholds(self): with self.assertRaisesRegexp(ValueError, '`num_thresholds` must be > 1.'): metrics.AUC(num_thresholds=-1) with self.assertRaisesRegexp(ValueError, '`num_thresholds` must be > 1.'): metrics.AUC(num_thresholds=1) def test_invalid_curve(self): with self.assertRaisesRegexp(ValueError, 'Invalid AUC curve value "Invalid".'): metrics.AUC(curve='Invalid') def test_invalid_summation_method(self): with self.assertRaisesRegexp( ValueError, 'Invalid AUC summation method value "Invalid".'): metrics.AUC(summation_method='Invalid') if __name__ == '__main__': test.main()
chemelnucfin/tensorflow
tensorflow/python/keras/metrics_confusion_matrix_test.py
Python
apache-2.0
51,243
// Copyright 2012 Cloudera Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.cloudera.impala.catalog; import com.cloudera.impala.thrift.TCatalogObjectType; import com.cloudera.impala.thrift.THdfsCachePool; import com.google.common.base.Preconditions; /** * Represents an HDFS cache pool (CachePoolInfo class). Currently, the only metadata we * care about for cache pools is the cache pool name. In the future it may be desirable * to track additional metadata such as the owner, size, and current usage of the pool. */ public class HdfsCachePool implements CatalogObject { private long catalogVersion_; private final THdfsCachePool cachePool_; public HdfsCachePool(THdfsCachePool cachePool) { Preconditions.checkNotNull(cachePool); cachePool_ = cachePool; } @Override public TCatalogObjectType getCatalogObjectType() { return TCatalogObjectType.HDFS_CACHE_POOL; } public THdfsCachePool toThrift() { return cachePool_; } public static HdfsCachePool fromThrift(THdfsCachePool cachePool) { return new HdfsCachePool(cachePool); } @Override public String getName() { return cachePool_.getPool_name(); } @Override public long getCatalogVersion() { return catalogVersion_; } @Override public void setCatalogVersion(long newVersion) { catalogVersion_ = newVersion; } @Override public boolean isLoaded() { return true; } }
andybab/Impala
fe/src/main/java/com/cloudera/impala/catalog/HdfsCachePool.java
Java
apache-2.0
1,904
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed');
/**
 * CodeIgniter
 *
 * An open source application development framework for PHP 5.1.6 or newer
 *
 * @package		CodeIgniter
 * @author		Esen Sagynov
 * @copyright	Copyright (c) 2008 - 2014, EllisLab, Inc.
 * @license		http://codeigniter.com/user_guide/license.html
 * @link		http://codeigniter.com
 * @since		Version 1.0
 * @filesource
 */

// ------------------------------------------------------------------------

/**
 * CUBRID Utility Class
 *
 * CUBRID keeps every database fully isolated, so most generic utility
 * hooks (optimize/repair/backup) have no SQL equivalent and are handled
 * externally through the CUBRID Manager administration tool.
 *
 * @category	Database
 * @author		Esen Sagynov
 * @link		http://codeigniter.com/user_guide/database/
 */
class CI_DB_cubrid_utility extends CI_DB_utility {

	/**
	 * List databases
	 *
	 * CUBRID cannot enumerate the databases hosted on a server; the only
	 * name we can report is the database of the current connection.
	 *
	 * @access	private
	 * @return	mixed	SQL string when connected, FALSE otherwise
	 */
	function _list_databases()
	{
		return $this->conn_id ? "SELECT '" . $this->database . "'" : FALSE;
	}

	// --------------------------------------------------------------------

	/**
	 * Optimize table query
	 *
	 * No SQL-based support in CUBRID as of version 8.4.0; table
	 * optimization is performed with the CUBRID Manager tool instead.
	 *
	 * @access	private
	 * @param	string	the table name
	 * @return	bool	always FALSE (unsupported)
	 * @link	http://www.cubrid.org/manual/840/en/Optimize%20Database
	 */
	function _optimize_table($table)
	{
		return FALSE;
	}

	// --------------------------------------------------------------------

	/**
	 * Repair table query
	 *
	 * Not supported in CUBRID as of version 8.4.0; consistency checks are
	 * performed with the CUBRID Manager tool instead.
	 *
	 * @access	private
	 * @param	string	the table name
	 * @return	bool	always FALSE (unsupported)
	 * @link	http://www.cubrid.org/manual/840/en/Checking%20Database%20Consistency
	 */
	function _repair_table($table)
	{
		return FALSE;
	}

	// --------------------------------------------------------------------

	/**
	 * CUBRID Export
	 *
	 * Backups are likewise delegated to the CUBRID Manager tool, so this
	 * driver reports the feature as unsupported.
	 *
	 * @access	private
	 * @param	array	Preferences
	 * @return	mixed
	 */
	function _backup($params = array())
	{
		return $this->db->display_error('db_unsuported_feature');
	}
}

/* End of file cubrid_utility.php */
/* Location: ./system/database/drivers/cubrid/cubrid_utility.php */
tanateros/send-and-grab-mails
www/core/database/drivers/cubrid/cubrid_utility.php
PHP
apache-2.0
2,978
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package kubeapiserver import ( "fmt" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apiserver/pkg/registry/generic" genericregistry "k8s.io/apiserver/pkg/registry/generic/registry" genericapiserver "k8s.io/apiserver/pkg/server" ) // RESTOptionsFactory is a RESTOptionsGetter for kube apiservers since they do complicated stuff type RESTOptionsFactory struct { DeleteCollectionWorkers int EnableGarbageCollection bool EnableWatchCache bool StorageFactory genericapiserver.StorageFactory } func (f *RESTOptionsFactory) GetRESTOptions(resource schema.GroupResource) (generic.RESTOptions, error) { storageConfig, err := f.StorageFactory.NewConfig(resource) if err != nil { return generic.RESTOptions{}, fmt.Errorf("Unable to find storage destination for %v, due to %v", resource, err.Error()) } ret := generic.RESTOptions{ StorageConfig: storageConfig, Decorator: generic.UndecoratedStorage, DeleteCollectionWorkers: f.DeleteCollectionWorkers, EnableGarbageCollection: f.EnableGarbageCollection, ResourcePrefix: f.StorageFactory.ResourcePrefix(resource), } if f.EnableWatchCache { ret.Decorator = genericregistry.StorageWithCacher } return ret, nil }
shouhong/kubernetes
pkg/kubeapiserver/rest.go
GO
apache-2.0
1,806
-- Stock-exchange test fixture: schema, sample data, and primary keys.
-- Entities: addresses shared by people/companies, brokers, clients,
-- companies, stock information, a stock book, and transactions.
-- Primary keys are added at the end via ALTER TABLE.

-- Postal address referenced by broker/client/company through addressid.
CREATE TABLE address (
    id integer NOT NULL,
    street character varying(100),
    number integer,
    city character varying(100),
    state character varying(100),
    country character varying(100)
);

CREATE TABLE broker (
    id integer NOT NULL,
    name character varying(100),
    lastname character varying(100),
    dateofbirth date,
    ssn character varying(100),
    addressid integer
);

-- Link table: a broker works either for a company or for a private client
-- (exactly one of companyid/clientid is set per row). Note: no primary key.
CREATE TABLE brokerworksfor (
    brokerid integer NOT NULL,
    companyid integer,
    clientid integer
);

CREATE TABLE client (
    id integer NOT NULL,
    name character varying(100),
    lastname character varying(100),
    dateofbirth date,
    ssn character varying(100),
    addressid integer
);

CREATE TABLE company (
    id integer NOT NULL,
    name character varying(100),
    marketshares integer,
    networth double precision,
    addressid integer
);

-- One stock listed per date.
CREATE TABLE stockbooklist (
    date date NOT NULL,
    stockid integer
);

-- sharetype is a 0/1 flag in the sample data below.
CREATE TABLE stockinformation (
    id integer NOT NULL,
    numberofshares integer,
    sharetype smallint,
    companyid integer,
    description character varying(1000)
);

-- A trade performed by a broker for either a client or a company.
CREATE TABLE transaction (
    id integer NOT NULL,
    date timestamp,
    stockid integer,
    type smallint,
    brokerid integer,
    forclientid integer,
    forcompanyid integer,
    amount decimal(10,4)
);

-- Addresses.
INSERT INTO address VALUES (991, 'Road street', 24, 'Chonala', 'Veracruz', 'Mexico');
INSERT INTO address VALUES (992, 'Via Marconi', 3, 'Bolzano', 'Bolzano', 'Italy');
INSERT INTO address VALUES (995, 'Huberg Strasse', 3, 'Bolzano', 'Bolzano', 'Italy');
INSERT INTO address VALUES (996, 'Via Piani di Bolzano', 7, 'Marconi', 'Trentino', 'Italy');
INSERT INTO address VALUES (993, 'Romer Street', 32, 'Malaga', 'Malaga', 'Spain');
INSERT INTO address VALUES (997, 'Samara road', 9976, 'Puebla', 'Puebla', 'Mexico');
INSERT INTO address VALUES (998, 'Jalan Madura 12', 245, 'Jakarta', 'Jakarta', 'Indonesia');

-- Brokers (broker 112 shares identity data with client 112 below).
INSERT INTO broker VALUES (112, 'Joana', 'Lopatenkko', '1970-07-14', 'JLPTK54992', 992);
INSERT INTO broker VALUES (113, 'Walter', 'Schmidt', '1968-09-03', 'WSCH9820783903', 993);
INSERT INTO broker VALUES (114, 'Patricia', 'Lombrardi', '1975-02-22', 'PTLM8878767830', 997);

-- NOTE(review): the six brokerworksfor rows are inserted twice, and the
-- table has no primary key so both copies are kept -- presumably deliberate
-- duplicate test data; verify against the consuming tests before changing.
INSERT INTO brokerworksfor VALUES (112, NULL, 111);
INSERT INTO brokerworksfor VALUES (112, NULL, 112);
INSERT INTO brokerworksfor VALUES (113, 212, NULL);
INSERT INTO brokerworksfor VALUES (113, 211, NULL);
INSERT INTO brokerworksfor VALUES (114, 212, NULL);
INSERT INTO brokerworksfor VALUES (114, NULL, 111);
INSERT INTO brokerworksfor VALUES (112, NULL, 111);
INSERT INTO brokerworksfor VALUES (112, NULL, 112);
INSERT INTO brokerworksfor VALUES (113, 212, NULL);
INSERT INTO brokerworksfor VALUES (113, 211, NULL);
INSERT INTO brokerworksfor VALUES (114, 212, NULL);
INSERT INTO brokerworksfor VALUES (114, NULL, 111);

-- Clients.
INSERT INTO client VALUES (111, 'John', 'Smith', '1950-03-21', 'JSRX229500321', 991);
INSERT INTO client VALUES (112, 'Joana', 'Lopatenkko', '1970-07-14', 'JLPTK54992', 992);

-- Companies (networth exercises both exponent and plain decimal notation).
INSERT INTO company VALUES (211, 'General Motors', 25000000, 1.2345678e+03, 995);
INSERT INTO company VALUES (212, 'GnA Investments', 100000, 1234.5678, 996);

-- Stock book: one stock id per date.
INSERT INTO stockbooklist VALUES ('2008-04-01', 661);
INSERT INTO stockbooklist VALUES ('2008-04-02', 662);
INSERT INTO stockbooklist VALUES ('2008-04-03', 663);
INSERT INTO stockbooklist VALUES ('2008-04-04', 664);
INSERT INTO stockbooklist VALUES ('2008-04-05', 665);
INSERT INTO stockbooklist VALUES ('2008-04-06', 666);
INSERT INTO stockbooklist VALUES ('2008-04-07', 667);
INSERT INTO stockbooklist VALUES ('2008-04-08', 668);
INSERT INTO stockbooklist VALUES ('2008-04-09', 669);

-- Stock information: sharetype 0 rows carry English descriptions,
-- sharetype 1 rows Italian ones (mixed-language data is intentional).
INSERT INTO stockinformation VALUES (661, 100, 0, 211, 'Text description 1');
INSERT INTO stockinformation VALUES (660, 100, 0, 211, 'Text description 2');
INSERT INTO stockinformation VALUES (662, 100, 0, 211, 'Text description 3');
INSERT INTO stockinformation VALUES (663, 100, 0, 211, 'Text description 4');
INSERT INTO stockinformation VALUES (664, 100, 0, 211, 'Text description 5');
INSERT INTO stockinformation VALUES (665, 100, 1, 211, 'Testo di descrizione 1');
INSERT INTO stockinformation VALUES (666, 100, 1, 211, 'Testo di descrizione 2');
INSERT INTO stockinformation VALUES (667, 100, 1, 211, 'Testo di descrizione 3');
INSERT INTO stockinformation VALUES (669, 100, 1, 211, 'Testo di descrizione 4');
INSERT INTO stockinformation VALUES (668, 100, 1, 211, 'Testo di descrizione 5');

-- Transactions: amounts include negative and high-precision values to
-- exercise decimal(10,4) handling.
INSERT INTO transaction VALUES (3331, '2008-04-01', 661, 1, 112, 111, NULL, 12.6);
INSERT INTO transaction VALUES (3332, '2008-04-02', 662, 1, 112, 111, NULL, 108.34);
INSERT INTO transaction VALUES (3333, '2008-04-03', 663, 1, 112, NULL, 212, -2.349);
INSERT INTO transaction VALUES (3334, '2008-04-14', 663, 1, 113, NULL, NULL, 1667.0092);

-- Primary keys (brokerworksfor intentionally has none; see note above).
ALTER TABLE address ADD CONSTRAINT address_pkey PRIMARY KEY (id);
ALTER TABLE broker ADD CONSTRAINT broker_pkey PRIMARY KEY (id);
ALTER TABLE client ADD CONSTRAINT client_pkey PRIMARY KEY (id);
ALTER TABLE company ADD CONSTRAINT company_pkey PRIMARY KEY (id);
ALTER TABLE stockbooklist ADD CONSTRAINT stockbooklist_pkey PRIMARY KEY (date);
ALTER TABLE stockinformation ADD CONSTRAINT stockinformation_pkey PRIMARY KEY (id);
ALTER TABLE transaction ADD CONSTRAINT transaction_pkey PRIMARY KEY (id);
eschwert/ontop
quest-owlapi3/src/test/resources/test/stockexchange-create-db2.sql
SQL
apache-2.0
5,357
/*
 * Copyright 2009 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.collect.Lists;
import com.google.javascript.jscomp.CompilerOptions.TracerMode;
import com.google.javascript.jscomp.PhaseOptimizer.Loop;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import junit.framework.TestCase;

import java.util.List;
import java.util.Random;

/**
 * Tests for {@link PhaseOptimizer}.
 *
 * Each test registers stub passes (see {@link #createPass}) that record their
 * own name into {@code passesRun} when executed, then asserts the exact order
 * in which the optimizer scheduled them.
 *
 * @author nicksantos@google.com (Nick Santos)
 */
public class PhaseOptimizerTest extends TestCase {
  // Names of passes in the order they actually executed.
  private final List<String> passesRun = Lists.newArrayList();
  private PhaseOptimizer optimizer;
  private Compiler compiler;
  private PerformanceTracker tracker;

  @Override
  public void setUp() {
    passesRun.clear();
    compiler = new Compiler();
    compiler.initCompilerOptionsIfTesting();
    tracker = new PerformanceTracker(
        new Node(Token.BLOCK), TracerMode.TIMING_ONLY);
    optimizer = new PhaseOptimizer(compiler, tracker, null);
  }

  // A single one-time pass runs exactly once.
  public void testOneRun() {
    addOneTimePass("x");
    assertPasses("x");
  }

  // A looped pass that reports no changes runs once and the loop exits.
  public void testLoop1() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", 0);
    assertPasses("x");
  }

  // A looped pass reporting 3 changes reruns until a quiet iteration: 4 runs.
  public void testLoop2() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", 3);
    assertPasses("x", "x", "x", "x")
;
  }

  // Two looped passes interleave until both report no changes.
  public void testLoop3() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", 3);
    addLoopedPass(loop, "y", 1);
    assertPasses("x", "y", "x", "y", "x", "x", "y");
  }

  // MAX_LOOPS - 1 changes stays under the loop limit: no error reported.
  public void testNotInfiniteLoop() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", PhaseOptimizer.MAX_LOOPS - 1);
    optimizer.process(null, null);
    assertEquals("There should be no errors.", 0, compiler.getErrorCount());
  }

  // Exceeding MAX_LOOPS must abort with the optimize-loop error.
  public void testInfiniteLoop() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", PhaseOptimizer.MAX_LOOPS + 1);
    try {
      optimizer.process(null, null);
      fail("Expected RuntimeException");
    } catch (RuntimeException e) {
      assertTrue(e.getMessage().contains(PhaseOptimizer.OPTIMIZE_LOOP_ERROR));
    }
  }

  // One-time passes run in order around a fixed-point loop.
  public void testCombined() {
    addOneTimePass("a");
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", 3);
    addLoopedPass(loop, "y", 1);
    addOneTimePass("z");
    assertPasses("a", "x", "y", "x", "y", "x", "x", "y", "z");
  }

  // The sanity-check pass runs after every scheduled pass.
  public void testSanityCheck() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", 1);
    addOneTimePass("z");
    optimizer.setSanityCheck(
        createPassFactory("sanity", createPass("sanity", 0), false));
    assertPasses("x", "sanity", "x", "sanity", "z", "sanity");
  }

  // consume() groups adjacent non-one-time factories into implicit loops.
  public void testConsumption1() {
    optimizer.consume(
        Lists.newArrayList(
            createPassFactory("a", 0, true),
            createPassFactory("b", 1, false),
            createPassFactory("c", 2, false),
            createPassFactory("d", 1, false),
            createPassFactory("e", 1, true),
            createPassFactory("f", 0, true)));
    assertPasses("a", "b", "c", "d", "b", "c", "d", "c", "b", "d", "e", "f");
  }

  public void testConsumption2() {
    optimizer.consume(
        Lists.newArrayList(
            createPassFactory("a", 2, false),
            createPassFactory("b", 1, true),
            createPassFactory("c", 1, false)));
    assertPasses("a", "a", "a", "b", "c", "c");
  }

  public void testConsumption3() {
    optimizer.consume(
        Lists.newArrayList(
            createPassFactory("a", 2, true),
            createPassFactory("b", 0, false),
            createPassFactory("c", 0, false)));
    assertPasses("a", "b", "c");
  }

  // Registering the same pass name twice in one loop is rejected.
  public void testDuplicateLoop() {
    Loop loop = optimizer.addFixedPointLoop();
    addLoopedPass(loop, "x", 1);
    try {
      addLoopedPass(loop, "x", 1);
      fail("Expected exception");
    } catch (IllegalArgumentException e) {}
  }

  // Passes added in random order are rescheduled into OPTIMAL_ORDER.
  public void testPassOrdering() {
    Loop loop = optimizer.addFixedPointLoop();
    List<String> optimalOrder = Lists.newArrayList(
        PhaseOptimizer.OPTIMAL_ORDER);
    Random random = new Random();
    while (optimalOrder.size() > 0) {
      addLoopedPass(
          loop, optimalOrder.remove(random.nextInt(optimalOrder.size())), 0);
    }
    optimizer.process(null, null);
    assertEquals(PhaseOptimizer.OPTIMAL_ORDER, passesRun);
  }

  // Progress callbacks: 4 one-time passes over a 0-100 range report
  // 25/50/75/100. Note this test replaces the compiler/optimizer fields
  // built in setUp with its own instances.
  public void testProgress() {
    final List<Double> progressList = Lists.newArrayList();
    compiler = new Compiler() {
      @Override void setProgress(double p, String name) {
        progressList.add(p);
      }
    };
    compiler.initCompilerOptionsIfTesting();
    optimizer = new PhaseOptimizer(compiler, null,
        new PhaseOptimizer.ProgressRange(0, 100));
    addOneTimePass("x1");
    addOneTimePass("x2");
    addOneTimePass("x3");
    addOneTimePass("x4");
    optimizer.process(null, null);
    assertEquals(4, progressList.size());
    assertEquals(25, Math.round(progressList.get(0)));
    assertEquals(50, Math.round(progressList.get(1)));
    assertEquals(75, Math.round(progressList.get(2)));
    assertEquals(100, Math.round(progressList.get(3)));
  }

  // Runs the optimizer and asserts the exact execution order of pass names.
  public void assertPasses(String ... names) {
    optimizer.process(null, null);
    assertEquals(Lists.newArrayList(names), passesRun);
  }

  private void addOneTimePass(String name) {
    optimizer.addOneTimePass(
        createPassFactory(name, 0, true));
  }

  private void addLoopedPass(Loop loop, String name, int numChanges) {
    loop.addLoopedPass(
        createPassFactory(name, numChanges, false));
  }

  private PassFactory createPassFactory(
      String name, int numChanges, boolean isOneTime) {
    return createPassFactory(name, createPass(name, numChanges), isOneTime);
  }

  private PassFactory createPassFactory(
      String name, final CompilerPass pass, boolean isOneTime) {
    return new PassFactory(name, isOneTime) {
      @Override protected CompilerPass create(AbstractCompiler compiler) {
        return pass;
      }
    };
  }

  // Builds a stub pass that records its name on each run and reports a code
  // change for the first numChanges runs (driving fixed-point iteration).
  private CompilerPass createPass(final String name, int numChanges) {
    final int[] numChangesClosure = new int[] {numChanges};
    return new CompilerPass() {
      @Override public void process(Node externs, Node root) {
        passesRun.add(name);
        if (numChangesClosure[0] > 0) {
          compiler.reportCodeChange();
          numChangesClosure[0] = numChangesClosure[0] - 1;
        }
      }
    };
  }
}
weitzj/closure-compiler
test/com/google/javascript/jscomp/PhaseOptimizerTest.java
Java
apache-2.0
7,093
# Abhiyantriki kjsce Abhiyantriki 2016 Website
Chaitya62/Abhiyantriki
README.md
Markdown
apache-2.0
49
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.psi.scope.processor; import com.intellij.openapi.util.Key; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiSubstitutor; import com.intellij.psi.ResolveState; import com.intellij.psi.filters.ElementFilter; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.util.SmartList; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.List; public class FilterScopeProcessor<T> implements PsiScopeProcessor { protected final List<T> myResults; private PsiElement myCurrentDeclarationHolder; private final ElementFilter myFilter; private final PsiScopeProcessor myProcessor; public FilterScopeProcessor(@NotNull ElementFilter filter, @NotNull List<T> container) { this(filter, null, container); } public FilterScopeProcessor(@NotNull ElementFilter filter, @NotNull PsiScopeProcessor processor) { this(filter, processor, new SmartList<>()); } public FilterScopeProcessor(@NotNull ElementFilter filter) { this(filter, null, new SmartList<>()); } public FilterScopeProcessor(@NotNull ElementFilter filter, @Nullable PsiScopeProcessor processor, @NotNull List<T> container) { myFilter = filter; myProcessor = processor; myResults = container; } @Override public void handleEvent(@NotNull PsiScopeProcessor.Event event, Object associated) { if (myProcessor != null) { myProcessor.handleEvent(event, associated); } if (event == PsiScopeProcessor.Event.SET_DECLARATION_HOLDER && associated instanceof PsiElement) { myCurrentDeclarationHolder = (PsiElement)associated; } } @Override public boolean execute(@NotNull PsiElement element, @NotNull ResolveState state) { if (myFilter.isAcceptable(element, myCurrentDeclarationHolder)) { if (myProcessor != null) { return myProcessor.execute(element, state); } add(element, state.get(PsiSubstitutor.KEY)); } return true; } 
@SuppressWarnings("unchecked") protected void add(@NotNull PsiElement element, @NotNull PsiSubstitutor substitutor) { myResults.add((T)element); } @Override public <K> K getHint(@NotNull Key<K> hintKey) { if (myProcessor != null) { return myProcessor.getHint(hintKey); } return null; } @NotNull public List<T> getResults() { return myResults; } }
smmribeiro/intellij-community
java/java-psi-impl/src/com/intellij/psi/scope/processor/FilterScopeProcessor.java
Java
apache-2.0
2,521
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.util; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableSet; import com.google.common.primitives.Primitives; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.util.Arrays; import java.util.Collection; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.Optional; import java.util.Queue; import java.util.concurrent.ExecutionException; import javax.annotation.Nullable; public class Types { private static final LoadingCache<Field, Type> FIRST_NON_OPTIONAL_TYPE_CACHE = CacheBuilder.newBuilder() .weakValues() .build( new CacheLoader<Field, Type>() { @Override public Type load(Field field) { boolean isOptional = Optional.class.isAssignableFrom(field.getType()); if (isOptional) { Type type = field.getGenericType(); if (type instanceof ParameterizedType) { return ((ParameterizedType) type).getActualTypeArguments()[0]; } else { throw new RuntimeException("Unexpected type parameter for Optional: " + type); } } else { return field.getGenericType(); } } }); private Types() { // Utility class. } /** * Determine the "base type" of a field. 
That is, the following will be returned: * * <ul> * <li>{@code String} -&gt; {@code String.class} * <li>{@code Optional&lt;String&gt;} -&gt; {@code String.class} * <li>{@code Set&lt;String&gt;} -&gt; {@code String.class} * <li>{@code Collection&lt;? extends Comparable&gt;} -&gt; {@code Comparable.class} * <li>{@code Collection&lt;? super Comparable} -&gt; {@code Object.class} * </ul> */ public static Type getBaseType(Field field) { Type type = getFirstNonOptionalType(field); if (type instanceof ParameterizedType) { type = ((ParameterizedType) type).getActualTypeArguments()[0]; } if (type instanceof WildcardType) { type = ((WildcardType) type).getUpperBounds()[0]; } return Primitives.wrap((Class<?>) type); } /** * @return The raw type of the {@link Collection} a field represents, even if contained in an * {@link Optional}, but without the ParameterizedType information. */ @SuppressWarnings("unchecked") @Nullable public static Class<? extends Collection<?>> getContainerClass(Field field) { Type type = getFirstNonOptionalType(field); if (!(type instanceof ParameterizedType)) { return null; } Type rawType = ((ParameterizedType) type).getRawType(); if (!(rawType instanceof Class)) { return null; } Class<?> clazz = (Class<?>) rawType; if (!(Collection.class.isAssignableFrom(clazz))) { return null; } return (Class<? extends Collection<?>>) clazz; } /** * Get the first complete {@link Type} in a signature that's non-optional, complete with the * information from the {@link ParameterizedType}. 
* * <ul> * <li>String -&gt; String * <li>Optional&lt;String$gt; -&gt; String * <li>ImmutableSet&lt;String&gt; -&gt; ImmutableSet&lt;String&gt; * <li>Optional&lt;ImmutableSet&lt;String&gt;&gt; -&gt; ImmutableSet&lt;String&gt; * </ul> */ public static Type getFirstNonOptionalType(Field field) { try { return FIRST_NON_OPTIONAL_TYPE_CACHE.get(field); } catch (ExecutionException e) { throw new RuntimeException(e); } } /** * Returns a Set of classes and interfaces inherited or implemented by clazz. * * <p>Result includes clazz itself. Result is ordered closest to furthest, i.e. first entry will * always be clazz and last entry will always be {@link java.lang.Object}. */ public static ImmutableSet<Class<?>> getSupertypes(Class<?> clazz) { LinkedHashSet<Class<?>> ret = new LinkedHashSet<>(); Queue<Class<?>> toExpand = new LinkedList<>(); toExpand.add(clazz); while (!toExpand.isEmpty()) { Class<?> current = toExpand.remove(); if (!ret.add(current)) { continue; } toExpand.addAll(Arrays.asList(current.getInterfaces())); if (current.getSuperclass() != null) { toExpand.add(current.getSuperclass()); } } return ImmutableSet.copyOf(ret); } }
shs96c/buck
src/com/facebook/buck/util/Types.java
Java
apache-2.0
5,261
<h2>Comments</h2> <pre><code>(* This is an old style comment *) { This is a Turbo Pascal comment } // This is a Delphi comment.</code></pre> <h2>Strings and characters</h2> <pre><code>'This is a pascal string' '' 'a' ^G #7 #$f4 'A tabulator character: '#9' is easy to embed'</code></pre> <h2>Numbers</h2> <pre><code>123 123.456 132.456e-789 132.456e+789 $7aff &17 %11110101</code></pre> <h2>Full example</h2> <pre><code>Type Str25 = String[25]; TBookRec = Record Title, Author, ISBN : Str25; Price : Real; End; Procedure EnterNewBook(var newBook : TBookRec); Begin Writeln('Please enter the book details: '); Write('Book Name: '); Readln(newBook.Title); Write('Author: '); Readln(newBook.Author); Write('ISBN: '); Readln(newBook.ISBN); Write('Price: '); Readln(newBook.Price); End; Var bookRecArray : Array[1..10] of TBookRec; i : 1..10; Begin For i := 1 to 10 do EnterNewBook(bookRecArray[i]); Writeln('Thanks for entering the book details'); Write('Now choose a record to display from 1 to 10: '); Readln(i); Writeln('Here are the book details of record #',i,':'); Writeln; Writeln('Title: ', bookRecArray[i].Title); Writeln('Author: ', bookRecArray[i].Author); Writeln('ISBN: ', bookRecArray[i].ISBN); Writeln('Price: ', bookRecArray[i].Price); Readln; End.</code></pre>
Heanes/cdn.heanes.com
js/prism/prism-1.14.0/examples/prism-pascal.html
HTML
apache-2.0
1,399
# -*- coding: utf-8 -*- ############################################################################### # # GetTag # Retrieves a specified tag object. # # Python versions 2.6, 2.7, 3.x # # Copyright 2014, Temboo Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. # # ############################################################################### from temboo.core.choreography import Choreography from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import ChoreographyExecution import json class GetTag(Choreography): def __init__(self, temboo_session): """ Create a new instance of the GetTag Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied. """ super(GetTag, self).__init__(temboo_session, '/Library/GitHub/GitDataAPI/Tags/GetTag') def new_input_set(self): return GetTagInputSet() def _make_result_set(self, result, path): return GetTagResultSet(result, path) def _make_execution(self, session, exec_id, path): return GetTagChoreographyExecution(session, exec_id, path) class GetTagInputSet(InputSet): """ An InputSet with methods appropriate for specifying the inputs to the GetTag Choreo. The InputSet object is used to specify input parameters when executing this Choreo. """ def set_AccessToken(self, value): """ Set the value of the AccessToken input for this Choreo. ((conditional, string) The Access Token retrieved during the OAuth process. 
Required when accessing a protected resource.) """ super(GetTagInputSet, self)._set_input('AccessToken', value) def set_Repo(self, value): """ Set the value of the Repo input for this Choreo. ((required, string) The name of the repo associated with the tag to retrieve.) """ super(GetTagInputSet, self)._set_input('Repo', value) def set_SHA(self, value): """ Set the value of the SHA input for this Choreo. ((required, string) The SHA associated with the tag to retrieve.) """ super(GetTagInputSet, self)._set_input('SHA', value) def set_User(self, value): """ Set the value of the User input for this Choreo. ((required, string) The GitHub username.) """ super(GetTagInputSet, self)._set_input('User', value) class GetTagResultSet(ResultSet): """ A ResultSet with methods tailored to the values returned by the GetTag Choreo. The ResultSet object is used to retrieve the results of a Choreo execution. """ def getJSONFromString(self, str): return json.loads(str) def get_Response(self): """ Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from GitHub.) """ return self._output.get('Response', None) def get_Limit(self): """ Retrieve the value for the "Limit" output from this Choreo execution. ((integer) The available rate limit for your account. This is returned in the GitHub response header.) """ return self._output.get('Limit', None) def get_Remaining(self): """ Retrieve the value for the "Remaining" output from this Choreo execution. ((integer) The remaining number of API requests available to you. This is returned in the GitHub response header.) """ return self._output.get('Remaining', None) class GetTagChoreographyExecution(ChoreographyExecution): def _make_result_set(self, response, path): return GetTagResultSet(response, path)
jordanemedlock/psychtruths
temboo/core/Library/GitHub/GitDataAPI/Tags/GetTag.py
Python
apache-2.0
4,161
nifi-provenance-repo-bundle =========================== ### Overview Parent project for the Kylo NiFi provenance reporting 2 versions of the com.thinkbiganalytics.nifi.provenance.repo.KyloPersistentProvenanceEventRepository exist 1. 'nifi-provenance-repo-v1-nar' is for NiFi 1.0.x and NiFi 1.1.x versions 2. 'nifi-provenance-repo-v1.2-nar' is for NiFi 1.2.x and 1.3.x versions ### Deployment 1. The NAR must be deployed to /nifi/lib/
peter-gergely-horvath/kylo
integrations/nifi/nifi-nar-bundles/nifi-provenance-repo-bundle/README.md
Markdown
apache-2.0
440
maintainer "Opscode, Inc." maintainer_email "cookbooks@opscode.com" license "Apache 2.0" description "Installs thrift from source" long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) version "0.99.0" recipe "thrift", "Installs thrift from source" supports "ubuntu" %w{ build-essential boost python }.each do |cb| depends cb end
brunosoab/bkether_cookbooks
thrift/metadata.rb
Ruby
apache-2.0
383
// Copyright 2017 The TensorFlow Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ============================================================================== package commands import ( "fmt" "log" "strings" "sync" "time" "context" "flag" "github.com/fatih/color" "github.com/google/subcommands" "github.com/tensorflow/tpu/tools/ctpu/config" "github.com/tensorflow/tpu/tools/ctpu/ctrl" ) // StatusTPUCP encapsulates the control plane interfaces required to execute the Status command. type StatusTPUCP interface { // OptionallyRetrieveInstance retrieves the instance, but can optionally not enable the TPU API. OptionallyRetrieveInstance(bool) (*ctrl.TPUInstance, bool, error) } // StatusGCECP encapsulates the control plane interfaces required to execute the Status command. type StatusGCECP interface { // OptionallyRetrieveInstance retrieves the instance, but can optionally not enable the TPU API. OptionallyRetrieveInstance(bool) (*ctrl.GCEInstance, bool, error) } type statusCmd struct { cfg *config.Config tpu StatusTPUCP gce StatusGCECP details bool noColor bool } // StatusCommand creates the status command. 
func StatusCommand(cfg *config.Config, tpu StatusTPUCP, gce StatusGCECP) subcommands.Command { return &statusCmd{ cfg: cfg, tpu: tpu, gce: gce, } } func (statusCmd) Name() string { return "status" } func (s *statusCmd) SetFlags(f *flag.FlagSet) { s.cfg.SetFlags(f) // Allow users to specify cfg flags either before or after the subcommand name. f.BoolVar(&s.details, "details", false, "Prints out more details about the state of the Compute Engine VM and Cloud TPU.") f.BoolVar(&s.noColor, "no-color", false, "Disable color in the output.") } func (statusCmd) Synopsis() string { return "queries the control planes for the current Compute Engine & TPU status." } func (statusCmd) Usage() string { return `ctpu status [--no-color] ` } type statusCmdAlias struct { statusCmd } // StatusCommandAlias creates an alias for the status command with a shorter name. func StatusCommandAlias(cfg *config.Config, tpu StatusTPUCP, gce StatusGCECP) subcommands.Command { return &statusCmdAlias{statusCmd{cfg: cfg, tpu: tpu, gce: gce}} } func (statusCmdAlias) Name() string { return "st" } func (statusCmdAlias) Synopsis() string { return "alias to ctpu status (retrieves info on current instances)" } func (statusCmdAlias) Usage() string { return "ctpu st\n" } func (s *statusCmd) runnableStatus(exists, isRunning bool, status string) string { if !exists { return color.YellowString("--") } if isRunning { return color.GreenString("RUNNING") } return color.RedString(status) } func (s *statusCmd) vmStatus(vm *ctrl.GCEInstance) string { var status string if vm != nil { status = vm.Status } exists := vm != nil isRunning := vm != nil && vm.IsRunning() return s.runnableStatus(exists, isRunning, status) } func (s *statusCmd) timeDelta(t time.Time) string { delta := time.Since(t).Round(time.Minute) if delta < 0 { return "--" } if delta.Minutes() < 1 { return "< 1 minute" } minutes := (delta / time.Minute) % 60 hours := delta / time.Hour days := delta / (time.Hour * 24) if days > 3 { return fmt.Sprintf("%dd 
%dh", days, hours%24) } if hours == 0 { return fmt.Sprintf("%dm", minutes) } return fmt.Sprintf("%dh %dm", hours, minutes) } func (s *statusCmd) flockStatus(vm *ctrl.GCEInstance, tpu *ctrl.TPUInstance) string { if vm == nil && tpu == nil { return color.BlueString("No instances currently exist.") } if vm != nil && vm.IsRunning() && tpu != nil && tpu.IsRunning() { return color.GreenString("Your cluster is running!") } if vm != nil && !vm.IsRunning() && tpu == nil { return color.YellowString("Your cluster is paused.") } return color.RedString("Your cluster is in an unhealthy state.") } func (s *statusCmd) tpuStatus(tpu *ctrl.TPUInstance) string { var status string if tpu != nil { status = tpu.State } exists := tpu != nil isRunning := tpu != nil && tpu.IsRunning() return s.runnableStatus(exists, isRunning, status) } func (s *statusCmd) Execute(ctx context.Context, flags *flag.FlagSet, args ...interface{}) subcommands.ExitStatus { err := s.cfg.Validate() if err != nil { log.Print(err) return subcommands.ExitFailure } if s.noColor { color.NoColor = true } var vm *ctrl.GCEInstance var tpu *ctrl.TPUInstance var gceEnabled, tpuEnabled bool var exitTPU, exitVM subcommands.ExitStatus var wg sync.WaitGroup wg.Add(2) go func() { var err error vm, gceEnabled, err = s.gce.OptionallyRetrieveInstance(false) if err != nil { log.Print(err) exitVM = subcommands.ExitFailure } wg.Done() }() go func() { var err error tpu, tpuEnabled, err = s.tpu.OptionallyRetrieveInstance(false) if err != nil { log.Print(err) exitTPU = subcommands.ExitFailure } wg.Done() }() wg.Wait() if exitTPU != subcommands.ExitSuccess { return exitTPU } if exitVM != subcommands.ExitSuccess { return exitVM } if !gceEnabled || !tpuEnabled { if !gceEnabled && !tpuEnabled { fmt.Println("Neither the Compute Engine nor the Cloud TPU services have been enabled.") } else if !gceEnabled { fmt.Println("The Compute Engine service has not been enabled.") } else { fmt.Println("The Cloud TPU service has not been enabled.") } 
return subcommands.ExitFailure } fmt.Printf(`%s Compute Engine VM: %s Cloud TPU: %s `, s.flockStatus(vm, tpu), s.vmStatus(vm), s.tpuStatus(tpu)) vmIP, vmCreated, vmCreateDelta, machineType := "--", "--", "--", "--" if vm != nil { if len(vm.NetworkInterfaces) > 0 { vmIP = vm.NetworkInterfaces[0].NetworkIP } vmCreated = vm.CreationTimestamp if createTime, err := time.Parse(time.RFC3339, vmCreated); err == nil { vmCreateDelta = s.timeDelta(createTime) } machineTypeParts := strings.Split(vm.MachineType, "/") machineType = machineTypeParts[len(machineTypeParts)-1] } tpuType, tpuIP, tpuVer, tpuSA, tpuCreated, tpuCreateDelta, tpuState, tpuHealth, tpuPreemptible := "--", "--", "--", "--", "--", "--", "--", "--", "--" if tpu != nil { tpuType = tpu.AcceleratorType if len(tpu.NetworkEndpoints) > 0 { tpuIP = tpu.NetworkEndpoints[0].IpAddress } tpuVer = tpu.TensorflowVersion tpuSA = tpu.ServiceAccount tpuCreated = tpu.CreateTime if createTime, err := time.Parse(time.RFC3339Nano, tpuCreated); err == nil { tpuCreateDelta = s.timeDelta(createTime) } tpuState = tpu.State tpuHealth = tpu.Health tpuPreemptible = fmt.Sprintf("%v", tpu.IsPreemptible()) } if s.details { fmt.Printf(` Compute Engine IP Address: %s Compute Engine Created: %s ago (@: %s) Compute Engine Machine Type: %s TPU Accelerator Type: %s TPU IP Address: %s TPU TF Version: %s TPU Service Acct: %s TPU Created: %s ago (@: %s) TPU State: %s TPU Health: %s TPU Preemptible: %s `, vmIP, vmCreateDelta, vmCreated, machineType, tpuType, tpuIP, tpuVer, tpuSA, tpuCreateDelta, tpuCreated, tpuState, tpuHealth, tpuPreemptible) } return subcommands.ExitSuccess }
mlperf/training_results_v0.5
v0.5.0/google/cloud_v3.8/ssd-tpuv3-8/code/ssd/model/tpu/tools/ctpu/commands/status.go
GO
apache-2.0
7,684
--- external help file: Microsoft.Azure.Commands.AzureBackup.dll-Help.xml Module Name: AzureRM.Backup ms.assetid: 6187F603-5298-4854-94F3-0C38FCF3125F online version: https://docs.microsoft.com/en-us/powershell/module/azurerm.backup/get-azurermbackupjobdetails schema: 2.0.0 --- # Get-AzureRmBackupJobDetails ## SYNOPSIS Gets the details of a Backup job. ## SYNTAX ### JobsFiltersSet (Default) ``` Get-AzureRmBackupJobDetails -Job <AzureRMBackupJob> [-DefaultProfile <IAzureContextContainer>] [<CommonParameters>] ``` ### IdFiltersSet ``` Get-AzureRmBackupJobDetails -Vault <AzureRMBackupVault> -JobId <String> [-DefaultProfile <IAzureContextContainer>] [<CommonParameters>] ``` ## DESCRIPTION The **Get-AzureRmBackupJobDetails** cmdlet gets the details of an Azure Backup job. You can use this cmdlet to gather information about a job that fails. ## EXAMPLES ### Example 1: Display the details of a failed job ``` PS C:\>$Vault = Get-AzureRmBackupVault -Name "Vault03" PS C:\> $Jobs = Get-AzureRmBackupJob -Vault $Vault -Status Failed PS C:\> $JobDetails = Get-AzureRmBackupJobDetails -Job $Jobs[0] PS C:\> $JobDetails.ErrorDetails ErrorCode ErrorMessage Recommendations --------- ------------ --------------- 400001 Command execution failed. {Another operation is currently in p... ``` The first command gets the vault named Vault03 by using the **Get-AzureRmBackupVault** cmdlet. The command stores that object in the $Vault variable. The second command gets failed jobs from the vault in $Vault, and then stores them in the $Jobs array variable. The third job gets details for the first job in the $Jobs variable, and then stores those details in the $JobDetails variable. The final command displays the **ErrorDetails** property of $JobDetails by using standard dot syntax. ### Example 2: Display the recommended action for a failed job ``` PS C:\>$JobDetails.ErrorDetails.Recommendations Another operation is currently in progress on this item. 
Please wait until the previous operation is completed, and then retry. ``` This command displays the recommended action from the $JobDetails variable that was created in the first example. ## PARAMETERS ### -DefaultProfile The credentials, account, tenant, and subscription used for communication with azure ```yaml Type: Microsoft.Azure.Commands.Common.Authentication.Abstractions.IAzureContextContainer Parameter Sets: (All) Aliases: AzureRmContext, AzureCredential Required: False Position: Named Default value: None Accept pipeline input: False Accept wildcard characters: False ``` ### -Job Specifies a job for which this cmdlet gets details. To obtain an **AzureRmBackupJob** object, use the Get-AzureRmBackupJob cmdlet. ```yaml Type: Microsoft.Azure.Commands.AzureBackup.Models.AzureRMBackupJob Parameter Sets: JobsFiltersSet Aliases: Required: True Position: Named Default value: None Accept pipeline input: True (ByValue) Accept wildcard characters: False ``` ### -JobId Specifies the ID of a job for which this cmdlet gets details. The ID is the **InstanceId** property of an **AzureRmBackupJob** object. To obtain an **AzureRmBackupJob** object, use Get-AzureRmBackupJob. ```yaml Type: System.String Parameter Sets: IdFiltersSet Aliases: Required: True Position: Named Default value: None Accept pipeline input: False Accept wildcard characters: False ``` ### -Vault Specifies the Backup vault for which this cmdlet gets job details. To obtain an **AzureRmBackupVault** object, use the Get-AzureRmBackupVault cmdlet. ```yaml Type: Microsoft.Azure.Commands.AzureBackup.Models.AzureRMBackupVault Parameter Sets: IdFiltersSet Aliases: Required: True Position: Named Default value: None Accept pipeline input: False Accept wildcard characters: False ``` ### CommonParameters This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. 
For more information, see about_CommonParameters (http://go.microsoft.com/fwlink/?LinkID=113216). ## INPUTS ### Microsoft.Azure.Commands.AzureBackup.Models.AzureRMBackupJob Parameters: Job (ByValue) ## OUTPUTS ### Microsoft.Azure.Commands.AzureBackup.Models.AzureRMBackupJobDetails ## NOTES ## RELATED LINKS [Get-AzureRmBackupJob](./Get-AzureRmBackupJob.md) [Get-AzureRmBackupVault](./Get-AzureRmBackupVault.md)
AzureAutomationTeam/azure-powershell
src/ResourceManager/AzureBackup/Commands.AzureBackup/help/Get-AzureRmBackupJobDetails.md
Markdown
apache-2.0
4,471
<html> <head> <link rel="stylesheet" type="text/css" href="css/tips.css"> </head> <body> <p>To navigate to the declaration of a class, method or variable used somewhere in the code, position the caret at the usage and press <span class="shortcut">&shortcut:GotoDeclaration;</span>. You may also click the mouse on usages with the <span class="shortcut">Ctrl</span> key pressed to jump to declarations.</p> <p class="image"><img src="images/ctrl_click.gif"></p> </body> </html>
android-ia/platform_tools_idea
platform/platform-resources-en/src/tips/GoToDeclaration.html
HTML
apache-2.0
514
import h from 'spec/spec_helper'; import { config, path, t, utils, fsAsync } from 'azk'; import { Cli } from 'azk/cli'; import { Manifest } from 'azk/manifest'; describe('Azk cli, init controller', function() { var outputs = []; var ui = h.mockUI(beforeEach, outputs); var manifest = config('manifest'); var cli_options = {}; var cli = new Cli(cli_options) .route('init'); var doc_opts = { exit: false }; var run_options = { ui: ui }; describe("run in a project already has a manifest", function() { var message = t("commands.init.already_exists", manifest); before(() => { return h.tmp_dir().then((project) => { run_options.cwd = project; return fsAsync.createFile(path.join(project, manifest)); }); }); it("should fail", function() { doc_opts.argv = ['init']; var options = cli.router.cleanParams(cli.docopt(doc_opts)); return cli.run(doc_opts, run_options).then((code) => { h.expect(code).to.equal(1); h.expect(options).to.have.property('init', true); h.expect(options).to.have.property('path', null); h.expect(outputs[0]).to.match(RegExp(h.escapeRegExp(message))); }); }); it("should sucess if --force is passed", function() { doc_opts.argv = ['init', '--force']; var options = cli.docopt(doc_opts); return cli.run(doc_opts, run_options).then((code) => { h.expect(options).to.have.property('--force', true); h.expect(code).to.equal(0); h.expect(outputs[0]).to.not.match(RegExp(h.escapeRegExp(message))); }); }); }); it("should generate a manifest with a example system in a blank dir", function() { return h.tmp_dir().then((project) => { doc_opts.argv = ['init']; run_options.cwd = project; var options = cli.router.cleanParams(cli.docopt(doc_opts)); // Check generated manifest return cli.run(doc_opts, run_options).then((code) => { var manifest = new Manifest(project); var system = manifest.systemDefault; var name = path.basename(project); h.expect(code).to.equal(0); h.expect(options).to.have.property('path', null); h.expect(options).to.have.property('filename', false); 
h.expect(system).to.have.deep.property("name", "example"); h.expect(system).to.have.deep.property("image.name", "[repository]:[tag]"); h.expect(system).to.have.deep.property("depends").and.to.eql([]); var obj = {}; obj[`/azk/${name}`] = utils.docker.resolvePath(manifest.manifestPath); h.expect(system).to.have.deep.property("options.workdir", `/azk/${name}`); h.expect(system).to.have.deep.property("mounts" ).and.to.eql(obj); h.expect(system).to.have.deep.property("options.command").and.to.eql("# command to run app"); h.expect(system).to.have.deep.property("options.envs" ).and.to.eql({ EXAMPLE: "value" }); // Check messages var message = t("commands.init.not_found"); h.expect(outputs[0]).to.match(RegExp(h.escapeRegExp(message))); }); }); }); });
renanmpimentel/azk
spec/cmds/init_spec.js
JavaScript
apache-2.0
3,149
// Copyright 2016 The Cockroach Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. // // Author: Peter Mattis (peter@cockroachlabs.com) package storage_test import ( "testing" "github.com/pkg/errors" "golang.org/x/net/context" "github.com/cockroachdb/cockroach/pkg/server" "github.com/cockroachdb/cockroach/pkg/storage" "github.com/cockroachdb/cockroach/pkg/testutils" "github.com/cockroachdb/cockroach/pkg/util/leaktest" "github.com/cockroachdb/cockroach/pkg/util/stop" ) func TestEagerReplication(t *testing.T) { defer leaktest.AfterTest(t)() // Start with the split queue disabled. storeCfg := storage.TestStoreConfig(nil) storeCfg.TestingKnobs.DisableSplitQueue = true stopper := stop.NewStopper() defer stopper.Stop(context.TODO()) store := createTestStoreWithConfig(t, stopper, storeCfg) // Disable the replica scanner so that we rely on the eager replication code // path that occurs after splits. store.SetReplicaScannerActive(false) // Enable the split queue and force a scan and process. store.SetSplitQueueActive(true) store.ForceSplitScanAndProcess() // The addition of replicas to the replicateQueue after a split // occurs happens after the update of the descriptors in meta2 // leaving a tiny window of time in which the newly split replica // will not have been added to purgatory. Thus we loop. 
testutils.SucceedsSoon(t, func() error { // After the initial splits have been performed, all of the resulting ranges // should be present in replicate queue purgatory (because we only have a // single store in the test and thus replication cannot succeed). expected, err := server.ExpectedInitialRangeCount(store.DB()) if err != nil { return err } if n := store.ReplicateQueuePurgatoryLength(); expected != n { return errors.Errorf("expected %d replicas in purgatory, but found %d", expected, n) } return nil }) }
nvanbenschoten/epaxos
vendor/github.com/cockroachdb/cockroach/pkg/storage/replicate_test.go
GO
apache-2.0
2,402
#!/usr/bin/env python # update-dependencies-bad.py - Fails on bad.swift -*- python -*- # # This source file is part of the Swift.org open source project # # Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors # Licensed under Apache License v2.0 with Runtime Library Exception # # See https://swift.org/LICENSE.txt for license information # See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors # # ---------------------------------------------------------------------------- # # Fails if the input file is named "bad.swift" or "crash.swift"; otherwise # dispatches to update-dependencies.py. "crash.swift" gives an exit code # other than 1. # # ---------------------------------------------------------------------------- from __future__ import print_function import os import shutil import sys assert sys.argv[1] == '-frontend' primaryFile = sys.argv[sys.argv.index('-primary-file') + 1] if (os.path.basename(primaryFile) == 'bad.swift' or os.path.basename(primaryFile) == 'crash.swift'): print("Handled", os.path.basename(primaryFile)) # Replace the dependencies file with the input file. try: depsFile = sys.argv[sys.argv.index( '-emit-reference-dependencies-path') + 1] shutil.copyfile(primaryFile, depsFile) except ValueError: pass if os.path.basename(primaryFile) == 'bad.swift': sys.exit(1) else: sys.exit(129) execDir = os.path.dirname(os.path.abspath(__file__)) execfile(os.path.join(execDir, "update-dependencies.py"))
gottesmm/swift
test/Driver/Dependencies/Inputs/update-dependencies-bad.py
Python
apache-2.0
1,560
/** * @license * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ foam.CLASS({ package: 'foam.input', name: 'TouchEvent', properties: [ { class: 'Float', name: 'x' }, { class: 'Float', name: 'y' }, { class: 'Boolean', name: 'claimed', value: false } ] }); foam.CLASS({ package: 'foam.input', name: 'Mouse', topics: [ 'down', 'move', 'touch', 'up' ], properties: [ 'lastTouch', 'x', 'y', { name: 'element', postSet: function(old, e) { if ( old ) { old.removeEventListener('mousedown', this.onMouseDown); old.removeEventListener('mouseup', this.onMouseUp); old.removeEventListener('mousemove', this.onMouseMove); } e.addEventListener('mousedown', this.onMouseDown); e.addEventListener('mouseup', this.onMouseUp); e.addEventListener('mousemove', this.onMouseMove); } } ], methods: [ function install(element) { this.ref = element; } ], listeners: [ { name: 'onMouseDown', code: function(e) { var bounds = this.element.getBoundingClientRect(); this.x = e.clientX - bounds.left; this.y = e.clientY - bounds.top; this.down.pub(); if ( this.touch.hasListeners() ) { if ( this.lastTouch ) this.lastTouch.detach(); this.lastTouch = foam.input.TouchEvent.create(); this.lastTouch.onDetach(this.lastTouch.x$.follow(this.x$)); this.lastTouch.onDetach(this.lastTouch.y$.follow(this.y$)); this.touch.pub(this.lastTouch); if ( this.lastTouch && this.lastTouch.claimed ) e.preventDefault(); } // While the mouse is down, track the movements and mouseup on the // entire 
window so it's tracked if/when the mouse leaves the element. window.addEventListener('mouseup', this.onMouseUp); window.addEventListener('mousemove', this.onMouseMove); } }, { name: 'onMouseUp', code: function(e) { this.up.pub(); if ( this.lastTouch ) { this.lastTouch.detach(); this.lastTouch = undefined; } window.removeEventListener('mouseup', this.onMouseUp); window.removeEventListener('mousemove', this.onMouseMove); } }, { name: 'onMouseMove', code: function(e) { if ( this.lastTouch || this.hasListeners('propertyChange') || this.move.hasListeners() ) { var bounds = this.element.getBoundingClientRect(); this.x = e.clientX - bounds.left; this.y = e.clientY - bounds.top; this.move.pub(); if ( this.lastTouch && this.lastTouch.claimed ) e.preventDefault(); } } } ] }); foam.CLASS({ package: 'foam.input', name: 'Touch', topics: [ 'touch' ], properties: [ { name: 'touches', factory: function() { return {}; } }, { name: 'element', postSet: function(old, e) { if ( old ) { old.removeEventListener('touchstart', this.onTouchStart); old.removeEventListener('touchmove', this.onTouchMove); old.removeEventListener('touchend', this.onTouchEnd); } e.addEventListener('touchstart', this.onTouchStart); e.addEventListener('touchmove', this.onTouchMove); e.addEventListener('touchend', this.onTouchEnd); } } ], listeners: [ function onTouchStart(e) { var newTouches = e.changedTouches; var bounds = this.element.getBoundingClientRect(); for ( var i = 0 ; i < newTouches.length ; i++ ) { var touch = newTouches.item(i); var touchEvent = foam.input.TouchEvent.create({ x: touch.clientX - bounds.left, y: touch.clientY - bounds.top }); this.touch.pub(touchEvent); if ( touchEvent.claimed ) e.preventDefault(); this.touches[touch.identifier] = touchEvent; } }, function onTouchMove(e) { var changed = e.changedTouches; var bounds = this.element.getBoundingClientRect(); for ( var i = 0 ; i < changed.length ; i++ ) { var touch = changed.item(i); var event = this.touches[touch.identifier]; event.x = 
touch.clientX - bounds.left; event.y = touch.clientY - bounds.top; if ( event.claimed ) e.preventDefault(); } }, function onTouchEnd(e) { var changed = e.changedTouches; for ( var i = 0 ; i < changed.length ; i++ ) { var touch = changed.item(i); this.touches[touch.identifier].detach(); delete this.touches[touch.identifier]; } } ] }); foam.CLASS({ package: 'foam.input', name: 'Pointer', requires: [ 'foam.input.Mouse', 'foam.input.Touch' ], topics: [ 'touch' ], properties: [ { name: 'element', required: true }, { name: 'mouseInput', factory: function() { var m = this.Mouse.create(); this.onDetach(m.element$.follow(this.element$)); this.onDetach(m.touch.sub(this.onTouch)); return m; } }, { name: 'touchInput', factory: function() { var t = this.Touch.create(); this.onDetach(t.element$.follow(this.element$)); this.onDetach(t.touch.sub(this.onTouch)); return t; } } ], methods: [ function init() { // Assigning to unused variables to make Closure happy. var mi = this.mouseInput; var ti = this.touchInput; } ], listeners: [ function onTouch(e, _, t) { this.touch.pub(t); } ] });
jacksonic/vjlofvhjfgm
src/lib/input.js
JavaScript
apache-2.0
6,330
/*****************************************************************************/ /*! *\file minisat_solver.h *\brief Adaptation of MiniSat to DPLL(T) * * Author: Alexander Fuchs * * Created: Fri Sep 08 11:04:00 2006 * * <hr> * * License to use, copy, modify, sell and/or distribute this software * and its documentation for any purpose is hereby granted without * royalty, subject to the terms and conditions defined in the \ref * LICENSE file provided with this distribution. * * <hr> */ /*****************************************************************************/ /****************************************************************************************[Solver.h] MiniSat -- Copyright (c) 2003-2005, Niklas Een, Niklas Sorensson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
**************************************************************************************************/ #ifndef _cvc3__minisat_h_ #define _cvc3__minisat_h_ #include "minisat_types.h" #include "minisat_varorder.h" #include "minisat_derivation.h" #include "dpllt.h" #include <queue> #include <stack> #include <vector> #include <limits> #include "hash_set.h" // changes to MiniSat for CVC integration: // 1) Decision heuristics // 2) Theory clauses // 3) Theory conflicts // 4) Theory implications // 5) binary clause trick // // in more detail: // 1) Decision heuristics // if a CVC decider is given (d_decider), // it is used instead of MiniSat's decision heuristics // to choose the next decision literal. // // 2) Theory clauses // any number of clauses can be added at any decision level. // see explanations for d_conflict and d_pendingClauses // // 3) Theory conflicts // theory conflicts are just treated as conflicting theory clauses // // 4) Theory implications // can be treated just as theory clauses if their explanation is retrieved immediately. // otherwise, Clause::TheoryImplication() is used as a reason // and the computation level is assumed to be the decision level, // until the explanation is retrieved (see d_theoryExplanations). // other changes: // - binary clause trick // MiniSat sometimes (watched literal, implication reason) // used a pointer to a clause to represent a unary clause. // the lowest bit was used to distinguish between a pointer, // and the integer representing the literal of the unit clause. // this saved memory and a pointer derefence. // while this is reported to increase the performance by about 10%-20%, // it also complicated the code. removing it didn't show any // worse performance, so this trick was dropped. // // - store all clauses // MiniSat stored unit and binary clauses only implicitly, // in the context and the watched literal data. // without the binary clause trick binary clauses have to be stored explicitly in d_clauses anyway. 
// mostly for consistency and simplicity unary clauses are stored expicitly as well. // not-so-convincing reasons are that this makes it also simpler to handle conflicting // theory unit clauses (see insertClause()) by giving the reason // (although one could use NULL instead, // but this would then complicate proof logging which is based on clause ids), // and that it helps to retrieve the clause set independently of the assignment. // (currently this is neither needed for DPLLT::checkSat nor DPLLT::continueCheck, // the two operations in DPLLTMiniSat which use MiniSat) // trying this out didn't show too much of an improvement, so it's not done. namespace MiniSat { // assume that all stl containers use the same size type // and define it here once and for all typedef std::vector<int>::size_type size_type; // /// conversions between MiniSat and CVC data types: /// // both MiniSat and CVC use integers for variables and literals. // CVC uses the integer's sign as the literals sign, // while MiniSat doubles the id and uses only positive numbers // (to be able to use them as array indizes). // e.g, for the variable p with the number 2, // CVC represents +p as 3 and -p as -3, // while MiniSat represents +p as 5 and -p as 4. // // unifying this representation is probably not worth doing, // as, first, conversion happens only at the interface level, // and second, no memory can be saved as a literal is just an integer. inline Var cvcToMiniSat(const SAT::Var& var) { return var.getIndex(); } inline SAT::Var miniSatToCVC(Var var) { return SAT::Var(var); } inline Lit cvcToMiniSat(const SAT::Lit& literal) { return MiniSat::Lit(cvcToMiniSat(literal.getVar()), literal.isPositive()); } inline SAT::Lit miniSatToCVC(Lit literal) { return SAT::Lit(miniSatToCVC(literal.var()), literal.sign()); } // converts cvc clause into MiniSat literal list // returns true on permanently satisfied clause, i.e. 
clause containing 'true' bool cvcToMiniSat(const SAT::Clause& clause, std::vector<Lit>& literals); //================================================================================================= // MiniSat -- the main class: struct SolverStats { int64_t starts, decisions, propagations, conflicts, theory_conflicts, max_level; int64_t clauses_literals, learnts_literals, max_literals, del_clauses, del_lemmas, db_simpl, lm_simpl, debug; SolverStats() : starts(0), decisions(0), propagations(0), conflicts(0), theory_conflicts(0), max_level(0), clauses_literals(0), learnts_literals(0), max_literals(0), del_clauses(0), del_lemmas(0), db_simpl(0), lm_simpl(0), debug(0) { } }; // solver state at a push, needed so that a pop can revert to that state struct PushEntry { // the highest id of all clauses known - // clauses with higher id must have been added after the push int d_clauseID; // size of d_trail size_type d_trailSize; size_type d_qhead; size_type d_thead; // conflict detected in initial propagation phase of push bool d_ok; PushEntry(int clauseID, size_type trailSize, size_type qhead, size_type thead, bool ok) : d_clauseID(clauseID), d_trailSize(trailSize), d_qhead(qhead), d_thead(thead), d_ok(ok) {} }; struct SearchParams { double var_decay, clause_decay, random_var_freq; // (reasonable values are: 0.95, 0.999, 0.02) SearchParams(double v = 1, double c = 1, double r = 0) : var_decay(v), clause_decay(c), random_var_freq(r) { } }; class Solver { /// variables protected: // level before first decision static const int d_rootLevel = 0; /// status // a search() has been started bool d_inSearch; // if false, then the clause set is unsatisfiable. bool d_ok; // this clause is conflicting with the current context // // it is not necessary to store more than one conflicting clause. // if there are several conflicting clauses, // they must all have been become conflicting at the same decision level, // as in a conflicting state no decision is made. 
// // after backtracking on any of these conflicting clauses, // the others are also not conflicting anymore, // if the conflict really was due to the current decision level. // // this is only not the case if theory clauses are involved. // i) a conflicting theory clause is added to d_pendingClauses instead of the clause set. // it will be only moved to the clause set if it is not conflicting, // otherwise it (or some other conflicting clause) will be used for backtracking. // ii) progapations based on new theory clauses may actually be already valid // in a previous level, not only in the current decision level. // on backtracking this will be kept in the part of the trail which has to be propagated, // and be propagated again after backtracking, // thus the conflict will be computed again. // // this scheme also allows to stop the propagation as soon as one conflict clause is found, // and backtrack only in this one, instead of searching for all conflicting clauses. // // the only attempt at picking a good conflict clause is to pick the shortest one. // looking at the lowest backjumping level is probably(?) too expensive. Clause* d_conflict; /// variable assignments, and pending propagations // mapping from literals to clauses in which a literal is watched, // literal.index() is used as the index std::vector<std::vector<Clause*> > d_watches; // The current assignments (lbool:s stored as char:s), indexed by var std::vector<signed char> d_assigns; // Assignment stack; stores all assigments made in the order they were made. // as theory clause and theory implications can add propagations // which are valid at earlier levels this list is _not_ necessarily ordered by level. std::vector<Lit> d_trail; // Separator indices for different decision levels in 'trail', // i.e. 
d_trail[trail_lim[i]] is the i.th decision std::vector<int> d_trail_lim; // 'd_trail_pos[var]' is the variable's position in 'trail[]' // used for proof logging std::vector<size_type> d_trail_pos; // head of propagation queue as index into the trail: // the context is the trail up to trail[qhead - 1], // the propagation queue is trail[qhead] to its end. size_type d_qhead; // like d_qhead for theories: // only the literals up to trail[thead - 1] have been asserted to the theories. size_type d_thead; // 'reason[var]' is the clause that implied the variables current value, // or Clause::Decision() for a decision , // resp. (Clause::TheoryImplication()) for a theory implication with lazy explanation retrieval std::vector<Clause*> d_reason; // 'level[var]' is the decision level at which assignment was made. // except when the literal is a theory implication and the explanation // has not been retrieved yet. Then, this is the level of the literal's // assertion, and its real level will be computed during conflict analysis. std::vector<int> d_level; // Variables // the variables registered before the first push // and at each push level (with registerVar), // i.e. the variables occurring in the clauses at each push level. // cumulative, i.e. the variables registered in a push level // are the union of the variables registered at it and any previous level. std::vector<Hash::hash_set<Var> > d_registeredVars; /// Clauses // clause id counter int d_clauseIDCounter; // problem clauses (input clauses, theory clauses, explanations of theory implications). std::vector<Clause*> d_clauses; // learnt clauses (conflict clauses) std::vector<Clause*> d_learnts; /// Temporary clauses // these are clauses which were already conflicting when added. // so, first the solver has to backtrack, // then they can be added in a consistent state. std::queue<Clause*> d_pendingClauses; // these clauses are explanations for theory propagations which have been // retrieved to regress a conflict. 
they are gathered for the regression // in analyze, and then deleted on backtracking in backtrack. std::stack<std::pair<int, Clause*> > d_theoryExplanations; /// Push / Pop // pushes std::vector<PushEntry> d_pushes; // lemmas kept after a pop, to add with the next push std::vector<Clause*> d_popLemmas; // for each variable the highest pushID of the clauses used for its implication. // for a decision or theory implication with unknown explanation this is max_int, // for a unit clause as the reason it is the clauses pushID, // for any other reason it is the max of the d_pushIDs of the literals // falsifying the literals of the reason clause // // thus, an approximation for checking if a clause literal is permanently // falsified/satisfied even after pops (as long as the clause is not popped itself), // is that the implication level of the literal it the root level, // and that clauses' pushID is <= the d_pushIDs value of the literal. // // this can be used for simplifcation of clauses, lemma minimization, // and keeping propagated literals after a pop. std::vector<int> d_pushIDs; // :TODO: unify var -> x arrays into one with a varInfo data structure: // d_assigns, d_reason, d_level, d_pushIDs, d_activity // probably not: d_trail_pos, d_analyze_seen // number of queued pop requests unsigned int d_popRequests; /// heuristics // heuristics for keeping lemmas // Amount to bump next clause with. double d_cla_inc; // INVERSE decay factor for clause activity: stores 1/decay. double d_cla_decay; // heuristics for variable decisions // A heuristic measurement of the activity of a variable. std::vector<double> d_activity; // Amount to bump next variable with. double d_var_inc; // INVERSE decay factor for variable activity: stores 1/decay. // Use negative value for static variable order. double d_var_decay; // Keeps track of the decision variable order. 
VarOrder d_order; // heuristics for clause/lemma database cleanup // Number of top-level assignments since last execution of 'simplifyDB()'. int d_simpDB_assigns; // Remaining number of propagations that must be made before next execution of 'simplifyDB()'. int64_t d_simpDB_props; // Number of lemmas after last execution of 'reduceDB()'. int d_simpRD_learnts; /// CVC interface // CVC theory API SAT::DPLLT::TheoryAPI* d_theoryAPI; // CVC decision heuristic SAT::DPLLT::Decider* d_decider; /// proof logging // log derivation, to create a resolution proof from a closed derivation tree proof Derivation* d_derivation; /// Mode of operation: // Restart frequency etc. SearchParams d_default_params; // Controls conflict clause minimization. true by default. bool d_expensive_ccmin; /// Temporaries (to reduce allocation overhead). // Each variable is prefixed by the method in which is used: std::vector<char> d_analyze_seen; std::vector<Lit> d_analyze_stack; std::vector<Lit> d_analyze_redundant; // solver statistics SolverStats d_stats; protected: /// Search: // the current decision level int decisionLevel() const { return (int)d_trail_lim.size(); } // decision on p bool assume(Lit p); // queue a literal for propagation, at decisionLevel implied by reason bool enqueue(Lit fact, int decisionLevel, Clause* reason); // propagate a literal (the head of the propagation queue) void propagate(); // perform a lookahead on the best split literals. // this is done on the propositional level only, without involving theories. 
void propLookahead(const SearchParams& params); /// Conflict handling // conflict analysis: returns conflict clause and level to backtrack to // clause implies its first literal in level out_btlevel Clause* analyze(int& out_btlevel); // conflict analysis: conflict clause minimization (helper method for 'analyze()') void analyze_minimize(std::vector<Lit>& out_learnt, Inference* inference, int& pushID); // conflict analysis: conflict clause minimization (helper method for 'analyze()') bool analyze_removable(Lit p, unsigned int min_level, int pushID); // backtrack to level, add conflict clause void backtrack(int level, Clause* clause); // is the current state conflicting, i.e. is there a conflicting clause? bool isConflicting() const; // mark this clause as conflicting void updateConflict(Clause* clause); // returns the level in which this clause implies its first literal. // precondition: all clause literals except for the first must be falsified. int getImplicationLevel(const Clause& clause) const; // returns the level in which this clause became falsified // (or at least fully assigned). // precondition: no clause literal is undefined. int getConflictLevel(const Clause& clause) const; // if this literal is a theory implied literal and its explanation has not been retrieved, // then this is done now and the literal's reason is updated. 
// precondition: literal must be a propagated literal void resolveTheoryImplication(Lit literal); /// unit propagation // return the watched clauses for a literal std::vector<Clause*>& getWatches(Lit literal) { return d_watches[literal.index()]; }; // return the watched clauses for a literal const std::vector<Clause*>& getWatches(Lit literal) const { return d_watches[literal.index()]; }; // adds a watch to a clause literal // precondition: literal must be one of the first two literals in clause void addWatch(Lit literal, Clause* clause) { getWatches(literal).push_back(clause); }; // removes the clause from the list of watched clauses void removeWatch(std::vector<Clause*>& ws, Clause* elem); /// Operations on clauses: // registers a variable - any variable has to be registered before it is used in the search. void registerVar(Var var); // checks if a variable is already registered (pop can remove a variable) bool isRegistered(Var var); // creates/adds a clause or a lemma and returns it; registers all variables, // used by all other addClause methods void addClause(std::vector<Lit>& literals, CVC3::Theorem theorem, int clauseID); // adds a clause or a lemma to the solver, watched lists, and checks if it is unit/conflicting // clause activity heuristics are updated. // precondition: all variables are registered // precondition: a lemma is propagating its first literal void insertClause(Clause* clause); // add a lemma which has not been computed just now (see push(), createFrom()), // so it is not necessary propagating (which is assumed by insertClause()) void insertLemma(const Clause* lemma, int clauseID, int pushID); // simplify clause based on root level assignment // precondition: all variables are registered bool simplifyClause(std::vector<Lit>& literals, int clausePushID) const; // order a clause such that it is consistent with the current assignment, // i.e. the two first literals can be taken as the watched literals. 
// precondition: all variables are registered void orderClause(std::vector<Lit>& literals) const; // deallocate a clause, and removes it from watches if just_dealloc is false void remove(Clause* c, bool just_dealloc = false); // assuming that the literal is implied at the root level: // will the literal be assigned as long as the clause exists, even over pops? bool isImpliedAt(Lit lit, int clausePushID) const; // is this clause permanently satisfied? bool isPermSatisfied(Clause* c) const; // Push / Pop // sets the d_pushIDs entry of var implied by from void setPushID(Var var, Clause* from); // returns the d_pushIDs entry of a var // makes only sense for a var with a defined value int getPushID(Var var) const { return d_pushIDs[var]; } int getPushID(Lit lit) const { return getPushID(lit.var()); } // pop the most recent push void pop(); void popClauses(const PushEntry& pushEntry, std::vector<Clause*>& clauses); /// Activity: void varBumpActivity(Lit p) { if (d_var_decay < 0) return; // (negative decay means static variable order -- don't bump) if ( (d_activity[p.var()] += d_var_inc) > 1e100 ) varRescaleActivity(); d_order.update(p.var()); } void varDecayActivity () { if (d_var_decay >= 0) d_var_inc *= d_var_decay; } void varRescaleActivity(); void claDecayActivity() { d_cla_inc *= d_cla_decay; } void claRescaleActivity() ; void claBumpActivity (Clause* c) { float act = c->activity() + (float)d_cla_inc; c->setActivity(act); if (act > 1e20) claRescaleActivity(); } /// debugging // are all clauses (excluding lemmas) satisfied? 
bool allClausesSatisfied(); // checks that the first two literals of a clause are watched void checkWatched(const Clause& clause) const; void checkWatched() const; // checks that for each clause one of these holds: // 1) the first two literals are undefined // 2) one of the first two literals is satisfied // 3) the first literal is undefined and all other literals are falsified // 4) all literals are falsified void checkClause(const Clause& clause) const; void checkClauses() const; // checks that each literal in the context(trail) is either // 1) a decision // 2) or implied by previous context literals void checkTrail() const; // print the current propagation step void protocolPropagation() const; public: /// Initialization // assumes that this is the SAT solver in control of CVC theories, // so it immediately pushs a new theory context. // // uses MiniSat's internal decision heuristics if decider is NULL // // if logDerivation then the derivation will be logged in getDerivation(), // which provides a prove if the empty clause is derived. Solver(SAT::DPLLT::TheoryAPI* theoryAPI, SAT::DPLLT::Decider* decider, bool logDerivation); // copies clauses, assignment as unit clauses, and lemmas // will be in root level static Solver* createFrom(const Solver* solver); // releases all memory, but does not pop theories. // this is according to the semantics expected by CVC: // is the solver detects unsatisfiability, it pops all theory levels. // otherwise the caller is responsible for resetting the theory levels. ~Solver(); /// problem specification // converts cvc clause into MiniSat clause with the given id. // returns NULL on permanently satisfied clause, i.e. 
clause containing 'true' Clause* cvcToMiniSat(const SAT::Clause& clause, int id); // adds a unit clause given as a literal void addClause(Lit p, CVC3::Theorem theorem); // adds a (copy of) clause, uses original clause id if wished void addClause(const Clause& clause, bool keepClauseID); // adds a CVC clause void addClause(const SAT::Clause& clause, bool isTheoryClause); // adds a CVC formula void addFormula(const SAT::CNF_Formula& cnf, bool isTheoryClause); // returns a unique id for a new clause // (addClause will then use the negation for theory clauses) int nextClauseID() { FatalAssert(d_clauseIDCounter >= 0, "MiniSat::Solver::nextClauseID: overflow"); return d_clauseIDCounter++; }; // removes permanently satisfied clauses void simplifyDB(); // removes 'bad' lemmas void reduceDB(); /// search // (continue) search with current clause set and context // until model is found (in d_trail), or unsatisfiability detected. // // between two calls clauses may be added, // but everything else (including the theories) should remain untouched. // // the prover becomes essentially unusable if unsatisfiability is detected, // only data may be retrieved (clauses, statistics, proof, ...) CVC3::QueryResult search(); // returns a resolution proof for unsatisfiability if // - createProof was true in the call to the constructor // - the last call to search returned status UNSATISFIABLE // returns NULL otherwise Derivation* getProof(); // is the solver currently in a search state? // i.e. search() has been called and not been undone by a pop request. bool inSearch() const { return d_inSearch && d_popRequests == 0; } // is the solver in a consistent state? bool isConsistent() const { return !isConflicting(); } /// Push / Pop // push the current solver state // can only be done when solver is not already in a search (inSearch()). void push(); // pop all theory levels pushed by the solver, // i.e. all (current) decision levels of the solver. 
void popTheories(); // request to pop theories - all request are done when doPops is called void requestPop(); // perform all pop requests (calls to requestPop) void doPops(); // has there been a push which hasn't been (requested to be) undone yet? bool inPush() const { return d_pushes.size() > d_popRequests; } /// clauses / assignment // get the current value of a variable/literal lbool getValue(Var x) const { return toLbool(d_assigns[x]); } lbool getValue(Lit p) const { return p.sign() ? getValue(p.var()) : ~getValue(p.var()); } // get the assignment level of a variable/literal (which should have a value) int getLevel(Var var) const { return d_level[var]; }; int getLevel(Lit lit) const { return getLevel(lit.var()); }; // set the assignment level of a variable/literal void setLevel(Var var, int level) { d_level[var] = level; }; void setLevel(Lit lit, int level) { setLevel(lit.var(), level); }; // this clause is the reason for a propagation and thus can't be removed // precondition: the first literal of the reason clause must be the propagated literal bool isReason(const Clause* c) const { return c->size() > 0 && d_reason[((*c)[0]).var()] == c; } // returns the implication reason of a variable (its value must be defined) Clause* getReason(Var var) const { return d_reason[var]; }; // like getReason, but if resolveTheoryImplication is true, // then additionaly if literal is a theory implication resolveTheoryImplication() is called. 
Clause* getReason(Lit literal, bool resolveTheoryImplication = true); // the current clause set const std::vector<Clause*>& getClauses() const { return d_clauses; } // the current lemma set const std::vector<Clause*>& getLemmas() const { return d_learnts; } // the current variable assignments const std::vector<Lit>& getTrail() const { return d_trail; } // the derivation, logged if != NULL Derivation* getDerivation() { return d_derivation; } /// Statistics // derivation statistics const SolverStats& getStats() const { return d_stats; } // number of assigned variabels (context size) int nAssigns() const { return d_trail.size(); } // number of stored clauses (does not include clauses removed by simplifyDB) int nClauses() const { return d_clauses.size(); } // number of stored lemmas (forgotten lemmas are not counted) int nLearnts() const { return d_learnts.size(); } // variable with the highest id + 1 // not necessaribly the number of variables, if they are not enumerated without gap int nVars() const { return d_assigns.size(); } /// String representation: // literal id, sign, current assignment as string std::string toString(Lit literal, bool showAssignment) const; // clause as string, showAssignment true -> show current assignment of each literal std::string toString(const std::vector<Lit>& clause, bool showAssignment) const; // clause as string, showAssignment true -> show current assignment of each literal std::string toString(const Clause& clause, bool showAssignment) const; // prints lemmas, clauses, assignment to cout void printState() const; // output the clause set and context in DIMACS format void printDIMACS() const; std::vector<SAT::Lit> curAssigns() ; std::vector<std::vector<SAT::Lit> > curClauses(); }; } //================================================================================================= #endif
ehsan/js-symbolic-executor
cvc3/src/sat/minisat_solver.h
C
apache-2.0
27,844
define(function(require){ require('$model/UI2/system/components/justep/window/window'); require('$model/UI2/system/components/bootstrap/navbar/navbar'); require('$model/UI2/system/components/bootstrap/navs/navs'); var __parent1=require('$model/UI2/system/lib/base/modelBase'); var __parent0=require('$model/UI2/system/components/bootstrap/navbar/demo/base'); var __result = __parent1._extend(__parent0).extend({ constructor:function(contextUrl){ this.__sysParam=true; this.__contextUrl=contextUrl; this.__id='__baseID__'; this._flag_='d2aa57dcd25aedc20e3c3c5d740dcee8'; this.callParent(contextUrl); }}); return __result;});
Diaosir/WeX5
UI2/system/components/bootstrap/navbar/demo/.release/base/base.w.form.js
JavaScript
apache-2.0
632
/* * * Copyright 2015 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /// An Alarm posts the user provided tag to its associated completion queue upon /// expiry or cancellation. #ifndef GRPCXX_ALARM_H #define GRPCXX_ALARM_H #include <grpc++/impl/codegen/completion_queue.h> #include <grpc++/impl/codegen/completion_queue_tag.h> #include <grpc++/impl/codegen/grpc_library.h> #include <grpc++/impl/codegen/time.h> #include <grpc++/impl/grpc_library.h> #include <grpc/grpc.h> struct grpc_alarm; namespace grpc { class CompletionQueue; /// A thin wrapper around \a grpc_alarm (see / \a / src/core/surface/alarm.h). class Alarm : private GrpcLibraryCodegen { public: /// Create an unset completion queue alarm Alarm() : tag_(nullptr), alarm_(grpc_alarm_create(nullptr)) {} /// DEPRECATED: Create and set a completion queue alarm instance associated to /// \a cq. /// This form is deprecated because it is inherently racy. /// \internal We rely on the presence of \a cq for grpc initialization. If \a /// cq were ever to be removed, a reference to a static /// internal::GrpcLibraryInitializer instance would need to be introduced /// here. \endinternal. template <typename T> Alarm(CompletionQueue* cq, const T& deadline, void* tag) : tag_(tag), alarm_(grpc_alarm_create(nullptr)) { grpc_alarm_set(alarm_, cq->cq(), TimePoint<T>(deadline).raw_time(), static_cast<void*>(&tag_), nullptr); } /// Trigger an alarm instance on completion queue \a cq at the specified time. 
/// Once the alarm expires (at \a deadline) or it's cancelled (see \a Cancel), /// an event with tag \a tag will be added to \a cq. If the alarm expired, the /// event's success bit will be true, false otherwise (ie, upon cancellation). template <typename T> void Set(CompletionQueue* cq, const T& deadline, void* tag) { tag_.Set(tag); grpc_alarm_set(alarm_, cq->cq(), TimePoint<T>(deadline).raw_time(), static_cast<void*>(&tag_), nullptr); } /// Alarms aren't copyable. Alarm(const Alarm&) = delete; Alarm& operator=(const Alarm&) = delete; /// Alarms are movable. Alarm(Alarm&& rhs) : tag_(rhs.tag_), alarm_(rhs.alarm_) { rhs.alarm_ = nullptr; } Alarm& operator=(Alarm&& rhs) { tag_ = rhs.tag_; alarm_ = rhs.alarm_; rhs.alarm_ = nullptr; return *this; } /// Destroy the given completion queue alarm, cancelling it in the process. ~Alarm() { if (alarm_ != nullptr) grpc_alarm_destroy(alarm_, nullptr); } /// Cancel a completion queue alarm. Calling this function over an alarm that /// has already fired has no effect. void Cancel() { if (alarm_ != nullptr) grpc_alarm_cancel(alarm_, nullptr); } private: class AlarmEntry : public CompletionQueueTag { public: AlarmEntry(void* tag) : tag_(tag) {} void Set(void* tag) { tag_ = tag; } bool FinalizeResult(void** tag, bool* status) override { *tag = tag_; return true; } private: void* tag_; }; AlarmEntry tag_; grpc_alarm* alarm_; // owned }; } // namespace grpc #endif // GRPCXX_ALARM_H
matt-kwong/grpc
include/grpc++/alarm.h
C
apache-2.0
3,655
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.externalSystem.service.execution; import com.intellij.codeInsight.completion.CompletionResultSet; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.openapi.externalSystem.model.DataNode; import com.intellij.openapi.externalSystem.model.ExternalProjectInfo; import com.intellij.openapi.externalSystem.model.ProjectKeys; import com.intellij.openapi.externalSystem.model.ProjectSystemId; import com.intellij.openapi.externalSystem.model.project.ModuleData; import com.intellij.openapi.externalSystem.model.project.ProjectData; import com.intellij.openapi.externalSystem.model.task.TaskData; import com.intellij.openapi.externalSystem.service.execution.cmd.CommandLineCompletionProvider; import com.intellij.openapi.externalSystem.service.project.manage.ProjectDataManager; import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings; import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.TextAccessor; import com.intellij.util.BooleanFunction; import com.intellij.util.containers.ContainerUtil; import groovyjarjarcommonscli.Options; import icons.ExternalSystemIcons; import org.jetbrains.annotations.NotNull; import 
org.jetbrains.annotations.Nullable; import java.util.Collection; import java.util.Collections; import java.util.List; /** * @author Vladislav.Soroka * @since 11/26/2014 */ public class TaskCompletionProvider extends CommandLineCompletionProvider { private volatile List<LookupElement> myCachedElements; private volatile String myCachedWorkingDir; private final Project myProject; private final ProjectSystemId mySystemId; private final TextAccessor myProjectPathAccessor; public TaskCompletionProvider(@NotNull Project project, @NotNull ProjectSystemId externalSystemId, @NotNull TextAccessor workDirectoryField) { this(project, externalSystemId, workDirectoryField, new Options()); } public TaskCompletionProvider(@NotNull Project project, @NotNull ProjectSystemId externalSystemId, @NotNull TextAccessor workDirectoryField, @NotNull Options options) { super(options); myProject = project; mySystemId = externalSystemId; myProjectPathAccessor = workDirectoryField; } @Override protected void addArgumentVariants(@NotNull CompletionResultSet result) { List<LookupElement> cachedElements = myCachedElements; final String projectPath = myProjectPathAccessor.getText(); if (cachedElements == null || !StringUtil.equals(myCachedWorkingDir, projectPath)) { final ExternalProjectSettings linkedProjectSettings = ExternalSystemApiUtil.getSettings(myProject, mySystemId).getLinkedProjectSettings(projectPath); if (linkedProjectSettings == null) return; final ExternalProjectInfo projectData = ProjectDataManager.getInstance().getExternalProjectData(myProject, mySystemId, linkedProjectSettings.getExternalProjectPath()); if (projectData == null || projectData.getExternalProjectStructure() == null) return; cachedElements = ContainerUtil.newArrayList(getVariants(projectData.getExternalProjectStructure(), projectPath)); myCachedElements = cachedElements; myCachedWorkingDir = projectPath; } result.addAllElements(cachedElements); } protected List<LookupElement> getVariants(@NotNull final 
DataNode<ProjectData> projectDataNode, @NotNull final String modulePath) { final DataNode<ModuleData> moduleDataNode = findModuleDataNode(projectDataNode, modulePath); if (moduleDataNode == null) { return Collections.emptyList(); } final Collection<DataNode<TaskData>> tasks = ExternalSystemApiUtil.getChildren(moduleDataNode, ProjectKeys.TASK); List<LookupElement> elements = ContainerUtil.newArrayListWithCapacity(tasks.size()); for (DataNode<TaskData> taskDataNode : tasks) { elements.add(LookupElementBuilder.create(taskDataNode.getData().getName()).withIcon(ExternalSystemIcons.Task)); } return elements; } @Nullable public static DataNode<ModuleData> findModuleDataNode(@NotNull final DataNode<ProjectData> projectDataNode, @NotNull final String projectPath) { final DataNode<?> node = ExternalSystemApiUtil.findFirstRecursively(projectDataNode, node1 -> node1.getKey().equals(ProjectKeys.MODULE) && node1.getData() instanceof ModuleData && ((ModuleData)node1 .getData()).getLinkedExternalProjectPath().equals(projectPath)); //noinspection unchecked return (DataNode<ModuleData>)node; } }
hurricup/intellij-community
platform/external-system-impl/src/com/intellij/openapi/externalSystem/service/execution/TaskCompletionProvider.java
Java
apache-2.0
5,715
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.utils.library; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.*; import com.intellij.openapi.roots.impl.libraries.LibraryEx; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.roots.libraries.LibraryProperties; import com.intellij.openapi.roots.libraries.LibraryTable; import com.intellij.openapi.roots.ui.configuration.libraries.LibraryEditingUtil; import com.intellij.openapi.util.Comparing; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.utils.library.propertiesEditor.RepositoryLibraryPropertiesModel; import java.util.Arrays; public class RepositoryLibrarySupport { @NotNull private Project project; @NotNull private RepositoryLibraryPropertiesModel model; @NotNull private RepositoryLibraryDescription libraryDescription; public RepositoryLibrarySupport(@NotNull Project project, @NotNull RepositoryLibraryDescription libraryDescription, @NotNull RepositoryLibraryPropertiesModel model) { this.project = project; this.libraryDescription = libraryDescription; this.model = model; } public void addSupport(@NotNull Module module, @NotNull final 
ModifiableRootModel rootModel, @NotNull ModifiableModelsProvider modifiableModelsProvider) { LibraryTable.ModifiableModel modifiableModel = modifiableModelsProvider.getLibraryTableModifiableModel(module.getProject()); Library library = Iterables.find(Arrays.asList(modifiableModel.getLibraries()), new Predicate<Library>() { @Override public boolean apply(@Nullable Library library) { return isLibraryEqualsToSelected(library); } }, null); if (library == null) { library = createNewLibrary(module, modifiableModel); } else { modifiableModelsProvider.disposeLibraryTableModifiableModel(modifiableModel); } final DependencyScope dependencyScope = LibraryDependencyScopeSuggester.getDefaultScope(library); final ModifiableRootModel moduleModifiableModel = modifiableModelsProvider.getModuleModifiableModel(module); LibraryOrderEntry foundEntry = (LibraryOrderEntry)Iterables.find(Arrays.asList(moduleModifiableModel.getOrderEntries()), new Predicate<OrderEntry>() { @Override public boolean apply(@Nullable OrderEntry entry) { return entry instanceof LibraryOrderEntry && ((LibraryOrderEntry)entry).getScope() == dependencyScope && isLibraryEqualsToSelected(((LibraryOrderEntry)entry).getLibrary()); } }, null); modifiableModelsProvider.disposeModuleModifiableModel(moduleModifiableModel); if (foundEntry == null) { rootModel.addLibraryEntry(library).setScope(dependencyScope); } } private LibraryEx createNewLibrary(@NotNull final Module module, final LibraryTable.ModifiableModel modifiableModel) { RepositoryLibraryProperties libraryProperties = new RepositoryLibraryProperties( libraryDescription.getGroupId(), libraryDescription.getArtifactId(), model.getVersion()); final LibraryEx library = (LibraryEx)modifiableModel.createLibrary( LibraryEditingUtil.suggestNewLibraryName(modifiableModel, RepositoryLibraryType.getInstance().getDescription(libraryProperties)), RepositoryLibraryType.REPOSITORY_LIBRARY_KIND); RepositoryLibraryProperties realLibraryProperties = 
(RepositoryLibraryProperties)library.getProperties(); realLibraryProperties.setMavenId(libraryProperties.getMavenId()); ApplicationManager.getApplication().runWriteAction(() -> modifiableModel.commit()); RepositoryUtils.loadDependencies( module.getProject(), library, model.isDownloadSources(), model.isDownloadJavaDocs(), null); return library; } private boolean isLibraryEqualsToSelected(Library library) { if (!(library instanceof LibraryEx)) { return false; } LibraryEx libraryEx = (LibraryEx)library; if (!RepositoryLibraryType.REPOSITORY_LIBRARY_KIND.equals(libraryEx.getKind())) { return false; } LibraryProperties libraryProperties = libraryEx.getProperties(); if (libraryProperties == null || !(libraryProperties instanceof RepositoryLibraryProperties)) { return false; } RepositoryLibraryProperties repositoryLibraryProperties = (RepositoryLibraryProperties)libraryProperties; RepositoryLibraryDescription description = RepositoryLibraryDescription.findDescription(repositoryLibraryProperties); if (!description.equals(libraryDescription)) { return false; } return Comparing.equal(repositoryLibraryProperties.getVersion(), model.getVersion()); } }
hurricup/intellij-community
plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/library/RepositoryLibrarySupport.java
Java
apache-2.0
5,633
module MiqFilter ALLOWED_DESCENDANT_CLASSES_FROM_MODEL = %w[ExtManagementSystem].freeze def self.belongsto2object(tag) belongsto2object_list(tag).last end def self.belongsto2path_human(tag) tag.split("/").map { |x| x.split("|").second }.compact.join(" -> ") end def self.find_descendant_class_by(klass, name) if ALLOWED_DESCENDANT_CLASSES_FROM_MODEL.include?(klass.to_s) && (descendant_class = klass.try(:belongsto_descendant_class, name)) return descendant_class.constantize else _log.warn("Unable to find descendant class for belongsto filter: #{klass}/#{name}") end nil end def self.find_object_by_special_class(klass, name) if (descendant_class = find_descendant_class_by(klass, name)) && descendant_class.respond_to?(:find_object_for_belongs_to_filter) return descendant_class.find_object_for_belongs_to_filter(name) else _log.warn("#{klass} is not supported for loading objects of descendants classes.(belongsto filter: #{klass}/#{name}, descendant class: #{descendant_class}") end nil end def self.find_object_by_name(klass, name) klass = klass.constantize object = klass.find_by(:name => name) if object.nil? find_object_by_special_class(klass, name) else object end end def self.belongsto2object_list(tag) # /belongsto/ExtManagementSystem|<name>/EmsCluster|<name>/EmsFolder|<name> raise _("invalid tag: %{tag}") % {:tag => tag} unless tag.starts_with?("/belongsto/ExtManagementSystem") parts = tag.split("/")[2..-1] depth = parts.size - 1 # ancestry uses 0 based depth # Get the root EMS object # TODO: For tree queries deeper than 1, we don't actually need the ems object, # so find a way to just get the id ems_class, ems_name = parts.first.split("|", 2) ems = find_object_by_name(ems_class, ems_name) if ems.nil? 
_log.warn("lookup for klass=#{ems_class.to_s.inspect} with name=#{ems_name.inspect} failed in tag=#{tag.inspect}") return [] end return [ems] if depth == 0 # Get the leaf node object for this EMS leaf_class, leaf_name = parts.last.split("|", 2) leaves = leaf_class.constantize .includes(:all_relationships) .where(:name => leaf_name, :ems_id => ems.id) # If multiple leaves come back, filter by depth, and then find which one has # the valid path. It's possible multiple leaves could be at the same depth. leaves.each do |leaf| next unless leaf.depth == depth # Get the full path from the leaf object to the root result = leaf.with_relationship_type("ems_metadata") { leaf.path } # Verify that the records match what's in the provided tag result_parts = result&.map { |o| "#{o.class.base_model.name}|#{o.name}" } return result if result_parts == parts end # Nothing was found from any of the candidates _log.warn("lookup failed for tag=#{tag.inspect}") [] end def self.object2belongsto(obj) # /belongsto/ExtManagementSystem|<name>/EmsCluster|<name>/EmsFolder|<name> unless obj.root_id[0] == "ExtManagementSystem" raise _("Folder Root is not a Provider") end tag = obj.relationship_ancestry( :field_delimiter => '|', :record_delimiter => '/', :include_self => true, :field_method => :name ) "/belongsto/#{tag}" end def self.apply_belongsto_filters(inputs, bfilters) return [] if inputs.nil? return inputs if bfilters.empty? vcmeta_index = bfilters.index_with { |tag| belongsto2object_list(tag) } filtered = [] inputs.each do |p| bfilters.each do |tag| vcmeta_list = vcmeta_index[tag] if p.kind_of?(Storage) vcmeta_list.reverse_each do |vcmeta| if vcmeta.respond_to?(:storages) && vcmeta.storages.include?(p) filtered.push(p) break end end break if filtered.last == p else vcmeta_list = vcmeta_list[0..-2] if vcmeta_list.last.kind_of?(Host) vcmeta = vcmeta_list.last next if vcmeta.nil? 
if vcmeta == p || vcmeta.with_relationship_type("ems_metadata") { vcmeta.is_ancestor_of?(p) } filtered.push(p) break end end end end filtered end end
agrare/manageiq
app/models/miq_filter.rb
Ruby
apache-2.0
4,366
/* Copyright (C) 2000-2003 Constantin Kaplinsky. All Rights Reserved. * Copyright 2004-2005 Cendio AB. * * This is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this software; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, * USA. */ package com.iiordanov.tigervnc.rfb; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import com.iiordanov.bVNC.AbstractBitmapData; import com.iiordanov.bVNC.RemoteCanvas; import com.iiordanov.tigervnc.rdr.InStream; import com.iiordanov.tigervnc.rdr.ZlibInStream; import java.util.ArrayList; import java.io.InputStream; import java.awt.*; public class TightDecoder extends Decoder { final static int TIGHT_MAX_WIDTH = 2048; // Compression control final static int rfbTightExplicitFilter = 0x04; final static int rfbTightFill = 0x08; final static int rfbTightJpeg = 0x09; final static int rfbTightMaxSubencoding = 0x09; // Filters to improve compression efficiency final static int rfbTightFilterCopy = 0x00; final static int rfbTightFilterPalette = 0x01; final static int rfbTightFilterGradient = 0x02; final static int rfbTightMinToCompress = 12; BitmapFactory.Options bitmapopts; byte[] netbuf; int[] pix; byte[] bytebuf; int[] palette; byte[] tightPalette; byte[] prevRow; byte[] thisRow; byte[] bpix; int[] est; //final static Toolkit tk = Toolkit.getDefaultToolkit(); public TightDecoder(CMsgReader reader_, RemoteCanvas c) { bitmapopts = new BitmapFactory.Options(); 
bitmapopts.inPurgeable = false; bitmapopts.inDither = false; bitmapopts.inTempStorage = new byte[32768]; bitmapopts.inPreferredConfig= Bitmap.Config.RGB_565; bitmapopts.inScaled = false; reader = reader_; zis = new ZlibInStream[4]; for (int i = 0; i < 4; i++) zis[i] = new ZlibInStream(); netbuf = new byte[1024]; pix = new int[1]; bytebuf = new byte[3]; palette = new int[256]; tightPalette = new byte[256 * 3]; prevRow = new byte[TIGHT_MAX_WIDTH*3]; thisRow = new byte[TIGHT_MAX_WIDTH*3]; bpix = new byte[3]; est = new int[3]; vncCanvas = c; } public TightDecoder(CMsgReader reader_) { bitmapopts = new BitmapFactory.Options(); bitmapopts.inPurgeable = false; bitmapopts.inInputShareable = true; bitmapopts.inDither = false; bitmapopts.inTempStorage = new byte[32768]; reader = reader_; zis = new ZlibInStream[4]; for (int i = 0; i < 4; i++) zis[i] = new ZlibInStream(); netbuf = new byte[1024]; pix = new int[1]; bytebuf = new byte[3]; palette = new int[256]; tightPalette = new byte[256 * 3]; prevRow = new byte[TIGHT_MAX_WIDTH*3]; thisRow = new byte[TIGHT_MAX_WIDTH*3]; bpix = new byte[3]; est = new int[3]; } /* public void readRectNew(Rect r, CMsgHandler handler) { if (r.tl.x + r.width() > vncCanvas.bitmapData.bmWidth()) r.setXYWH(r.tl.x, r.tl.y, vncCanvas.bitmapData.bmWidth() - r.tl.x, r.height()); if (r.tl.y + r.height() > vncCanvas.bitmapData.bmHeight()) r.setXYWH(r.tl.x, r.tl.y, r.width(), vncCanvas.bitmapData.bmHeight() - r.tl.y); try { vncCanvas.handleTightRect(r.tl.x, r.tl.y, r.width(), r.height(), reader); } catch (java.lang.Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } */ public void readRect(Rect r, CMsgHandler handler) { InStream is = reader.getInStream(); boolean cutZeros = false; clientpf = handler.getPreferredPF(); serverpf = handler.cp.pf(); int bpp = serverpf.bpp; if (bpp == 32) { if (serverpf.is888()) { cutZeros = true; } } int comp_ctl = is.readU8(); boolean bigEndian = handler.cp.pf().bigEndian; // Flush zlib streams if we are 
told by the server to do so. for (int i = 0; i < 4; i++) { if ((comp_ctl & 1) != 0) { zis[i].reset(); } comp_ctl >>= 1; } // "Fill" compression type. if (comp_ctl == rfbTightFill) { if (cutZeros) { is.readBytes(bytebuf, 0, 3); serverpf.bufferFromRGB(pix, 0, bytebuf, 0, 1); } else { pix[0] = is.readPixel(serverpf.bpp/8, serverpf.bigEndian); } handler.fillRect(r, pix[0]); return; } // "JPEG" compression type. if (comp_ctl == rfbTightJpeg) { DECOMPRESS_JPEG_RECT(r, is, handler); return; } // Quit on unsupported compression type. if (comp_ctl > rfbTightMaxSubencoding) { throw new Exception("TightDecoder: bad subencoding value received"); } // "Basic" compression type. int palSize = 0; boolean useGradient = false; if ((comp_ctl & rfbTightExplicitFilter) != 0) { int filterId = is.readU8(); switch (filterId) { case rfbTightFilterPalette: palSize = is.readU8() + 1; if (cutZeros) { is.readBytes(tightPalette, 0, palSize * 3); serverpf.bufferFromRGB(palette, 0, tightPalette, 0, palSize); } else { is.readPixels(palette, palSize, serverpf.bpp/8, serverpf.bigEndian); } break; case rfbTightFilterGradient: useGradient = true; break; case rfbTightFilterCopy: break; default: throw new Exception("TightDecoder: unknown filter code recieved"); } } int bppp = bpp; if (palSize != 0) { bppp = (palSize <= 2) ? 1 : 8; } else if (cutZeros) { bppp = 24; } // Determine if the data should be decompressed or just copied. int rowSize = (r.width() * bppp + 7) / 8; int dataSize = r.height() * rowSize; int streamId = -1; InStream input; if (dataSize < rfbTightMinToCompress) { input = is; } else { int length = is.readCompactLength(); streamId = comp_ctl & 0x03; zis[streamId].setUnderlying(is, length); input = (ZlibInStream)zis[streamId]; } // Allocate netbuf and read in data if (dataSize > netbuf.length) netbuf = new byte[dataSize]; input.readBytes(netbuf, 0, dataSize); int stride = r.width(); int[] buf = reader.getImageBuf(r.area()); if (palSize == 0) { // Truecolor data. 
if (useGradient) { if (bpp == 32 && cutZeros) { FilterGradient24(netbuf, buf, stride, r); } else { FilterGradient(netbuf, buf, stride, r); } } else { // Copy int h = r.height(); int ptr = 0; int srcPtr = 0; int w = r.width(); if (cutZeros) { serverpf.bufferFromRGB(buf, ptr, netbuf, srcPtr, w*h); } else { int pixelSize = (bpp >= 24) ? 3 : bpp/8; while (h > 0) { for (int i = 0; i < w; i++) { if (bpp == 8) { buf[ptr+i] = netbuf[srcPtr+i] & 0xff; } else { for (int j = pixelSize-1; j >= 0; j--) buf[ptr+i] |= ((netbuf[srcPtr+i+j] & 0xff) << j*8); } } ptr += stride; srcPtr += w * pixelSize; h--; } } } } else { // Indexed color int x, h = r.height(), w = r.width(), b, pad = stride - w; int ptr = 0; int srcPtr = 0, bits; if (palSize <= 2) { // 2-color palette while (h > 0) { for (x = 0; x < w / 8; x++) { bits = netbuf[srcPtr++]; for(b = 7; b >= 0; b--) { buf[ptr++] = palette[bits >> b & 1]; } } if (w % 8 != 0) { bits = netbuf[srcPtr++]; for (b = 7; b >= 8 - w % 8; b--) { buf[ptr++] = palette[bits >> b & 1]; } } ptr += pad; h--; } } else { // 256-color palette while (h > 0) { int endOfRow = ptr + w; while (ptr < endOfRow) { buf[ptr++] = palette[netbuf[srcPtr++] & 0xff]; } ptr += pad; h--; } } } handler.imageRect(r, buf); if (streamId != -1) { zis[streamId].reset(); } } final private void DECOMPRESS_JPEG_RECT(Rect r, InStream is, CMsgHandler handler) { // Read length int compressedLen = is.readCompactLength(); // Allocate netbuf and read in data if (compressedLen > netbuf.length) netbuf = new byte[compressedLen]; is.readBytes(netbuf, 0, compressedLen); // Decode JPEG data Bitmap tightBitmap = BitmapFactory.decodeByteArray(netbuf, 0, compressedLen, bitmapopts); /* int w = r.width(); int h = r.height(); int[] buf = reader.getImageBuf(w*h); // Copy decoded data into buf. tightBitmap.getPixels(buf, 0, w, 0, 0, w, h); handler.imageRect(r, buf); */ handler.imageRect(r, tightBitmap); // To avoid running out of memory, recycle bitmap immediately. 
tightBitmap.recycle(); } final private void FilterGradient24(byte[] netbuf, int[] buf, int stride, Rect r) { int x, y, c; // Set up shortcut variables int rectHeight = r.height(); int rectWidth = r.width(); for (y = 0; y < rectHeight; y++) { /* First pixel in a row */ for (c = 0; c < 3; c++) { bpix[c] = (byte)(netbuf[y*rectWidth*3+c] + prevRow[c]); thisRow[c] = bpix[c]; } serverpf.bufferFromRGB(buf, y*stride, bpix, 0, 1); /* Remaining pixels of a row */ for (x = 1; x < rectWidth; x++) { for (c = 0; c < 3; c++) { est[c] = (int)(prevRow[x*3+c] + bpix[c] - prevRow[(x-1)*3+c]); if (est[c] > 0xFF) { est[c] = 0xFF; } else if (est[c] < 0) { est[c] = 0; } bpix[c] = (byte)(netbuf[(y*rectWidth+x)*3+c] + est[c]); thisRow[x*3+c] = bpix[c]; } serverpf.bufferFromRGB(buf, y*stride+x, bpix, 0, 1); } System.arraycopy(thisRow, 0, prevRow, 0, prevRow.length); } } final private void FilterGradient(byte[] netbuf, int[] buf, int stride, Rect r) { int x, y, c; // Set up shortcut variables int rectHeight = r.height(); int rectWidth = r.width(); for (y = 0; y < rectHeight; y++) { /* First pixel in a row */ // FIXME //serverpf.rgbFromBuffer(bpix, 0, netbuf, y*rectWidth, 1, cm); for (c = 0; c < 3; c++) bpix[c] += prevRow[c]; System.arraycopy(bpix, 0, thisRow, 0, bpix.length); serverpf.bufferFromRGB(buf, y*stride, bpix, 0, 1); /* Remaining pixels of a row */ for (x = 1; x < rectWidth; x++) { for (c = 0; c < 3; c++) { est[c] = (int)(prevRow[x*3+c] + bpix[c] - prevRow[(x-1)*3+c]); if (est[c] > 0xff) { est[c] = 0xff; } else if (est[c] < 0) { est[c] = 0; } } // FIXME //serverpf.rgbFromBuffer(bpix, 0, netbuf, y*rectWidth+x, 1, cm); for (c = 0; c < 3; c++) bpix[c] += est[c]; System.arraycopy(bpix, 0, thisRow, x*3, bpix.length); serverpf.bufferFromRGB(buf, y*stride+x, bpix, 0, 1); } System.arraycopy(thisRow, 0, prevRow, 0, prevRow.length); } } RemoteCanvas vncCanvas; private CMsgReader reader; private ZlibInStream[] zis; private PixelFormat serverpf; private PixelFormat clientpf; static LogWriter 
vlog = new LogWriter("TightDecoder"); }
x-hansong/aSpice
src/com/iiordanov/tigervnc/rfb/TightDecoder.java
Java
apache-2.0
12,131
RSpec.describe MiqWorker do context "::Runner" do def all_workers MiqWorker.descendants.select { |c| c.subclasses.empty? } end it "finds the correct corresponding runner for workers" do all_workers.each do |worker| # If this isn't true, we're probably accidentally inheriting the # runner from a superclass expect(worker::Runner.name).to eq("#{worker.name}::Runner") end end end context ".sync_workers" do it "stops extra workers, returning deleted pids" do expect_any_instance_of(described_class).to receive(:stop) worker = FactoryBot.create(:miq_worker, :status => "started") worker.class.workers = 0 expect(worker.class.sync_workers).to eq(:adds => [], :deletes => [worker.pid]) end end context ".has_required_role?" do def check_has_required_role(worker_role_names, expected_result) allow(described_class).to receive(:required_roles).and_return(worker_role_names) expect(described_class.has_required_role?).to eq(expected_result) end before do active_roles = %w(foo bar).map { |rn| FactoryBot.create(:server_role, :name => rn) } @server = EvmSpecHelper.local_miq_server(:active_roles => active_roles) end context "clean_active_messages" do before do @worker = FactoryBot.create(:miq_worker, :miq_server => @server) @message = FactoryBot.create(:miq_queue, :handler => @worker, :state => 'dequeue') end it "normal" do expect(@worker.active_messages.length).to eq(1) @worker.clean_active_messages expect(@worker.reload.active_messages.length).to eq(0) end it "invokes a message callback" do @message.update_attribute(:miq_callback, :class_name => 'Kernel', :method_name => 'rand') expect(Kernel).to receive(:rand) @worker.clean_active_messages end end it "when worker roles is nil" do check_has_required_role(nil, true) end context "when worker roles is a string" do it "that is blank" do check_has_required_role(" ", true) end it "that is one of the server roles" do check_has_required_role("foo", true) end it "that is not one of the server roles" do check_has_required_role("baa", false) end 
end context "when worker roles is an array" do it "that is empty" do check_has_required_role([], true) end it "that is a subset of server roles" do check_has_required_role(["foo"], true) check_has_required_role(%w(bah foo), true) end it "that is not a subset of server roles" do check_has_required_role(["bah"], false) end end context "when worker roles is a lambda" do it "that is empty" do check_has_required_role(-> { [] }, true) end it "that is a subset of server roles" do check_has_required_role(-> { ["foo"] }, true) end it "that is not a subset of server roles" do check_has_required_role(-> { ["bah"] }, false) end end end context ".workers_configured_count" do before do @configured_count = 2 allow(described_class).to receive(:worker_settings).and_return(:count => @configured_count) @maximum_workers_count = described_class.maximum_workers_count end after do described_class.maximum_workers_count = @maximum_workers_count end it "when maximum_workers_count is nil" do expect(described_class.workers_configured_count).to eq(@configured_count) end it "when maximum_workers_count is less than configured_count" do described_class.maximum_workers_count = 1 expect(described_class.workers_configured_count).to eq(1) end it "when maximum_workers_count is equal to the configured_count" do described_class.maximum_workers_count = 2 expect(described_class.workers_configured_count).to eq(@configured_count) end it "when maximum_workers_count is greater than configured_count" do described_class.maximum_workers_count = 2 expect(described_class.workers_configured_count).to eq(@configured_count) end end describe ".worker_settings" do let(:settings) do { :workers => { :worker_base => { :defaults => {:memory_threshold => "100.megabytes"}, :queue_worker_base => { :defaults => {:memory_threshold => "300.megabytes"}, :ems_refresh_worker => { :defaults => {:memory_threshold => "500.megabytes"}, :ems_refresh_worker_amazon => { :memory_threshold => "700.megabytes" } } } } }, :ems => {:ems_amazon 
=> {}} } end before do EvmSpecHelper.create_guid_miq_server_zone stub_settings(settings) end context "at a concrete subclass" do let(:actual) { ManageIQ::Providers::Amazon::CloudManager::RefreshWorker.worker_settings[:memory_threshold] } it "with overrides" do expect(actual).to eq(700.megabytes) end it "without overrides" do settings.store_path(:workers, :worker_base, :queue_worker_base, :ems_refresh_worker, :ems_refresh_worker_amazon, {}) stub_settings(settings) expect(actual).to eq(500.megabytes) end end context "at the BaseManager level" do let(:actual) { ManageIQ::Providers::BaseManager::RefreshWorker.worker_settings[:memory_threshold] } it "with overrides" do expect(actual).to eq(500.megabytes) end it "without overrides" do settings.store_path(:workers, :worker_base, :queue_worker_base, :ems_refresh_worker, :defaults, {}) stub_settings(settings) expect(actual).to eq(300.megabytes) end end context "at the MiqQueueWorkerBase level" do let(:actual) { MiqQueueWorkerBase.worker_settings[:memory_threshold] } it "with overrides" do expect(actual).to eq(300.megabytes) end it "without overrides" do settings.store_path(:workers, :worker_base, :queue_worker_base, :defaults, {}) stub_settings(settings) expect(actual).to eq(100.megabytes) end end context "with mixed memory value types" do # Same settings from above, just using integers and integers/floats as strings let(:settings) do { :workers => { :worker_base => { :defaults => {:memory_threshold => "100.megabytes"}, :queue_worker_base => { :defaults => {:memory_threshold => 314_572_800}, # 300.megabytes :ems_refresh_worker => { :defaults => {:memory_threshold => "524288000"}, # 500.megabytes :ems_refresh_worker_amazon => { :memory_threshold => "1181116006.4" # 1.1.gigabtye } } } } }, :ems => {:ems_amazon => {}} } end let(:worker_base) { MiqWorker.worker_settings[:memory_threshold] } let(:queue_worker) { MiqQueueWorkerBase.worker_settings[:memory_threshold] } let(:ems_worker) { 
ManageIQ::Providers::BaseManager::RefreshWorker.worker_settings[:memory_threshold] } let(:aws_worker) { ManageIQ::Providers::Amazon::CloudManager::RefreshWorker.worker_settings[:memory_threshold] } it "converts everyting to integers properly" do expect(worker_base).to eq(100.megabytes) expect(queue_worker).to eq(300.megabytes) expect(ems_worker).to eq(500.megabytes) expect(aws_worker).to eq(1_181_116_006) end end it "at the base class" do actual = MiqWorker.worker_settings[:memory_threshold] expect(actual).to eq(100.megabytes) end it "uses passed in config" do settings.store_path(:workers, :worker_base, :queue_worker_base, :ems_refresh_worker, :ems_refresh_worker_amazon, :memory_threshold, "5.terabyte") stub_settings(settings) settings.store_path(:workers, :worker_base, :queue_worker_base, :ems_refresh_worker, :ems_refresh_worker_amazon, :memory_threshold, "1.terabyte") actual = ManageIQ::Providers::Amazon::CloudManager::RefreshWorker .worker_settings(:config => settings)[:memory_threshold] expect(actual).to eq(1.terabyte) end end context "with two servers" do before do allow(described_class).to receive(:nice_increment).and_return("+10") @zone = FactoryBot.create(:zone) @server = FactoryBot.create(:miq_server, :zone => @zone) allow(MiqServer).to receive(:my_server).and_return(@server) @worker = FactoryBot.create(:ems_refresh_worker_amazon, :miq_server => @server) @server2 = FactoryBot.create(:miq_server, :zone => @zone) @worker2 = FactoryBot.create(:ems_refresh_worker_amazon, :miq_server => @server2) end it ".server_scope" do expect(described_class.server_scope).to eq([@worker]) end describe "#worker_settings" do let(:config1) do { :workers => { :worker_base => { :defaults => {:memory_threshold => "550.megabytes"}, :queue_worker_base => { :defaults => {:memory_threshold => "570.megabytes"}, :ems_refresh_worker => { :defaults => {:memory_threshold => "600.megabytes"}, :ems_refresh_worker_amazon => { :memory_threshold => "700.megabytes" } } } } } } end let(:config2) 
do { :workers => { :worker_base => { :defaults => {:memory_threshold => "555.megabytes"}, :queue_worker_base => { :defaults => {:memory_threshold => "575.megabytes"}, :ems_refresh_worker => { :defaults => {:memory_threshold => "605.megabytes"}, :ems_refresh_worker_amazon => { :memory_threshold => "805.megabytes" } } } } } } end before do Vmdb::Settings.save!(@server, config1) Vmdb::Settings.save!(@server2, config2) end it "uses the worker's miq_server" do expect(@worker.worker_settings[:memory_threshold]).to eq(700.megabytes) expect(@worker2.worker_settings[:memory_threshold]).to eq(805.megabytes) end it "uses passed in config" do expect(@worker.worker_settings(:config => config2)[:memory_threshold]).to eq(805.megabytes) expect(@worker2.worker_settings(:config => config1)[:memory_threshold]).to eq(700.megabytes) end it "without overrides" do @server.settings_changes.where( :key => "/workers/worker_base/queue_worker_base/ems_refresh_worker/ems_refresh_worker_amazon/memory_threshold" ).delete_all expect(@worker.worker_settings[:memory_threshold]).to eq(600.megabytes) expect(@worker2.worker_settings[:memory_threshold]).to eq(805.megabytes) end end end describe ".config_settings_path" do let(:capu_worker) do ManageIQ::Providers::Amazon::CloudManager::MetricsCollectorWorker end it "include parent entries" do expect(capu_worker.config_settings_path).to eq( %i(workers worker_base queue_worker_base ems_metrics_collector_worker ems_metrics_collector_worker_amazon) ) end it "works for high level entries" do expect(MiqEmsMetricsCollectorWorker.config_settings_path).to eq( %i(workers worker_base queue_worker_base ems_metrics_collector_worker) ) end end context "instance" do before do allow(described_class).to receive(:nice_increment).and_return("+10") @worker = FactoryBot.create(:miq_worker) end it "#worker_options" do expect(@worker.worker_options).to eq(:guid => @worker.guid) end context "#destroy" do context "where it has messages it's handling" do before { 
EvmSpecHelper.local_guid_miq_server_zone } let(:miq_task) do queue_opts = {:class_name => "MiqServer", :method_name => "my_server", :args => []} task_opts = {:name => "Thing1", :userid => "admin"} MiqTask.generic_action_with_callback(task_opts, queue_opts, true) end let(:message_linked_to_task) { miq_task.miq_queue } it "synchronously errors out tasks linked to these soon to be deleted active messages" do miq_task.state_active message_linked_to_task.update!(:handler => @worker, :state => MiqQueue::STATE_DEQUEUE) @worker.destroy expect(miq_task.reload.active?).to be_falsey expect(miq_task.status_error?).to be_truthy expect { message_linked_to_task.reload }.to raise_error(ActiveRecord::RecordNotFound) end it "let's other handlers pick up tasks and messages not yet started" do miq_task.state_queued message_linked_to_task.update!(:handler => @worker, :state => MiqQueue::STATE_READY) @worker.destroy expect(miq_task.reload.active?).to be_falsey expect(miq_task.status_ok?).to be_truthy expect(message_linked_to_task.reload.handler).to be_nil end end end context "#command_line" do it "without guid in worker_options" do allow(@worker).to receive(:worker_options).and_return({}) expect { @worker.command_line }.to raise_error(ArgumentError) end it "without ENV['APPLIANCE']" do allow(@worker).to receive(:worker_options).and_return(:ems_id => 1234, :guid => @worker.guid) expect(@worker.command_line).to_not include("nice") end it "with ENV['APPLIANCE']" do begin allow(MiqWorker).to receive(:nice_increment).and_return("10") allow(@worker).to receive(:worker_options).and_return(:ems_id => 1234, :guid => @worker.guid) old_env = ENV.delete('APPLIANCE') ENV['APPLIANCE'] = 'true' cmd = @worker.command_line expect(cmd).to start_with("nice -n 10") expect(cmd).to include("--ems-id 1234") expect(cmd).to include("--guid #{@worker.guid}") expect(cmd).to include("--heartbeat") expect(cmd).to end_with("MiqWorker") ensure # ENV['x'] = nil deletes the key because ENV accepts only string values 
ENV['APPLIANCE'] = old_env end end end describe "#kill_async" do let!(:remote_server) { EvmSpecHelper.remote_guid_miq_server_zone[1] } let!(:local_server) { EvmSpecHelper.local_guid_miq_server_zone[1] } it "queues local worker to local server" do worker = FactoryBot.create(:miq_worker, :miq_server => local_server) worker.kill_async msg = MiqQueue.where(:method_name => 'kill', :class_name => worker.class.name).first expect(msg).to have_attributes( :queue_name => 'miq_server', :server_guid => local_server.guid, :zone => local_server.my_zone ) end it "queues remote worker to remote server" do worker = FactoryBot.create(:miq_worker, :miq_server => remote_server) worker.kill_async msg = MiqQueue.where(:method_name => 'kill', :class_name => worker.class.name).first expect(msg).to have_attributes( :queue_name => 'miq_server', :server_guid => remote_server.guid, :zone => remote_server.my_zone ) end end describe "#stopping_for_too_long?" do subject { @worker.stopping_for_too_long? } it "false if started" do @worker.update(:status => described_class::STATUS_STARTED) expect(subject).to be_falsey end it "true if stopping and not heartbeated recently" do @worker.update(:status => described_class::STATUS_STOPPING, :last_heartbeat => 30.minutes.ago) expect(subject).to be_truthy end it "true if stopping and last heartbeat is within the queue message timeout of an active message" do @worker.messages << FactoryBot.create(:miq_queue, :msg_timeout => 60.minutes) @worker.update(:status => described_class::STATUS_STOPPING, :last_heartbeat => 90.minutes.ago) expect(subject).to be_truthy end it "false if stopping and last heartbeat is older than the queue message timeout of the work item" do @worker.messages << FactoryBot.create(:miq_queue, :msg_timeout => 60.minutes, :state => "dequeue") @worker.update(:status => described_class::STATUS_STOPPING, :last_heartbeat => 30.minutes.ago) expect(subject).to be_falsey end it "false if stopping and heartbeated recently" do @worker.update(:status 
=> described_class::STATUS_STOPPING, :last_heartbeat => 1.minute.ago) expect(subject).to be_falsey end end it "is_current? false when starting" do @worker.update_attribute(:status, described_class::STATUS_STARTING) expect(@worker.is_current?).not_to be_truthy end it "is_current? true when started" do @worker.update_attribute(:status, described_class::STATUS_STARTED) expect(@worker.is_current?).to be_truthy end it "is_current? true when working" do @worker.update_attribute(:status, described_class::STATUS_WORKING) expect(@worker.is_current?).to be_truthy end context ".status_update" do before do @worker.update_attribute(:pid, 123) require 'miq-process' end it "no such process" do allow(MiqProcess).to receive(:processInfo).with(123).and_raise(Errno::ESRCH) described_class.status_update @worker.reload expect(@worker.status).to eq MiqWorker::STATUS_ABORTED end it "a StandardError" do allow(MiqProcess).to receive(:processInfo).with(123).and_raise(StandardError.new("LOLRUBY")) expect($log).to receive(:warn).with(/LOLRUBY/) described_class.status_update end it "updates expected values" do values = { :pid => 123, :memory_usage => 246_824_960, :memory_size => 2_792_611_840, :percent_memory => "1.4", :percent_cpu => "1.0", :cpu_time => 660, :priority => "31", :name => "ruby", :proportional_set_size => 198_721_987, :unique_set_size => 172_122_122 } fields = described_class::PROCESS_INFO_FIELDS.dup # convert priority -> os_priority column fields.delete(:priority) fields << :os_priority fields.each do |field| expect(@worker.public_send(field)).to be_nil end allow(MiqProcess).to receive(:processInfo).with(123).and_return(values) described_class.status_update @worker.reload fields.each do |field| expect(@worker.public_send(field)).to be_present end expect(@worker.proportional_set_size).to eq 198_721_987 expect(@worker.unique_set_size).to eq 172_122_122 end end end end
agrare/manageiq
spec/models/miq_worker_spec.rb
Ruby
apache-2.0
19,841
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package git4idea.branch;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.text.HtmlBuilder;
import com.intellij.openapi.util.text.HtmlChunk;
import com.intellij.openapi.vcs.VcsNotifier;
import git4idea.commands.Git;
import git4idea.commands.GitCommandResult;
import git4idea.commands.GitCompoundResult;
import git4idea.i18n.GitBundle;
import git4idea.repo.GitRepository;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;

import java.util.Collection;
import java.util.List;

import static git4idea.GitNotificationIdsHolder.BRANCH_RENAME_ROLLBACK_FAILED;
import static git4idea.GitNotificationIdsHolder.BRANCH_RENAME_ROLLBACK_SUCCESS;

/**
 * Renames a branch from {@code myCurrentName} to {@code myNewName} in each of the given
 * repositories, one repository at a time. The first repository whose rename fails aborts the
 * whole operation with a fatal error; on rollback, repositories already renamed are renamed
 * back to the original name.
 */
class GitRenameBranchOperation extends GitBranchOperation {
  @NotNull private final VcsNotifier myNotifier;
  // Name of the branch before the rename (also the name restored by rollback).
  @NotNull @NlsSafe private final String myCurrentName;
  // Name the branch is renamed to.
  @NotNull @NlsSafe private final String myNewName;

  GitRenameBranchOperation(@NotNull Project project,
                           @NotNull Git git,
                           @NotNull GitBranchUiHandler uiHandler,
                           @NotNull @NlsSafe String currentName,
                           @NotNull @NlsSafe String newName,
                           @NotNull List<? extends GitRepository> repositories) {
    super(project, git, uiHandler, repositories);
    myCurrentName = currentName;
    myNewName = newName;
    myNotifier = VcsNotifier.getInstance(myProject);
  }

  /**
   * Runs {@code git branch -m} (via {@link Git#renameBranch}) in every remaining repository.
   * A successful repository is refreshed and marked, so rollback knows what to undo; the first
   * failure stops the loop and reports a fatal error without touching the remaining repositories.
   */
  @Override
  protected void execute() {
    while (hasMoreRepositories()) {
      GitRepository repository = next();
      GitCommandResult result = myGit.renameBranch(repository, myCurrentName, myNewName);
      if (result.success()) {
        repository.update();
        markSuccessful(repository);
      }
      else {
        fatalError(GitBundle.message("git.rename.branch.could.not.rename.from.to", myCurrentName, myNewName), result);
        return;
      }
    }
    notifySuccess();
  }

  /**
   * Renames the branch back to {@code myCurrentName} in every repository where the forward
   * rename already succeeded, then notifies about the aggregate result. Each repository is
   * refreshed regardless of its individual rollback outcome.
   */
  @Override
  protected void rollback() {
    GitCompoundResult result = new GitCompoundResult(myProject);
    Collection<GitRepository> repositories = getSuccessfulRepositories();
    for (GitRepository repository : repositories) {
      result.append(repository, myGit.renameBranch(repository, myNewName, myCurrentName));
      repository.update();
    }
    if (result.totalSuccess()) {
      myNotifier.notifySuccess(BRANCH_RENAME_ROLLBACK_SUCCESS, GitBundle.message("git.rename.branch.rollback.successful"),
                               GitBundle.message("git.rename.branch.renamed.back.to", myCurrentName));
    }
    else {
      myNotifier.notifyError(BRANCH_RENAME_ROLLBACK_FAILED, GitBundle.message("git.rename.branch.rollback.failed"),
                             result.getErrorOutputWithReposIndication(), true);
    }
  }

  /** HTML success message showing both the old and the new branch name in bold code style. */
  @NotNull
  @Override
  protected String getSuccessMessage() {
    return GitBundle.message("git.rename.branch.was.renamed.to", HtmlChunk.text(myCurrentName).code().bold(),
                             HtmlChunk.text(myNewName).code().bold());
  }

  /**
   * HTML text shown when asking the user whether to roll back: lists the repositories that were
   * already renamed and offers to rename the branch back to {@code myCurrentName}.
   */
  @NotNull
  @Override
  @Nls(capitalization = Nls.Capitalization.Sentence)
  protected String getRollbackProposal() {
    return new HtmlBuilder().append(GitBundle.message("git.rename.branch.has.succeeded.for.the.following.repositories",
                                                      getSuccessfulRepositories().size()))
      .br()
      .appendRaw(successfulRepositoriesJoined())
      .br()
      .append(GitBundle.message("git.rename.branch.you.may.rename.branch.back", myCurrentName)).toString();
  }

  /** Localized operation name used in progress titles and error messages. */
  @NotNull
  @Nls
  @Override
  protected String getOperationName() {
    return GitBundle.message("rename.branch.operation.name");
  }
}
smmribeiro/intellij-community
plugins/git4idea/src/git4idea/branch/GitRenameBranchOperation.java
Java
apache-2.0
4,461
package com.google.api.ads.adwords.jaxws.v201502.express;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 * NOTE(review): this appears to be a JAXB-generated request wrapper for the
 * BudgetSuggestionService "get" operation (the package and schema-fragment Javadoc
 * follow the wsimport style) — if so, regenerate from the WSDL rather than editing
 * by hand; confirm against the build tooling.
 *
 *             Retrieves the budget suggestion for the specified criteria in the given selector based on
 *             co-trigger data.
 *             @param selector the selector specifying the budget suggestion to return
 *             @return budget suggestion identified by the selector
 *
 *
 * <p>Java class for get element declaration.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;element name="get">
 *   &lt;complexType>
 *     &lt;complexContent>
 *       &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *         &lt;sequence>
 *           &lt;element name="selector" type="{https://adwords.google.com/api/adwords/express/v201502}BudgetSuggestionSelector" minOccurs="0"/>
 *         &lt;/sequence>
 *       &lt;/restriction>
 *     &lt;/complexContent>
 *   &lt;/complexType>
 * &lt;/element>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "selector"
})
@XmlRootElement(name = "get")
public class BudgetSuggestionServiceInterfaceget {

    // Optional in the schema (minOccurs="0"), so this may legitimately be null.
    protected BudgetSuggestionSelector selector;

    /**
     * Gets the value of the selector property.
     *
     * @return
     *     possible object is
     *     {@link BudgetSuggestionSelector }
     *
     */
    public BudgetSuggestionSelector getSelector() {
        return selector;
    }

    /**
     * Sets the value of the selector property.
     *
     * @param value
     *     allowed object is
     *     {@link BudgetSuggestionSelector }
     *
     */
    public void setSelector(BudgetSuggestionSelector value) {
        this.selector = value;
    }

}
stoksey69/googleads-java-lib
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201502/express/BudgetSuggestionServiceInterfaceget.java
Java
apache-2.0
1,952
# -*- coding: utf-8 -*-

###############################################################################
#
# Badges
# Returns badges for a given user.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
"""Temboo Choreo binding for the Foursquare Users/Badges endpoint.

NOTE(review): this module follows the Temboo SDK generator layout
(Choreography / InputSet / ResultSet / ChoreographyExecution quartet);
if it is generated, regenerate rather than hand-edit — confirm with tooling.
"""

from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json

class Badges(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the Badges Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(Badges, self).__init__(temboo_session, '/Library/Foursquare/Users/Badges')

    def new_input_set(self):
        """Return a fresh, empty input set for this Choreo."""
        return BadgesInputSet()

    def _make_result_set(self, result, path):
        """Wrap a raw execution result in this Choreo's typed ResultSet."""
        return BadgesResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        """Build the execution handle used to poll/retrieve this Choreo's run."""
        return BadgesChoreographyExecution(session, exec_id, path)

class BadgesInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the Badges
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_OauthToken(self, value):
        """
        Set the value of the OauthToken input for this Choreo. ((required, string) The Foursquare API OAuth token string.)
        """
        super(BadgesInputSet, self)._set_input('OauthToken', value)
    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that response should be in. Can be set to xml or json. Defaults to json.)
        """
        super(BadgesInputSet, self)._set_input('ResponseFormat', value)
    def set_UserID(self, value):
        """
        Set the value of the UserID input for this Choreo. ((optional, string) Identity of the user to get badges for. Defaults to "self" to get lists of the acting user.)
        """
        super(BadgesInputSet, self)._set_input('UserID', value)

class BadgesResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the Badges Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        """Parse a JSON string into Python objects.

        NOTE: the parameter name shadows the built-in ``str``; kept as-is
        because this is generated SDK code.
        """
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Foursquare. Corresponds to the ResponseFormat input. Defaults to JSON.)
        """
        return self._output.get('Response', None)

class BadgesChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        """Wrap the raw response of a finished execution in a BadgesResultSet."""
        return BadgesResultSet(response, path)
jordanemedlock/psychtruths
temboo/core/Library/Foursquare/Users/Badges.py
Python
apache-2.0
3,462
/* * Copyright (C) 2014 Jerrell Mardis * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jerrellmardis.amphitheatre.api; import com.google.gson.Gson; import com.jerrellmardis.amphitheatre.model.guessit.Guess; import com.jerrellmardis.amphitheatre.util.ApiConstants; import retrofit.RestAdapter; import retrofit.converter.GsonConverter; import retrofit.http.GET; import retrofit.http.Query; /** * Created by Jerrell Mardis on 7/8/14. */ public class GuessItClient { private interface GuessItService { @GET("/guess") Guess guess(@Query("filename") CharSequence filename); } private static GuessItService service; private static GuessItService getService() { if (service == null) { RestAdapter restAdapter = new RestAdapter.Builder() .setConverter(new GsonConverter(new Gson())) .setEndpoint(ApiConstants.GUESS_IT_SERVER_URL) .build(); service = restAdapter.create(GuessItService.class); } return service; } public static Guess guess(CharSequence filename) { try { Guess guess = getService().guess(filename); return guess; } catch (Exception e) { e.printStackTrace(); } return null; } }
leasual/Amphitheatre
tv/src/main/java/com/jerrellmardis/amphitheatre/api/GuessItClient.java
Java
apache-2.0
1,839
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.connectors.jdbc.internal.cli;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;

/**
 * Mutable result holder for the "describe mapping" CLI command: an insertion-ordered
 * attribute name/value map plus the server group and field mappings the command reports.
 */
public class DescribeMappingResult {

  /** Attribute name/value pairs; insertion order is preserved for display. */
  private final Map<String, String> attributeMap;

  private String groupName;
  private List<FieldMapping> fieldMappings;

  /** Creates a result with an empty, insertion-ordered attribute map. */
  public DescribeMappingResult() {
    attributeMap = new LinkedHashMap<>();
  }

  /**
   * Creates a result backed directly by the given map — no defensive copy is taken,
   * so later changes to the argument are visible through this result.
   */
  public DescribeMappingResult(Map<String, String> attributeMap) {
    this.attributeMap = attributeMap;
  }

  /** Returns the live (mutable) attribute map backing this result. */
  public Map<String, String> getAttributeMap() {
    return attributeMap;
  }

  /** Returns the server group this mapping belongs to, or {@code null} if unset. */
  public String getGroupName() {
    return groupName;
  }

  public void setGroupName(String group) {
    this.groupName = group;
  }

  /** Returns the configured field mappings, or {@code null} if unset. */
  public List<FieldMapping> getFieldMappings() {
    return fieldMappings;
  }

  public void setFieldMappings(List<FieldMapping> fieldMappings) {
    this.fieldMappings = fieldMappings;
  }
}
smgoller/geode
geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/DescribeMappingResult.java
Java
apache-2.0
1,768
--- name: Documentation about: Report an issue with our documentation labels: 'type: documentation, triage' assignees: '' --- <!-- - Thanks for helping us improve our developer site documentation! - Use this template to describe issues with the content at - developers.google.com/blockly/guides --> **Where** <!-- A link to the page with the documentation you want us to update. - More specific is better. If no page exists, describe what the page - should be, and where. --> **What** <!-- What kind of content is it? - Check a box with an 'x' between the brackets: [x] --> - [ ] Text - [ ] Image or Gif - [ ] Other **Old content** <!-- What the documentation currently says --> **Suggested content** <!-- Your suggestion for improved documentation --> **Additional context** <!-- Add any other context about the problem here. - If this is related to a specific pull request, link to it. -->
rachel-fenichel/blockly
.github/ISSUE_TEMPLATE/documentation.md
Markdown
apache-2.0
937
//============================================================================= // // Adventure Game Studio (AGS) // // Copyright (C) 1999-2011 Chris Jones and 2011-20xx others // The full list of copyright holders can be found in the Copyright.txt // file, which is part of this source code distribution. // // The AGS source code is provided under the Artistic License 2.0. // A copy of this license can be found in the file License.txt and at // http://www.opensource.org/licenses/artistic-license-2.0.php // //============================================================================= // // // //============================================================================= #ifndef __AGS_EE_AC__TEXTBOX_H #define __AGS_EE_AC__TEXTBOX_H #include "gui/guitextbox.h" const char* TextBox_GetText_New(GUITextBox *texbox); void TextBox_GetText(GUITextBox *texbox, char *buffer); void TextBox_SetText(GUITextBox *texbox, const char *newtex); int TextBox_GetTextColor(GUITextBox *guit); void TextBox_SetTextColor(GUITextBox *guit, int colr); int TextBox_GetFont(GUITextBox *guit); void TextBox_SetFont(GUITextBox *guit, int fontnum); #endif // __AGS_EE_AC__TEXTBOX_H
humble/ags-geminirue
Engine/ac/textbox.h
C
artistic-2.0
1,177
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>Minim : : Bypass : : tick</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<link href="stylesheet.css" rel="stylesheet" type="text/css">
</head>
<body>
<center>
<table class="mainTable">
<tr>
<td class="header">
<span class="indexheader">Minim</span><br/>
<span class="indexnavigation">
<a href="index.html">core</a> |
<a href="index_ugens.html">ugens</a> |
<a href="index_analysis.html">analysis</a>
</span>
</td>
<td class="border-left">&nbsp;</td>
</tr>
<tr>
<td class="classNavigation">
<p class="mainTextName"><A href="ugen_class_ugen.html">UGen</A></p>
<p class="methodName">tick</p>
</td>
<td class="mainText border-left">
<p class="memberSectionHeader">Description</p>
Generates one sample frame for this UGen.
<p class="memberSectionHeader">Signature</p>
<pre>void tick(float[] channels)
</pre>
<p class="memberSectionHeader">Parameters</p>
<span class="parameterName">channels</span>&nbsp;&mdash;&nbsp;<span class="parameterDescription">An array that represents one sample frame. To generate a mono signal, pass an array of length 1, if stereo an array of length 2, and so on. How a UGen deals with multi-channel sound will be implementation dependent.</span><br/>
<p class="memberSectionHeader">Returns</p>
<p>None</p>
<p class="memberSectionHeader">Related</p>
<p class="memberSectionHeader">Example</p>
<pre>None available</pre>
<p class="memberSectionHeader">Usage</p>
Web &amp; Application
</td>
</tr>
</table>
</center>
</body>
</html>
UTSDataArena/examples
processing/sketchbook/libraries/minim/documentation/bypass_method_tick.html
HTML
bsd-2-clause
1,915
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_MEDIA_WEBRTC_DESKTOP_MEDIA_PICKER_H_ #define CHROME_BROWSER_MEDIA_WEBRTC_DESKTOP_MEDIA_PICKER_H_ #include <memory> #include "base/callback_forward.h" #include "base/macros.h" #include "base/strings/string16.h" #include "content/public/browser/desktop_media_id.h" #include "ui/gfx/native_widget_types.h" class DesktopMediaList; namespace content { class WebContents; } // Abstract interface for desktop media picker UI. It's used by Desktop Media // API to let user choose a desktop media source. class DesktopMediaPicker { public: typedef base::Callback<void(content::DesktopMediaID)> DoneCallback; // Creates default implementation of DesktopMediaPicker for the current // platform. static std::unique_ptr<DesktopMediaPicker> Create(); DesktopMediaPicker() {} virtual ~DesktopMediaPicker() {} // Shows dialog with list of desktop media sources (screens, windows, tabs) // provided by |screen_list|, |window_list| and |tab_list|. // Dialog window will call |done_callback| when user chooses one of the // sources or closes the dialog. virtual void Show(content::WebContents* web_contents, gfx::NativeWindow context, gfx::NativeWindow parent, const base::string16& app_name, const base::string16& target_name, std::unique_ptr<DesktopMediaList> screen_list, std::unique_ptr<DesktopMediaList> window_list, std::unique_ptr<DesktopMediaList> tab_list, bool request_audio, const DoneCallback& done_callback) = 0; private: DISALLOW_COPY_AND_ASSIGN(DesktopMediaPicker); }; #endif // CHROME_BROWSER_MEDIA_WEBRTC_DESKTOP_MEDIA_PICKER_H_
ssaroha/node-webrtc
third_party/webrtc/include/chromium/src/chrome/browser/media/webrtc/desktop_media_picker.h
C
bsd-2-clause
1,933
/* * Copyright (C) Igor Sysoev * Copyright (C) Nginx, Inc. */ #include <ngx_config.h> #include <ngx_core.h> #include <ngx_http.h> static char *ngx_http_block(ngx_conf_t *cf, ngx_command_t *cmd, void *conf); static ngx_int_t ngx_http_init_phases(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf); static ngx_int_t ngx_http_init_headers_in_hash(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf); static ngx_int_t ngx_http_init_phase_handlers(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf); static ngx_int_t ngx_http_add_addresses(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf, ngx_http_conf_port_t *port, ngx_http_listen_opt_t *lsopt); static ngx_int_t ngx_http_add_address(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf, ngx_http_conf_port_t *port, ngx_http_listen_opt_t *lsopt); static ngx_int_t ngx_http_add_server(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf, ngx_http_conf_addr_t *addr); static char *ngx_http_merge_servers(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf, ngx_http_module_t *module, ngx_uint_t ctx_index); static char *ngx_http_merge_locations(ngx_conf_t *cf, ngx_queue_t *locations, void **loc_conf, ngx_http_module_t *module, ngx_uint_t ctx_index); static ngx_int_t ngx_http_init_locations(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf, ngx_http_core_loc_conf_t *pclcf); static ngx_int_t ngx_http_init_static_location_trees(ngx_conf_t *cf, ngx_http_core_loc_conf_t *pclcf); static ngx_int_t ngx_http_cmp_locations(const ngx_queue_t *one, const ngx_queue_t *two); static ngx_int_t ngx_http_join_exact_locations(ngx_conf_t *cf, ngx_queue_t *locations); static void ngx_http_create_locations_list(ngx_queue_t *locations, ngx_queue_t *q); static ngx_http_location_tree_node_t * ngx_http_create_locations_tree(ngx_conf_t *cf, ngx_queue_t *locations, size_t prefix); static ngx_int_t ngx_http_optimize_servers(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf, ngx_array_t *ports); static ngx_int_t ngx_http_server_names(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf, 
ngx_http_conf_addr_t *addr);
static ngx_int_t ngx_http_cmp_conf_addrs(const void *one, const void *two);
static int ngx_libc_cdecl ngx_http_cmp_dns_wildcards(const void *one,
    const void *two);

static ngx_int_t ngx_http_init_listening(ngx_conf_t *cf,
    ngx_http_conf_port_t *port);
static ngx_listening_t *ngx_http_add_listening(ngx_conf_t *cf,
    ngx_http_conf_addr_t *addr);
static ngx_int_t ngx_http_add_addrs(ngx_conf_t *cf, ngx_http_port_t *hport,
    ngx_http_conf_addr_t *addr);
#if (NGX_HAVE_INET6)
static ngx_int_t ngx_http_add_addrs6(ngx_conf_t *cf, ngx_http_port_t *hport,
    ngx_http_conf_addr_t *addr);
#endif


/* total number of modules of type NGX_HTTP_MODULE; set in ngx_http_block() */
ngx_uint_t   ngx_http_max_module;


/*
 * heads of the output header/body and request body filter chains;
 * filter modules push themselves onto these in their postconfiguration
 */
ngx_http_output_header_filter_pt  ngx_http_top_header_filter;
ngx_http_output_body_filter_pt    ngx_http_top_body_filter;
ngx_http_request_body_filter_pt   ngx_http_top_request_body_filter;


ngx_str_t  ngx_http_html_default_types[] = {
    ngx_string("text/html"),
    ngx_null_string
};


static ngx_command_t  ngx_http_commands[] = {

    { ngx_string("http"),
      NGX_MAIN_CONF|NGX_CONF_BLOCK|NGX_CONF_NOARGS,
      ngx_http_block,
      0,
      0,
      NULL },

      ngx_null_command
};


static ngx_core_module_t  ngx_http_module_ctx = {
    ngx_string("http"),
    NULL,
    NULL
};


ngx_module_t  ngx_http_module = {
    NGX_MODULE_V1,
    &ngx_http_module_ctx,                  /* module context */
    ngx_http_commands,                     /* module directives */
    NGX_CORE_MODULE,                       /* module type */
    NULL,                                  /* init master */
    NULL,                                  /* init module */
    NULL,                                  /* init process */
    NULL,                                  /* init thread */
    NULL,                                  /* exit thread */
    NULL,                                  /* exit process */
    NULL,                                  /* exit master */
    NGX_MODULE_V1_PADDING
};


/*
 * Handler for the top-level "http" directive.
 *
 * Creates the main/srv/loc configuration contexts for every HTTP module,
 * runs preconfiguration hooks, parses the http{} block, initializes and
 * merges configurations, builds the location trees, the request phase
 * engine, the headers_in hash and the listening sockets.
 *
 * Returns NGX_CONF_OK, NGX_CONF_ERROR, or a module error string.
 */
static char *
ngx_http_block(ngx_conf_t *cf, ngx_command_t *cmd, void *conf)
{
    char                        *rv;
    ngx_uint_t                   mi, m, s;
    ngx_conf_t                   pcf;
    ngx_http_module_t           *module;
    ngx_http_conf_ctx_t         *ctx;
    ngx_http_core_loc_conf_t    *clcf;
    ngx_http_core_srv_conf_t   **cscfp;
    ngx_http_core_main_conf_t   *cmcf;

    /* a second http{} block is rejected */

    if (*(ngx_http_conf_ctx_t **) conf) {
        return "is duplicate";
    }

    /* the main http context */

    ctx = ngx_pcalloc(cf->pool, sizeof(ngx_http_conf_ctx_t));
    if (ctx == NULL) {
        return NGX_CONF_ERROR;
    }

    *(ngx_http_conf_ctx_t **) conf = ctx;


    /* count the number of the http modules and set up their indices */

    ngx_http_max_module = 0;
    for (m = 0; ngx_modules[m]; m++) {
        if (ngx_modules[m]->type != NGX_HTTP_MODULE) {
            continue;
        }

        ngx_modules[m]->ctx_index = ngx_http_max_module++;
    }


    /* the http main_conf context, it is the same in the all http contexts */

    ctx->main_conf = ngx_pcalloc(cf->pool,
                                 sizeof(void *) * ngx_http_max_module);
    if (ctx->main_conf == NULL) {
        return NGX_CONF_ERROR;
    }


    /*
     * the http null srv_conf context, it is used to merge
     * the server{}s' srv_conf's
     */

    ctx->srv_conf = ngx_pcalloc(cf->pool, sizeof(void *) * ngx_http_max_module);
    if (ctx->srv_conf == NULL) {
        return NGX_CONF_ERROR;
    }


    /*
     * the http null loc_conf context, it is used to merge
     * the server{}s' loc_conf's
     */

    ctx->loc_conf = ngx_pcalloc(cf->pool, sizeof(void *) * ngx_http_max_module);
    if (ctx->loc_conf == NULL) {
        return NGX_CONF_ERROR;
    }


    /*
     * create the main_conf's, the null srv_conf's, and the null loc_conf's
     * of the all http modules
     */

    for (m = 0; ngx_modules[m]; m++) {
        if (ngx_modules[m]->type != NGX_HTTP_MODULE) {
            continue;
        }

        module = ngx_modules[m]->ctx;
        mi = ngx_modules[m]->ctx_index;

        if (module->create_main_conf) {
            ctx->main_conf[mi] = module->create_main_conf(cf);
            if (ctx->main_conf[mi] == NULL) {
                return NGX_CONF_ERROR;
            }
        }

        if (module->create_srv_conf) {
            ctx->srv_conf[mi] = module->create_srv_conf(cf);
            if (ctx->srv_conf[mi] == NULL) {
                return NGX_CONF_ERROR;
            }
        }

        if (module->create_loc_conf) {
            ctx->loc_conf[mi] = module->create_loc_conf(cf);
            if (ctx->loc_conf[mi] == NULL) {
                return NGX_CONF_ERROR;
            }
        }
    }

    /* save cf so it can be restored after parsing the http{} block */

    pcf = *cf;
    cf->ctx = ctx;

    for (m = 0; ngx_modules[m]; m++) {
        if (ngx_modules[m]->type != NGX_HTTP_MODULE) {
            continue;
        }

        module = ngx_modules[m]->ctx;

        if (module->preconfiguration) {
            if (module->preconfiguration(cf) != NGX_OK) {
                return NGX_CONF_ERROR;
            }
        }
    }

    /* parse inside the http{} block */

    cf->module_type = NGX_HTTP_MODULE;
    cf->cmd_type = NGX_HTTP_MAIN_CONF;

    rv = ngx_conf_parse(cf,
                        NULL);

    if (rv != NGX_CONF_OK) {
        goto failed;
    }

    /*
     * init http{} main_conf's, merge the server{}s' srv_conf's
     * and its location{}s' loc_conf's
     */

    cmcf = ctx->main_conf[ngx_http_core_module.ctx_index];
    cscfp = cmcf->servers.elts;

    for (m = 0; ngx_modules[m]; m++) {
        if (ngx_modules[m]->type != NGX_HTTP_MODULE) {
            continue;
        }

        module = ngx_modules[m]->ctx;
        mi = ngx_modules[m]->ctx_index;

        /* init http{} main_conf's */

        if (module->init_main_conf) {
            rv = module->init_main_conf(cf, ctx->main_conf[mi]);
            if (rv != NGX_CONF_OK) {
                goto failed;
            }
        }

        rv = ngx_http_merge_servers(cf, cmcf, module, mi);
        if (rv != NGX_CONF_OK) {
            goto failed;
        }
    }

    /* create location trees */

    for (s = 0; s < cmcf->servers.nelts; s++) {

        clcf = cscfp[s]->ctx->loc_conf[ngx_http_core_module.ctx_index];

        if (ngx_http_init_locations(cf, cscfp[s], clcf) != NGX_OK) {
            return NGX_CONF_ERROR;
        }

        if (ngx_http_init_static_location_trees(cf, clcf) != NGX_OK) {
            return NGX_CONF_ERROR;
        }
    }

    if (ngx_http_init_phases(cf, cmcf) != NGX_OK) {
        return NGX_CONF_ERROR;
    }

    if (ngx_http_init_headers_in_hash(cf, cmcf) != NGX_OK) {
        return NGX_CONF_ERROR;
    }

    for (m = 0; ngx_modules[m]; m++) {
        if (ngx_modules[m]->type != NGX_HTTP_MODULE) {
            continue;
        }

        module = ngx_modules[m]->ctx;

        if (module->postconfiguration) {
            if (module->postconfiguration(cf) != NGX_OK) {
                return NGX_CONF_ERROR;
            }
        }
    }

    if (ngx_http_variables_init_vars(cf) != NGX_OK) {
        return NGX_CONF_ERROR;
    }

    /*
     * http{}'s cf->ctx was needed while the configuration merging
     * and in postconfiguration process
     */

    *cf = pcf;

    if (ngx_http_init_phase_handlers(cf, cmcf) != NGX_OK) {
        return NGX_CONF_ERROR;
    }

    /* optimize the lists of ports, addresses and server names */

    if (ngx_http_optimize_servers(cf, cmcf, cmcf->ports) != NGX_OK) {
        return NGX_CONF_ERROR;
    }

    return NGX_CONF_OK;

failed:

    *cf = pcf;

    return rv;
}


/*
 * Initializes the per-phase handler arrays in cmcf->phases[].
 * The initial sizes (1, 2, 4) are only hints for ngx_array_init();
 * the arrays grow as modules register handlers in postconfiguration.
 */
static ngx_int_t
ngx_http_init_phases(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf)
{
    if (ngx_array_init(&cmcf->phases[NGX_HTTP_POST_READ_PHASE].handlers,
                       cf->pool, 1,
sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    if (ngx_array_init(&cmcf->phases[NGX_HTTP_SERVER_REWRITE_PHASE].handlers,
                       cf->pool, 1, sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    if (ngx_array_init(&cmcf->phases[NGX_HTTP_REWRITE_PHASE].handlers,
                       cf->pool, 1, sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    if (ngx_array_init(&cmcf->phases[NGX_HTTP_PREACCESS_PHASE].handlers,
                       cf->pool, 1, sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    if (ngx_array_init(&cmcf->phases[NGX_HTTP_ACCESS_PHASE].handlers,
                       cf->pool, 2, sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    if (ngx_array_init(&cmcf->phases[NGX_HTTP_CONTENT_PHASE].handlers,
                       cf->pool, 4, sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    if (ngx_array_init(&cmcf->phases[NGX_HTTP_LOG_PHASE].handlers,
                       cf->pool, 1, sizeof(ngx_http_handler_pt))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    return NGX_OK;
}


/*
 * Builds cmcf->headers_in_hash, mapping lowercased header names from
 * the ngx_http_headers_in[] table (terminated by a zero-length name)
 * to their ngx_http_header_t descriptors.
 */
static ngx_int_t
ngx_http_init_headers_in_hash(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf)
{
    ngx_array_t         headers_in;
    ngx_hash_key_t     *hk;
    ngx_hash_init_t     hash;
    ngx_http_header_t  *header;

    if (ngx_array_init(&headers_in, cf->temp_pool, 32, sizeof(ngx_hash_key_t))
        != NGX_OK)
    {
        return NGX_ERROR;
    }

    for (header = ngx_http_headers_in; header->name.len; header++) {
        hk = ngx_array_push(&headers_in);
        if (hk == NULL) {
            return NGX_ERROR;
        }

        /* keys are hashed case-insensitively: lookups use lowercased names */

        hk->key = header->name;
        hk->key_hash = ngx_hash_key_lc(header->name.data, header->name.len);
        hk->value = header;
    }

    hash.hash = &cmcf->headers_in_hash;
    hash.key = ngx_hash_key_lc;
    hash.max_size = 512;
    hash.bucket_size = ngx_align(64, ngx_cacheline_size);
    hash.name = "headers_in_hash";
    hash.pool = cf->pool;
    hash.temp_pool = NULL;

    if (ngx_hash_init(&hash, headers_in.elts, headers_in.nelts) != NGX_OK) {
        return NGX_ERROR;
    }

    return NGX_OK;
}


/*
 * Flattens the per-phase handler arrays into the single linear array
 * cmcf->phase_engine.handlers.  Each entry gets a phase-specific checker
 * and a "next" index used to jump to the following phase; the
 * POST_REWRITE entry jumps back to FIND_CONFIG to re-match locations.
 * NOTE(review): the counting pass below must mirror the filling pass
 * exactly — keep the two switch statements in sync when editing.
 */
static ngx_int_t
ngx_http_init_phase_handlers(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf)
{
    ngx_int_t                   j;
    ngx_uint_t                  i, n;
    ngx_uint_t                  find_config_index, use_rewrite, use_access;
    ngx_http_handler_pt        *h;
    ngx_http_phase_handler_t   *ph;
    ngx_http_phase_handler_pt   checker;

    cmcf->phase_engine.server_rewrite_index = (ngx_uint_t) -1;
    cmcf->phase_engine.location_rewrite_index = (ngx_uint_t) -1;
    find_config_index = 0;
    use_rewrite = cmcf->phases[NGX_HTTP_REWRITE_PHASE].handlers.nelts ? 1 : 0;
    use_access = cmcf->phases[NGX_HTTP_ACCESS_PHASE].handlers.nelts ? 1 : 0;

    /* count the required entries; POST_REWRITE/POST_ACCESS/TRY_FILES are
     * conditional single-entry phases with no registered handlers */

    n = use_rewrite + use_access + cmcf->try_files + 1 /* find config phase */;

    for (i = 0; i < NGX_HTTP_LOG_PHASE; i++) {
        n += cmcf->phases[i].handlers.nelts;
    }

    ph = ngx_pcalloc(cf->pool,
                     n * sizeof(ngx_http_phase_handler_t) + sizeof(void *));
    if (ph == NULL) {
        return NGX_ERROR;
    }

    cmcf->phase_engine.handlers = ph;
    n = 0;

    for (i = 0; i < NGX_HTTP_LOG_PHASE; i++) {
        h = cmcf->phases[i].handlers.elts;

        switch (i) {

        case NGX_HTTP_SERVER_REWRITE_PHASE:
            if (cmcf->phase_engine.server_rewrite_index == (ngx_uint_t) -1) {
                cmcf->phase_engine.server_rewrite_index = n;
            }
            checker = ngx_http_core_rewrite_phase;

            break;

        case NGX_HTTP_FIND_CONFIG_PHASE:
            find_config_index = n;

            ph->checker = ngx_http_core_find_config_phase;
            n++;
            ph++;

            continue;

        case NGX_HTTP_REWRITE_PHASE:
            if (cmcf->phase_engine.location_rewrite_index == (ngx_uint_t) -1) {
                cmcf->phase_engine.location_rewrite_index = n;
            }
            checker = ngx_http_core_rewrite_phase;

            break;

        case NGX_HTTP_POST_REWRITE_PHASE:
            if (use_rewrite) {
                ph->checker = ngx_http_core_post_rewrite_phase;

                /* after a rewrite the request is re-matched to a location */
                ph->next = find_config_index;
                n++;
                ph++;
            }

            continue;

        case NGX_HTTP_ACCESS_PHASE:
            checker = ngx_http_core_access_phase;
            n++;
            break;

        case NGX_HTTP_POST_ACCESS_PHASE:
            if (use_access) {
                ph->checker = ngx_http_core_post_access_phase;
                ph->next = n;
                ph++;
            }

            continue;

        case NGX_HTTP_TRY_FILES_PHASE:
            if (cmcf->try_files) {
                ph->checker = ngx_http_core_try_files_phase;
                n++;
                ph++;
            }

            continue;

        case NGX_HTTP_CONTENT_PHASE:
            checker = ngx_http_core_content_phase;
            break;

        default:
            checker = ngx_http_core_generic_phase;
        }

        n += cmcf->phases[i].handlers.nelts;

        /* handlers run in reverse registration order within a phase */

        for (j = cmcf->phases[i].handlers.nelts - 1; j >= 0; j--) {
            ph->checker = checker;
            ph->handler = h[j];
            ph->next = n;
            ph++;
        }
    }

    return NGX_OK;
}


/*
 * Applies one module's merge_srv_conf()/merge_loc_conf() callbacks to
 * every server{} in the http{} block, then recurses into that server's
 * locations via ngx_http_merge_locations().  cf->ctx is temporarily
 * repointed at each server's conf arrays and restored on exit.
 */
static char *
ngx_http_merge_servers(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf,
    ngx_http_module_t *module, ngx_uint_t ctx_index)
{
    char                        *rv;
    ngx_uint_t                   s;
    ngx_http_conf_ctx_t         *ctx, saved;
    ngx_http_core_loc_conf_t    *clcf;
    ngx_http_core_srv_conf_t   **cscfp;

    cscfp = cmcf->servers.elts;
    ctx = (ngx_http_conf_ctx_t *) cf->ctx;
    saved = *ctx;
    rv = NGX_CONF_OK;

    for (s = 0; s < cmcf->servers.nelts; s++) {

        /* merge the server{}s' srv_conf's */

        ctx->srv_conf = cscfp[s]->ctx->srv_conf;

        if (module->merge_srv_conf) {
            rv = module->merge_srv_conf(cf, saved.srv_conf[ctx_index],
                                        cscfp[s]->ctx->srv_conf[ctx_index]);
            if (rv != NGX_CONF_OK) {
                goto failed;
            }
        }

        if (module->merge_loc_conf) {

            /* merge the server{}'s loc_conf */

            ctx->loc_conf = cscfp[s]->ctx->loc_conf;

            rv = module->merge_loc_conf(cf, saved.loc_conf[ctx_index],
                                        cscfp[s]->ctx->loc_conf[ctx_index]);
            if (rv != NGX_CONF_OK) {
                goto failed;
            }

            /* merge the locations{}' loc_conf's */

            clcf = cscfp[s]->ctx->loc_conf[ngx_http_core_module.ctx_index];

            rv = ngx_http_merge_locations(cf, clcf->locations,
                                          cscfp[s]->ctx->loc_conf,
                                          module, ctx_index);
            if (rv != NGX_CONF_OK) {
                goto failed;
            }
        }
    }

failed:

    *ctx = saved;

    return rv;
}


/*
 * Recursively merges a module's loc_conf down a location queue:
 * each location inherits from loc_conf[ctx_index] of its parent,
 * then its own nested locations are merged against it.
 */
static char *
ngx_http_merge_locations(ngx_conf_t *cf, ngx_queue_t *locations,
    void **loc_conf, ngx_http_module_t *module, ngx_uint_t ctx_index)
{
    char                       *rv;
    ngx_queue_t                *q;
    ngx_http_conf_ctx_t        *ctx, saved;
    ngx_http_core_loc_conf_t   *clcf;
    ngx_http_location_queue_t  *lq;

    if (locations == NULL) {
        return NGX_CONF_OK;
    }

    ctx = (ngx_http_conf_ctx_t *) cf->ctx;
    saved = *ctx;

    for (q = ngx_queue_head(locations);
         q != ngx_queue_sentinel(locations);
         q = ngx_queue_next(q))
    {
        lq = (ngx_http_location_queue_t *) q;

        clcf = lq->exact ?
lq->exact : lq->inclusive;
        ctx->loc_conf = clcf->loc_conf;

        rv = module->merge_loc_conf(cf, loc_conf[ctx_index],
                                    clcf->loc_conf[ctx_index]);
        if (rv != NGX_CONF_OK) {
            return rv;
        }

        rv = ngx_http_merge_locations(cf, clcf->locations, clcf->loc_conf,
                                      module, ctx_index);
        if (rv != NGX_CONF_OK) {
            return rv;
        }
    }

    *ctx = saved;

    return NGX_CONF_OK;
}


/*
 * Sorts a location queue with ngx_http_cmp_locations() and splits off
 * the special location kinds, which the sort has moved to the tail:
 * noname locations are cut away, named locations ("@name") are collected
 * into cscf->named_locations, and (with PCRE) regex locations into
 * pclcf->regex_locations.  Recurses into nested locations first.
 * Only static (exact/prefix) locations remain in the queue afterwards.
 */
static ngx_int_t
ngx_http_init_locations(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf,
    ngx_http_core_loc_conf_t *pclcf)
{
    ngx_uint_t                   n;
    ngx_queue_t                 *q, *locations, *named, tail;
    ngx_http_core_loc_conf_t    *clcf;
    ngx_http_location_queue_t   *lq;
    ngx_http_core_loc_conf_t   **clcfp;
#if (NGX_PCRE)
    ngx_uint_t                   r;
    ngx_queue_t                 *regex;
#endif

    locations = pclcf->locations;

    if (locations == NULL) {
        return NGX_OK;
    }

    ngx_queue_sort(locations, ngx_http_cmp_locations);

    named = NULL;
    n = 0;
#if (NGX_PCRE)
    regex = NULL;
    r = 0;
#endif

    for (q = ngx_queue_head(locations);
         q != ngx_queue_sentinel(locations);
         q = ngx_queue_next(q))
    {
        lq = (ngx_http_location_queue_t *) q;

        clcf = lq->exact ? lq->exact : lq->inclusive;

        /* NULL cscf: named locations are not collected below this level */

        if (ngx_http_init_locations(cf, NULL, clcf) != NGX_OK) {
            return NGX_ERROR;
        }

#if (NGX_PCRE)

        if (clcf->regex) {
            r++;

            if (regex == NULL) {
                regex = q;
            }

            continue;
        }

#endif

        if (clcf->named) {
            n++;

            if (named == NULL) {
                named = q;
            }

            continue;
        }

        if (clcf->noname) {
            break;
        }
    }

    if (q != ngx_queue_sentinel(locations)) {
        /* cut off the noname tail; "tail" is deliberately discarded */
        ngx_queue_split(locations, q, &tail);
    }

    if (named) {
        clcfp = ngx_palloc(cf->pool,
                           (n + 1) * sizeof(ngx_http_core_loc_conf_t *));
        if (clcfp == NULL) {
            return NGX_ERROR;
        }

        cscf->named_locations = clcfp;

        for (q = named;
             q != ngx_queue_sentinel(locations);
             q = ngx_queue_next(q))
        {
            lq = (ngx_http_location_queue_t *) q;

            *(clcfp++) = lq->exact;
        }

        *clcfp = NULL;

        ngx_queue_split(locations, named, &tail);
    }

#if (NGX_PCRE)

    if (regex) {

        clcfp = ngx_palloc(cf->pool,
                           (r + 1) * sizeof(ngx_http_core_loc_conf_t *));
        if (clcfp == NULL) {
            return NGX_ERROR;
        }

        pclcf->regex_locations = clcfp;

        for (q = regex;
             q != ngx_queue_sentinel(locations);
             q = ngx_queue_next(q))
        {
            lq = (ngx_http_location_queue_t *) q;

            *(clcfp++) = lq->exact;
        }

        *clcfp = NULL;

        ngx_queue_split(locations, regex, &tail);
    }

#endif

    return NGX_OK;
}


/*
 * Builds the static location tree for a {server,location} level:
 * joins "= /x" with "/x" entries, groups common-prefix locations into
 * per-node lists, and constructs the ternary search tree used at
 * request time.  Recurses into nested locations first.
 */
static ngx_int_t
ngx_http_init_static_location_trees(ngx_conf_t *cf,
    ngx_http_core_loc_conf_t *pclcf)
{
    ngx_queue_t                *q, *locations;
    ngx_http_core_loc_conf_t   *clcf;
    ngx_http_location_queue_t  *lq;

    locations = pclcf->locations;

    if (locations == NULL) {
        return NGX_OK;
    }

    if (ngx_queue_empty(locations)) {
        return NGX_OK;
    }

    for (q = ngx_queue_head(locations);
         q != ngx_queue_sentinel(locations);
         q = ngx_queue_next(q))
    {
        lq = (ngx_http_location_queue_t *) q;

        clcf = lq->exact ?
lq->exact : lq->inclusive;

        if (ngx_http_init_static_location_trees(cf, clcf) != NGX_OK) {
            return NGX_ERROR;
        }
    }

    if (ngx_http_join_exact_locations(cf, locations) != NGX_OK) {
        return NGX_ERROR;
    }

    ngx_http_create_locations_list(locations, ngx_queue_head(locations));

    pclcf->static_locations = ngx_http_create_locations_tree(cf, locations, 0);
    if (pclcf->static_locations == NULL) {
        return NGX_ERROR;
    }

    return NGX_OK;
}


/*
 * Appends clcf to *locations (creating the queue head on first use).
 * Exact-match, regex, named and noname locations go into lq->exact;
 * plain prefix locations into lq->inclusive.  The config file name and
 * line are recorded for duplicate-location diagnostics.
 */
ngx_int_t
ngx_http_add_location(ngx_conf_t *cf, ngx_queue_t **locations,
    ngx_http_core_loc_conf_t *clcf)
{
    ngx_http_location_queue_t  *lq;

    if (*locations == NULL) {
        *locations = ngx_palloc(cf->temp_pool,
                                sizeof(ngx_http_location_queue_t));
        if (*locations == NULL) {
            return NGX_ERROR;
        }

        ngx_queue_init(*locations);
    }

    lq = ngx_palloc(cf->temp_pool, sizeof(ngx_http_location_queue_t));
    if (lq == NULL) {
        return NGX_ERROR;
    }

    if (clcf->exact_match
#if (NGX_PCRE)
        || clcf->regex
#endif
        || clcf->named || clcf->noname)
    {
        lq->exact = clcf;
        lq->inclusive = NULL;

    } else {
        lq->exact = NULL;
        lq->inclusive = clcf;
    }

    lq->name = &clcf->name;
    lq->file_name = cf->conf_file->file.name.data;
    lq->line = cf->conf_file->line;

    ngx_queue_init(&lq->list);

    ngx_queue_insert_tail(*locations, &lq->queue);

    return NGX_OK;
}


/*
 * Sort callback for location queues.  Resulting order: static
 * (exact/prefix) locations sorted by name, then regex locations in
 * configuration order, then named locations sorted by name, then
 * noname locations in configuration order.  An exact match sorts
 * before the identically named prefix location.
 */
static ngx_int_t
ngx_http_cmp_locations(const ngx_queue_t *one, const ngx_queue_t *two)
{
    ngx_int_t                   rc;
    ngx_http_core_loc_conf_t   *first, *second;
    ngx_http_location_queue_t  *lq1, *lq2;

    lq1 = (ngx_http_location_queue_t *) one;
    lq2 = (ngx_http_location_queue_t *) two;

    first = lq1->exact ? lq1->exact : lq1->inclusive;
    second = lq2->exact ? lq2->exact : lq2->inclusive;

    if (first->noname && !second->noname) {
        /* shift no named locations to the end */
        return 1;
    }

    if (!first->noname && second->noname) {
        /* shift no named locations to the end */
        return -1;
    }

    if (first->noname || second->noname) {
        /* do not sort no named locations */
        return 0;
    }

    if (first->named && !second->named) {
        /* shift named locations to the end */
        return 1;
    }

    if (!first->named && second->named) {
        /* shift named locations to the end */
        return -1;
    }

    if (first->named && second->named) {
        return ngx_strcmp(first->name.data, second->name.data);
    }

#if (NGX_PCRE)

    if (first->regex && !second->regex) {
        /* shift the regex matches to the end */
        return 1;
    }

    if (!first->regex && second->regex) {
        /* shift the regex matches to the end */
        return -1;
    }

    if (first->regex || second->regex) {
        /* do not sort the regex matches */
        return 0;
    }

#endif

    rc = ngx_filename_cmp(first->name.data, second->name.data,
                          ngx_min(first->name.len, second->name.len) + 1);

    if (rc == 0 && !first->exact_match && second->exact_match) {
        /* an exact match must be before the same inclusive one */
        return 1;
    }

    return rc;
}


/*
 * Collapses adjacent queue entries with the same name (the queue is
 * already sorted) so that an "= /x" exact entry also carries the "/x"
 * prefix conf in lq->inclusive.  Two exact or two inclusive entries
 * with the same name are a configuration error.
 */
static ngx_int_t
ngx_http_join_exact_locations(ngx_conf_t *cf, ngx_queue_t *locations)
{
    ngx_queue_t                *q, *x;
    ngx_http_location_queue_t  *lq, *lx;

    q = ngx_queue_head(locations);

    while (q != ngx_queue_last(locations)) {

        x = ngx_queue_next(q);

        lq = (ngx_http_location_queue_t *) q;
        lx = (ngx_http_location_queue_t *) x;

        if (lq->name->len == lx->name->len
            && ngx_filename_cmp(lq->name->data, lx->name->data, lx->name->len)
               == 0)
        {
            if ((lq->exact && lx->exact) || (lq->inclusive && lx->inclusive)) {
                ngx_log_error(NGX_LOG_EMERG, cf->log, 0,
                              "duplicate location \"%V\" in %s:%ui",
                              lx->name, lx->file_name, lx->line);

                return NGX_ERROR;
            }

            lq->inclusive = lx->inclusive;

            ngx_queue_remove(x);

            continue;
        }

        q = ngx_queue_next(q);
    }

    return NGX_OK;
}


/*
 * Regroups a sorted, joined location queue: every location whose name
 * starts with the name of an inclusive (prefix) location q is moved
 * from the main queue into q's lq->list, recursively.  The result is
 * consumed by ngx_http_create_locations_tree().
 */
static void
ngx_http_create_locations_list(ngx_queue_t *locations, ngx_queue_t *q)
{
    u_char                     *name;
    size_t                      len;
    ngx_queue_t                *x, tail;
ngx_http_location_queue_t  *lq, *lx;

    if (q == ngx_queue_last(locations)) {
        return;
    }

    lq = (ngx_http_location_queue_t *) q;

    if (lq->inclusive == NULL) {
        /* an exact-only entry cannot be a prefix of later entries */
        ngx_http_create_locations_list(locations, ngx_queue_next(q));
        return;
    }

    len = lq->name->len;
    name = lq->name->data;

    /* find the first entry that does not start with lq's name */

    for (x = ngx_queue_next(q);
         x != ngx_queue_sentinel(locations);
         x = ngx_queue_next(x))
    {
        lx = (ngx_http_location_queue_t *) x;

        if (len > lx->name->len
            || ngx_filename_cmp(name, lx->name->data, len) != 0)
        {
            break;
        }
    }

    q = ngx_queue_next(q);

    if (q == x) {
        /* no entries share lq's prefix */
        ngx_http_create_locations_list(locations, x);
        return;
    }

    /* move the [q, x) range of same-prefix entries into lq->list */

    ngx_queue_split(locations, q, &tail);
    ngx_queue_add(&lq->list, &tail);

    if (x == ngx_queue_sentinel(locations)) {
        ngx_http_create_locations_list(&lq->list,
                                       ngx_queue_head(&lq->list));
        return;
    }

    /* give the remainder starting at x back to the main queue */

    ngx_queue_split(&lq->list, x, &tail);
    ngx_queue_add(locations, &tail);

    ngx_http_create_locations_list(&lq->list, ngx_queue_head(&lq->list));

    ngx_http_create_locations_list(locations, x);
}


/*
 * to keep cache locality for left leaf nodes, allocate nodes in following
 * order: node, left subtree, right subtree, inclusive subtree
 */

/*
 * Recursively builds the static location search tree from a grouped
 * queue.  The node for the queue's middle element stores only the name
 * suffix beyond "prefix" (flexible trailing array); left/right cover
 * lexicographically smaller/larger names and "tree" the common-prefix
 * list built by ngx_http_create_locations_list().  Returns NULL on
 * allocation failure.
 */
static ngx_http_location_tree_node_t *
ngx_http_create_locations_tree(ngx_conf_t *cf, ngx_queue_t *locations,
    size_t prefix)
{
    size_t                          len;
    ngx_queue_t                    *q, tail;
    ngx_http_location_queue_t      *lq;
    ngx_http_location_tree_node_t  *node;

    q = ngx_queue_middle(locations);

    lq = (ngx_http_location_queue_t *) q;
    len = lq->name->len - prefix;

    node = ngx_palloc(cf->pool,
                      offsetof(ngx_http_location_tree_node_t, name) + len);
    if (node == NULL) {
        return NULL;
    }

    node->left = NULL;
    node->right = NULL;
    node->tree = NULL;
    node->exact = lq->exact;
    node->inclusive = lq->inclusive;

    node->auto_redirect = (u_char) ((lq->exact && lq->exact->auto_redirect)
                           || (lq->inclusive && lq->inclusive->auto_redirect));

    node->len = (u_char) len;
    ngx_memcpy(node->name, &lq->name->data[prefix], len);

    ngx_queue_split(locations, q, &tail);

    if (ngx_queue_empty(locations)) {
        /*
         * ngx_queue_split() insures that if left part is empty,
         * then right one is empty too
         */
        goto inclusive;
    }

    node->left = ngx_http_create_locations_tree(cf, locations, prefix);
    if (node->left == NULL) {
        return NULL;
    }

    ngx_queue_remove(q);

    if (ngx_queue_empty(&tail)) {
        goto inclusive;
    }

    node->right = ngx_http_create_locations_tree(cf, &tail, prefix);
    if (node->right == NULL) {
        return NULL;
    }

inclusive:

    if (ngx_queue_empty(&lq->list)) {
        return node;
    }

    node->tree = ngx_http_create_locations_tree(cf, &lq->list, prefix + len);
    if (node->tree == NULL) {
        return NULL;
    }

    return node;
}


/*
 * Registers a "listen" directive: finds or creates the entry for the
 * address family and port in cmcf->ports and delegates to
 * ngx_http_add_addresses()/ngx_http_add_address().  AF_UNIX sockets
 * are keyed under port 0.
 */
ngx_int_t
ngx_http_add_listen(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf,
    ngx_http_listen_opt_t *lsopt)
{
    in_port_t                   p;
    ngx_uint_t                  i;
    struct sockaddr            *sa;
    struct sockaddr_in         *sin;
    ngx_http_conf_port_t       *port;
    ngx_http_core_main_conf_t  *cmcf;
#if (NGX_HAVE_INET6)
    struct sockaddr_in6        *sin6;
#endif

    cmcf = ngx_http_conf_get_module_main_conf(cf, ngx_http_core_module);

    if (cmcf->ports == NULL) {
        cmcf->ports = ngx_array_create(cf->temp_pool, 2,
                                       sizeof(ngx_http_conf_port_t));
        if (cmcf->ports == NULL) {
            return NGX_ERROR;
        }
    }

    sa = &lsopt->u.sockaddr;

    switch (sa->sa_family) {

#if (NGX_HAVE_INET6)
    case AF_INET6:
        sin6 = &lsopt->u.sockaddr_in6;
        p = sin6->sin6_port;
        break;
#endif

#if (NGX_HAVE_UNIX_DOMAIN)
    case AF_UNIX:
        p = 0;
        break;
#endif

    default: /* AF_INET */
        sin = &lsopt->u.sockaddr_in;
        p = sin->sin_port;
        break;
    }

    port = cmcf->ports->elts;
    for (i = 0; i < cmcf->ports->nelts; i++) {

        if (p != port[i].port || sa->sa_family != port[i].family) {
            continue;
        }

        /* a port is already in the port list */

        return ngx_http_add_addresses(cf, cscf, &port[i], lsopt);
    }

    /* add a port to the port list */

    port = ngx_array_push(cmcf->ports);
    if (port == NULL) {
        return NGX_ERROR;
    }

    port->family = sa->sa_family;
    port->port = p;
    port->addrs.elts = NULL;

    return ngx_http_add_address(cf, cscf, port, lsopt);
}


/*
 * Looks up lsopt's address within an existing port entry.  On a match,
 * attaches the server to that address and reconciles listen options
 * (default_server, proxy_protocol, ssl, http2); otherwise falls through
 * to ngx_http_add_address() to create a new address entry.
 */
static ngx_int_t
ngx_http_add_addresses(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf,
    ngx_http_conf_port_t *port, ngx_http_listen_opt_t *lsopt)
{
    u_char                *p;
    size_t                 len, off;
ngx_uint_t             i, default_server, proxy_protocol;
    struct sockaddr       *sa;
    ngx_http_conf_addr_t  *addr;
#if (NGX_HAVE_UNIX_DOMAIN)
    struct sockaddr_un    *saun;
#endif
#if (NGX_HTTP_SSL)
    ngx_uint_t             ssl;
#endif
#if (NGX_HTTP_V2)
    ngx_uint_t             http2;
#endif

    /*
     * we cannot compare whole sockaddr struct's as kernel
     * may fill some fields in inherited sockaddr struct's
     */

    sa = &lsopt->u.sockaddr;

    switch (sa->sa_family) {

#if (NGX_HAVE_INET6)
    case AF_INET6:
        off = offsetof(struct sockaddr_in6, sin6_addr);
        len = 16;
        break;
#endif

#if (NGX_HAVE_UNIX_DOMAIN)
    case AF_UNIX:
        off = offsetof(struct sockaddr_un, sun_path);
        /* saun is never dereferenced; sizeof is evaluated at compile time */
        len = sizeof(saun->sun_path);
        break;
#endif

    default: /* AF_INET */
        off = offsetof(struct sockaddr_in, sin_addr);
        len = 4;
        break;
    }

    p = lsopt->u.sockaddr_data + off;

    addr = port->addrs.elts;

    for (i = 0; i < port->addrs.nelts; i++) {

        if (ngx_memcmp(p, addr[i].opt.u.sockaddr_data + off, len) != 0) {
            continue;
        }

        /* the address is already in the address list */

        if (ngx_http_add_server(cf, cscf, &addr[i]) != NGX_OK) {
            return NGX_ERROR;
        }

        /* preserve default_server bit during listen options overwriting */
        default_server = addr[i].opt.default_server;

        /* these flags are sticky: set by any of the listen directives */

        proxy_protocol = lsopt->proxy_protocol || addr[i].opt.proxy_protocol;

#if (NGX_HTTP_SSL)
        ssl = lsopt->ssl || addr[i].opt.ssl;
#endif
#if (NGX_HTTP_V2)
        http2 = lsopt->http2 || addr[i].opt.http2;
#endif

        if (lsopt->set) {

            /* only one listen for an address:port may carry socket options */

            if (addr[i].opt.set) {
                ngx_conf_log_error(NGX_LOG_EMERG, cf, 0,
                                   "duplicate listen options for %s",
                                   addr[i].opt.addr);
                return NGX_ERROR;
            }

            addr[i].opt = *lsopt;
        }

        /* check the duplicate "default" server for this address:port */

        if (lsopt->default_server) {

            if (default_server) {
                ngx_conf_log_error(NGX_LOG_EMERG, cf, 0,
                                   "a duplicate default server for %s",
                                   addr[i].opt.addr);
                return NGX_ERROR;
            }

            default_server = 1;
            addr[i].default_server = cscf;
        }

        addr[i].opt.default_server = default_server;
        addr[i].opt.proxy_protocol = proxy_protocol;

#if (NGX_HTTP_SSL)
        addr[i].opt.ssl = ssl;
#endif
#if (NGX_HTTP_V2)
        addr[i].opt.http2 = http2;
#endif

        return NGX_OK;
    }

    /* add the address to the addresses list that bound to this port */

    return ngx_http_add_address(cf, cscf, port, lsopt);
}


/*
 * add the server address, the server names and the server core module
 * configurations to the port list
 */

static ngx_int_t
ngx_http_add_address(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf,
    ngx_http_conf_port_t *port, ngx_http_listen_opt_t *lsopt)
{
    ngx_http_conf_addr_t  *addr;

    if (port->addrs.elts == NULL) {
        if (ngx_array_init(&port->addrs, cf->temp_pool, 4,
                           sizeof(ngx_http_conf_addr_t))
            != NGX_OK)
        {
            return NGX_ERROR;
        }
    }

#if (NGX_HTTP_V2 && NGX_HTTP_SSL                                              \
     && !defined TLSEXT_TYPE_application_layer_protocol_negotiation           \
     && !defined TLSEXT_TYPE_next_proto_neg)

    if (lsopt->http2 && lsopt->ssl) {
        ngx_conf_log_error(NGX_LOG_WARN, cf, 0,
                           "nginx was built with OpenSSL that lacks ALPN "
                           "and NPN support, HTTP/2 is not enabled for %s",
                           lsopt->addr);
    }

#endif

    addr = ngx_array_push(&port->addrs);
    if (addr == NULL) {
        return NGX_ERROR;
    }

    /* server-name hashes are filled in later by ngx_http_server_names() */

    addr->opt = *lsopt;
    addr->hash.buckets = NULL;
    addr->hash.size = 0;
    addr->wc_head = NULL;
    addr->wc_tail = NULL;
#if (NGX_PCRE)
    addr->nregex = 0;
    addr->regex = NULL;
#endif
    addr->default_server = cscf;
    addr->servers.elts = NULL;

    return ngx_http_add_server(cf, cscf, addr);
}


/* add the server core module configuration to the address:port */

static ngx_int_t
ngx_http_add_server(ngx_conf_t *cf, ngx_http_core_srv_conf_t *cscf,
    ngx_http_conf_addr_t *addr)
{
    ngx_uint_t                  i;
    ngx_http_core_srv_conf_t  **server;

    if (addr->servers.elts == NULL) {
        if (ngx_array_init(&addr->servers, cf->temp_pool, 4,
                           sizeof(ngx_http_core_srv_conf_t *))
            != NGX_OK)
        {
            return NGX_ERROR;
        }

    } else {
        /* the same server{} may not listen twice on one address:port */

        server = addr->servers.elts;
        for (i = 0; i < addr->servers.nelts; i++) {
            if (server[i] == cscf) {
                ngx_conf_log_error(NGX_LOG_EMERG, cf, 0,
                                   "a duplicate listen %s", addr->opt.addr);
                return NGX_ERROR;
            }
        }
    }

    server = ngx_array_push(&addr->servers);
    if (server == NULL) {
        return NGX_ERROR;
    }

    *server = cscf;

    return NGX_OK;
}


static ngx_int_t
ngx_http_optimize_servers(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf,
    ngx_array_t *ports)
{
    ngx_uint_t             p, a;
    ngx_http_conf_port_t  *port;
    ngx_http_conf_addr_t  *addr;

    /*
     * For every collected port: sort its addresses (explicit bind first,
     * wildcard last), build the server-name hashes where needed, and
     * create the listening sockets.
     */

    if (ports == NULL) {
        return NGX_OK;
    }

    port = ports->elts;
    for (p = 0; p < ports->nelts; p++) {

        ngx_sort(port[p].addrs.elts, (size_t) port[p].addrs.nelts,
                 sizeof(ngx_http_conf_addr_t), ngx_http_cmp_conf_addrs);

        /*
         * check whether all name-based servers have the same
         * configuration as a default server for given address:port
         */

        addr = port[p].addrs.elts;
        for (a = 0; a < port[p].addrs.nelts; a++) {

            if (addr[a].servers.nelts > 1
#if (NGX_PCRE)
                || addr[a].default_server->captures
#endif
               )
            {
                if (ngx_http_server_names(cf, cmcf, &addr[a]) != NGX_OK) {
                    return NGX_ERROR;
                }
            }
        }

        if (ngx_http_init_listening(cf, &port[p]) != NGX_OK) {
            return NGX_ERROR;
        }
    }

    return NGX_OK;
}


/*
 * Builds the virtual-server lookup structures for one address:port:
 * the exact-name hash (addr->hash), the head/tail wildcard hashes
 * (addr->wc_head/wc_tail) and, with PCRE, the addr->regex array.
 * Uses a temporary pool that is destroyed before returning.
 */
static ngx_int_t
ngx_http_server_names(ngx_conf_t *cf, ngx_http_core_main_conf_t *cmcf,
    ngx_http_conf_addr_t *addr)
{
    ngx_int_t                   rc;
    ngx_uint_t                  n, s;
    ngx_hash_init_t             hash;
    ngx_hash_keys_arrays_t      ha;
    ngx_http_server_name_t     *name;
    ngx_http_core_srv_conf_t  **cscfp;
#if (NGX_PCRE)
    ngx_uint_t                  regex, i;

    regex = 0;
#endif

    ngx_memzero(&ha, sizeof(ngx_hash_keys_arrays_t));

    ha.temp_pool = ngx_create_pool(NGX_DEFAULT_POOL_SIZE, cf->log);
    if (ha.temp_pool == NULL) {
        return NGX_ERROR;
    }

    ha.pool = cf->pool;

    if (ngx_hash_keys_array_init(&ha, NGX_HASH_LARGE) != NGX_OK) {
        goto failed;
    }

    cscfp = addr->servers.elts;

    for (s = 0; s < addr->servers.nelts; s++) {

        name = cscfp[s]->server_names.elts;

        for (n = 0; n < cscfp[s]->server_names.nelts; n++) {

#if (NGX_PCRE)
            if (name[n].regex) {
                regex++;
                continue;
            }
#endif

            rc = ngx_hash_add_key(&ha, &name[n].name, name[n].server,
                                  NGX_HASH_WILDCARD_KEY);

            if (rc == NGX_ERROR) {
                return NGX_ERROR;
            }

            if (rc == NGX_DECLINED) {
                ngx_log_error(NGX_LOG_EMERG, cf->log, 0,
                              "invalid server name or wildcard \"%V\" on %s",
                              &name[n].name, addr->opt.addr);
                return NGX_ERROR;
            }

            if (rc == NGX_BUSY) {
                /* first registration wins; the duplicate is only warned */
                ngx_log_error(NGX_LOG_WARN, cf->log, 0,
                              "conflicting server name \"%V\" on %s, ignored",
                              &name[n].name, addr->opt.addr);
            }
        }
    }

    hash.key = ngx_hash_key_lc;
    hash.max_size = cmcf->server_names_hash_max_size;
    hash.bucket_size = cmcf->server_names_hash_bucket_size;
    hash.name = "server_names_hash";
    hash.pool = cf->pool;

    if (ha.keys.nelts) {
        hash.hash = &addr->hash;
        hash.temp_pool = NULL;

        if (ngx_hash_init(&hash, ha.keys.elts, ha.keys.nelts) != NGX_OK) {
            goto failed;
        }
    }

    if (ha.dns_wc_head.nelts) {

        ngx_qsort(ha.dns_wc_head.elts, (size_t) ha.dns_wc_head.nelts,
                  sizeof(ngx_hash_key_t), ngx_http_cmp_dns_wildcards);

        hash.hash = NULL;
        hash.temp_pool = ha.temp_pool;

        if (ngx_hash_wildcard_init(&hash, ha.dns_wc_head.elts,
                                   ha.dns_wc_head.nelts)
            != NGX_OK)
        {
            goto failed;
        }

        addr->wc_head = (ngx_hash_wildcard_t *) hash.hash;
    }

    if (ha.dns_wc_tail.nelts) {

        ngx_qsort(ha.dns_wc_tail.elts, (size_t) ha.dns_wc_tail.nelts,
                  sizeof(ngx_hash_key_t), ngx_http_cmp_dns_wildcards);

        hash.hash = NULL;
        hash.temp_pool = ha.temp_pool;

        if (ngx_hash_wildcard_init(&hash, ha.dns_wc_tail.elts,
                                   ha.dns_wc_tail.nelts)
            != NGX_OK)
        {
            goto failed;
        }

        addr->wc_tail = (ngx_hash_wildcard_t *) hash.hash;
    }

    ngx_destroy_pool(ha.temp_pool);

#if (NGX_PCRE)

    if (regex == 0) {
        return NGX_OK;
    }

    addr->nregex = regex;

    addr->regex = ngx_palloc(cf->pool,
                             regex * sizeof(ngx_http_server_name_t));
    if (addr->regex == NULL) {
        return NGX_ERROR;
    }

    i = 0;

    for (s = 0; s < addr->servers.nelts; s++) {

        name = cscfp[s]->server_names.elts;

        for (n = 0; n < cscfp[s]->server_names.nelts; n++) {
            if (name[n].regex) {
                addr->regex[i++] = name[n];
            }
        }
    }

#endif

    return NGX_OK;

failed:

    ngx_destroy_pool(ha.temp_pool);

    return NGX_ERROR;
}


/*
 * ngx_sort() callback ordering the addresses of one port:
 * explicit bind() addresses first, then implicit ones, wildcard last.
 */
static ngx_int_t
ngx_http_cmp_conf_addrs(const void *one, const void *two)
{
    ngx_http_conf_addr_t  *first, *second;

    first = (ngx_http_conf_addr_t *) one;
    second = (ngx_http_conf_addr_t *) two;

    if (first->opt.wildcard) {
        /* a wildcard address must be the last resort, shift it to the end */
        return 1;
    }

    if (second->opt.wildcard) {
        /* a wildcard address must be the
last resort, shift it to the end */
        return -1;
    }

    if (first->opt.bind && !second->opt.bind) {
        /* shift explicit bind()ed addresses to the start */
        return -1;
    }

    if (!first->opt.bind && second->opt.bind) {
        /* shift explicit bind()ed addresses to the start */
        return 1;
    }

    /* do not sort by default */

    return 0;
}


/* qsort() callback: DNS-wise comparison of wildcard hash keys */

static int ngx_libc_cdecl
ngx_http_cmp_dns_wildcards(const void *one, const void *two)
{
    ngx_hash_key_t  *first, *second;

    first = (ngx_hash_key_t *) one;
    second = (ngx_hash_key_t *) two;

    return ngx_dns_strcmp(first->key.data, second->key.data);
}


/*
 * Creates the listening sockets for one port.  A wildcard address (if
 * present, always sorted last) absorbs all implicit bindings: such
 * addresses are skipped here and instead served through the wildcard
 * socket, whose hport->naddrs spans the remaining address list so the
 * actual server is resolved per-connection by getsockname().
 */
static ngx_int_t
ngx_http_init_listening(ngx_conf_t *cf, ngx_http_conf_port_t *port)
{
    ngx_uint_t                 i, last, bind_wildcard;
    ngx_listening_t           *ls;
    ngx_http_port_t           *hport;
    ngx_http_conf_addr_t      *addr;

    addr = port->addrs.elts;
    last = port->addrs.nelts;

    /*
     * If there is a binding to an "*:port" then we need to bind() to
     * the "*:port" only and ignore other implicit bindings.  The bindings
     * have been already sorted: explicit bindings are on the start, then
     * implicit bindings go, and wildcard binding is in the end.
     */

    if (addr[last - 1].opt.wildcard) {
        addr[last - 1].opt.bind = 1;
        bind_wildcard = 1;

    } else {
        bind_wildcard = 0;
    }

    i = 0;

    while (i < last) {

        if (bind_wildcard && !addr[i].opt.bind) {
            /* implicit binding: left for the wildcard socket to cover */
            i++;
            continue;
        }

        ls = ngx_http_add_listening(cf, &addr[i]);
        if (ls == NULL) {
            return NGX_ERROR;
        }

        hport = ngx_pcalloc(cf->pool, sizeof(ngx_http_port_t));
        if (hport == NULL) {
            return NGX_ERROR;
        }

        ls->servers = hport;

        /* addr[0..i] are the skipped implicit addresses plus this one */

        hport->naddrs = i + 1;

        switch (ls->sockaddr->sa_family) {

#if (NGX_HAVE_INET6)
        case AF_INET6:
            if (ngx_http_add_addrs6(cf, hport, addr) != NGX_OK) {
                return NGX_ERROR;
            }
            break;
#endif
        default: /* AF_INET */
            if (ngx_http_add_addrs(cf, hport, addr) != NGX_OK) {
                return NGX_ERROR;
            }
            break;
        }

        if (ngx_clone_listening(cf, ls) != NGX_OK) {
            return NGX_ERROR;
        }

        addr++;
        last--;
    }

    return NGX_OK;
}


/*
 * Creates an ngx_listening_t for one configured address and copies the
 * listen options and the default server's connection settings onto it.
 */
static ngx_listening_t *
ngx_http_add_listening(ngx_conf_t *cf, ngx_http_conf_addr_t *addr)
{
    ngx_listening_t           *ls;
    ngx_http_core_loc_conf_t  *clcf;
    ngx_http_core_srv_conf_t  *cscf;

    ls = ngx_create_listening(cf, &addr->opt.u.sockaddr, addr->opt.socklen);
    if (ls == NULL) {
        return NULL;
    }

    ls->addr_ntop = 1;

    ls->handler = ngx_http_init_connection;

    cscf = addr->default_server;
    ls->pool_size = cscf->connection_pool_size;
    ls->post_accept_timeout = cscf->client_header_timeout;

    clcf = cscf->ctx->loc_conf[ngx_http_core_module.ctx_index];

    ls->logp = clcf->error_log;
    ls->log.data = &ls->addr_text;
    ls->log.handler = ngx_accept_log_error;

#if (NGX_WIN32)
    {
    ngx_iocp_conf_t  *iocpcf = NULL;

    if (ngx_get_conf(cf->cycle->conf_ctx, ngx_events_module)) {
        iocpcf = ngx_event_get_conf(cf->cycle->conf_ctx, ngx_iocp_module);
    }
    if (iocpcf && iocpcf->acceptex_read) {
        ls->post_accept_buffer_size = cscf->client_header_buffer_size;
    }
    }
#endif

    ls->backlog = addr->opt.backlog;
    ls->rcvbuf = addr->opt.rcvbuf;
    ls->sndbuf = addr->opt.sndbuf;

    ls->keepalive = addr->opt.so_keepalive;
#if (NGX_HAVE_KEEPALIVE_TUNABLE)
    ls->keepidle = addr->opt.tcp_keepidle;
    ls->keepintvl = addr->opt.tcp_keepintvl;
    ls->keepcnt = addr->opt.tcp_keepcnt;
#endif #if (NGX_HAVE_DEFERRED_ACCEPT && defined SO_ACCEPTFILTER) ls->accept_filter = addr->opt.accept_filter; #endif #if (NGX_HAVE_DEFERRED_ACCEPT && defined TCP_DEFER_ACCEPT) ls->deferred_accept = addr->opt.deferred_accept; #endif #if (NGX_HAVE_INET6 && defined IPV6_V6ONLY) ls->ipv6only = addr->opt.ipv6only; #endif #if (NGX_HAVE_SETFIB) ls->setfib = addr->opt.setfib; #endif #if (NGX_HAVE_TCP_FASTOPEN) ls->fastopen = addr->opt.fastopen; #endif #if (NGX_HAVE_REUSEPORT) ls->reuseport = addr->opt.reuseport; #endif return ls; } static ngx_int_t ngx_http_add_addrs(ngx_conf_t *cf, ngx_http_port_t *hport, ngx_http_conf_addr_t *addr) { ngx_uint_t i; ngx_http_in_addr_t *addrs; struct sockaddr_in *sin; ngx_http_virtual_names_t *vn; hport->addrs = ngx_pcalloc(cf->pool, hport->naddrs * sizeof(ngx_http_in_addr_t)); if (hport->addrs == NULL) { return NGX_ERROR; } addrs = hport->addrs; for (i = 0; i < hport->naddrs; i++) { sin = &addr[i].opt.u.sockaddr_in; addrs[i].addr = sin->sin_addr.s_addr; addrs[i].conf.default_server = addr[i].default_server; #if (NGX_HTTP_SSL) addrs[i].conf.ssl = addr[i].opt.ssl; #endif #if (NGX_HTTP_V2) addrs[i].conf.http2 = addr[i].opt.http2; #endif addrs[i].conf.proxy_protocol = addr[i].opt.proxy_protocol; if (addr[i].hash.buckets == NULL && (addr[i].wc_head == NULL || addr[i].wc_head->hash.buckets == NULL) && (addr[i].wc_tail == NULL || addr[i].wc_tail->hash.buckets == NULL) #if (NGX_PCRE) && addr[i].nregex == 0 #endif ) { continue; } vn = ngx_palloc(cf->pool, sizeof(ngx_http_virtual_names_t)); if (vn == NULL) { return NGX_ERROR; } addrs[i].conf.virtual_names = vn; vn->names.hash = addr[i].hash; vn->names.wc_head = addr[i].wc_head; vn->names.wc_tail = addr[i].wc_tail; #if (NGX_PCRE) vn->nregex = addr[i].nregex; vn->regex = addr[i].regex; #endif } return NGX_OK; } #if (NGX_HAVE_INET6) static ngx_int_t ngx_http_add_addrs6(ngx_conf_t *cf, ngx_http_port_t *hport, ngx_http_conf_addr_t *addr) { ngx_uint_t i; ngx_http_in6_addr_t *addrs6; struct sockaddr_in6 
*sin6; ngx_http_virtual_names_t *vn; hport->addrs = ngx_pcalloc(cf->pool, hport->naddrs * sizeof(ngx_http_in6_addr_t)); if (hport->addrs == NULL) { return NGX_ERROR; } addrs6 = hport->addrs; for (i = 0; i < hport->naddrs; i++) { sin6 = &addr[i].opt.u.sockaddr_in6; addrs6[i].addr6 = sin6->sin6_addr; addrs6[i].conf.default_server = addr[i].default_server; #if (NGX_HTTP_SSL) addrs6[i].conf.ssl = addr[i].opt.ssl; #endif #if (NGX_HTTP_V2) addrs6[i].conf.http2 = addr[i].opt.http2; #endif if (addr[i].hash.buckets == NULL && (addr[i].wc_head == NULL || addr[i].wc_head->hash.buckets == NULL) && (addr[i].wc_tail == NULL || addr[i].wc_tail->hash.buckets == NULL) #if (NGX_PCRE) && addr[i].nregex == 0 #endif ) { continue; } vn = ngx_palloc(cf->pool, sizeof(ngx_http_virtual_names_t)); if (vn == NULL) { return NGX_ERROR; } addrs6[i].conf.virtual_names = vn; vn->names.hash = addr[i].hash; vn->names.wc_head = addr[i].wc_head; vn->names.wc_tail = addr[i].wc_tail; #if (NGX_PCRE) vn->nregex = addr[i].nregex; vn->regex = addr[i].regex; #endif } return NGX_OK; } #endif char * ngx_http_types_slot(ngx_conf_t *cf, ngx_command_t *cmd, void *conf) { char *p = conf; ngx_array_t **types; ngx_str_t *value, *default_type; ngx_uint_t i, n, hash; ngx_hash_key_t *type; types = (ngx_array_t **) (p + cmd->offset); if (*types == (void *) -1) { return NGX_CONF_OK; } default_type = cmd->post; if (*types == NULL) { *types = ngx_array_create(cf->temp_pool, 1, sizeof(ngx_hash_key_t)); if (*types == NULL) { return NGX_CONF_ERROR; } if (default_type) { type = ngx_array_push(*types); if (type == NULL) { return NGX_CONF_ERROR; } type->key = *default_type; type->key_hash = ngx_hash_key(default_type->data, default_type->len); type->value = (void *) 4; } } value = cf->args->elts; for (i = 1; i < cf->args->nelts; i++) { if (value[i].len == 1 && value[i].data[0] == '*') { *types = (void *) -1; return NGX_CONF_OK; } hash = ngx_hash_strlow(value[i].data, value[i].data, value[i].len); value[i].data[value[i].len] = 
'\0'; type = (*types)->elts; for (n = 0; n < (*types)->nelts; n++) { if (ngx_strcmp(value[i].data, type[n].key.data) == 0) { ngx_conf_log_error(NGX_LOG_WARN, cf, 0, "duplicate MIME type \"%V\"", &value[i]); goto next; } } type = ngx_array_push(*types); if (type == NULL) { return NGX_CONF_ERROR; } type->key = value[i]; type->key_hash = hash; type->value = (void *) 4; next: continue; } return NGX_CONF_OK; } char * ngx_http_merge_types(ngx_conf_t *cf, ngx_array_t **keys, ngx_hash_t *types_hash, ngx_array_t **prev_keys, ngx_hash_t *prev_types_hash, ngx_str_t *default_types) { ngx_hash_init_t hash; if (*keys) { if (*keys == (void *) -1) { return NGX_CONF_OK; } hash.hash = types_hash; hash.key = NULL; hash.max_size = 2048; hash.bucket_size = 64; hash.name = "test_types_hash"; hash.pool = cf->pool; hash.temp_pool = NULL; if (ngx_hash_init(&hash, (*keys)->elts, (*keys)->nelts) != NGX_OK) { return NGX_CONF_ERROR; } return NGX_CONF_OK; } if (prev_types_hash->buckets == NULL) { if (*prev_keys == NULL) { if (ngx_http_set_default_types(cf, prev_keys, default_types) != NGX_OK) { return NGX_CONF_ERROR; } } else if (*prev_keys == (void *) -1) { *keys = *prev_keys; return NGX_CONF_OK; } hash.hash = prev_types_hash; hash.key = NULL; hash.max_size = 2048; hash.bucket_size = 64; hash.name = "test_types_hash"; hash.pool = cf->pool; hash.temp_pool = NULL; if (ngx_hash_init(&hash, (*prev_keys)->elts, (*prev_keys)->nelts) != NGX_OK) { return NGX_CONF_ERROR; } } *types_hash = *prev_types_hash; return NGX_CONF_OK; } ngx_int_t ngx_http_set_default_types(ngx_conf_t *cf, ngx_array_t **types, ngx_str_t *default_type) { ngx_hash_key_t *type; *types = ngx_array_create(cf->temp_pool, 1, sizeof(ngx_hash_key_t)); if (*types == NULL) { return NGX_ERROR; } while (default_type->len) { type = ngx_array_push(*types); if (type == NULL) { return NGX_ERROR; } type->key = *default_type; type->key_hash = ngx_hash_key(default_type->data, default_type->len); type->value = (void *) 4; default_type++; } return 
NGX_OK; }
freeletics/nginx
src/http/ngx_http.c
C
bsd-2-clause
53,294
class Beast < Formula desc "Bayesian Evolutionary Analysis Sampling Trees" homepage "http://beast.community/" url "https://github.com/beast-dev/beast-mcmc/archive/v1.10.4.tar.gz" sha256 "e2f8a30e4f695bf0e58ac3e94778459a1db6cd0d476556d86c563e4b6a1181f7" head "https://github.com/beast-dev/beast-mcmc.git" bottle do cellar :any_skip_relocation sha256 "3c894f422a0ee8e3a60a4fc4383b7c92e3373bcddf913034ef92541a346c5d8c" => :mojave sha256 "a5c4138c07edad9c5fe6cb2a24c50c3b7e77abfa206f31e0b6d2f6ac62f2fa5b" => :high_sierra sha256 "c3b2d7c0e17ee1072e0278ff8cfc4d27cee98450e3b23c29a2e0724c6278def8" => :sierra end depends_on "ant" => :build depends_on "beagle" depends_on :java => "1.7+" def install system "ant", "linux" libexec.install Dir["release/Linux/BEASTv*/*"] pkgshare.install_symlink libexec/"examples" bin.install_symlink Dir[libexec/"bin/*"] end test do cp pkgshare/"examples/TestXML/ClockModels/testUCRelaxedClockLogNormal.xml", testpath # Run fewer generations to speed up tests inreplace "testUCRelaxedClockLogNormal.xml", 'chainLength="10000000"', 'chainLength="100000"' system "#{bin}/beast", "testUCRelaxedClockLogNormal.xml" %w[ops log trees].each do |ext| output = "testUCRelaxedClockLogNormal." + ext assert_predicate testpath/output, :exist?, "Failed to create #{output}" end end end
adamliter/homebrew-core
Formula/beast.rb
Ruby
bsd-2-clause
1,450
{-# LANGUAGE TypeOperators, MultiParamTypeClasses, FlexibleInstances #-} {-# OPTIONS_GHC -Wall #-} module Mixin where import Prelude hiding (log) class a <: b where up :: a -> b instance (t1 <: t2) => (t -> t1) <: (t -> t2) where up f = up . f instance (t1 <: t2) => (t2 -> t) <: (t1 -> t) where up f = f . up type Class t = t -> t type Mixin s t = s -> t -> t new :: Class a -> a new f = let r = f r in r with :: (t <: s) => Class s -> Mixin s t -> Class t klass `with` mixin = \this -> mixin (klass (up this)) this -- The below provides an example. fib' :: Class (Int -> Int) fib' _ 1 = 1 fib' _ 2 = 1 fib' this n = this (n-1) + this (n-2) instance (Int, String) <: Int where up = fst logging :: Mixin (Int -> Int) (Int -> (Int, String)) logging super _ 1 = (super 1, "1") logging super _ 2 = (super 2, "2 1") logging super this n = (super n, show n ++ " " ++ log1 ++ " " ++ log2) where (_, log1) = this (n-1) (_, log2) = this (n-2) fibWithLogging :: Int -> (Int, String) fibWithLogging = new (fib' `with` logging)
bixuanzju/fcore
lib/Mixin.hs
Haskell
bsd-2-clause
1,062
/*! \file TextureOperations.h \author Gregory Diamos <gregory.diamos@gatech.edu> \date Tuesday September 29, 2009 \brief Implementations of texture operations. */ #ifndef TEXTURE_OPERATIONS_H_INCLUDED #define TEXTURE_OPERATIONS_H_INCLUDED #include <ocelot/ir/interface/Texture.h> #include <ocelot/trace/interface/TraceEvent.h> #include <cmath> #include <cfloat> #include <hydrazine/interface/Casts.h> #include <hydrazine/interface/debug.h> #include <iostream> namespace executive { /*! \brief A namespace for texture sampling instructions */ namespace tex { ir::PTXF64 wrap( ir::PTXF64 b, unsigned int limit, ir::Texture::AddressMode mode ); ir::PTXF32 channelReadF32( const ir::Texture& texture, unsigned int shift, unsigned int mask, unsigned int index ); template<typename D> D channelRead( const ir::Texture& texture, unsigned int shift, unsigned int mask, unsigned int index ) { unsigned int bits = texture.x + texture.y + texture.z + texture.w; unsigned int bytes = bits / 8; unsigned int offset = shift / 8; D value = *((D*)(((ir::PTXB8*) texture.data) + index*bytes + offset)); value &= mask; return value; } template<unsigned int dim> void getShiftAndMask( unsigned int &shift, ir::PTXB64& mask, const ir::Texture& texture ) { mask = 1; switch (dim) { case 0: mask <<= (texture.x); --mask; shift = 0; break; case 1: mask <<= (texture.y); --mask; shift = texture.x; break; case 2: mask <<= (texture.z); --mask; shift = texture.x + texture.y; break; case 3: mask <<= (texture.w); --mask; shift = texture.z + texture.y + texture.x; break; default: assert("Invalid texture index" == 0); break; } } /*! 
\brief sample in one dimension */ template<unsigned int dim, typename D, typename B> D sample( const ir::Texture& texture, B b0 ) { D d = 0; ir::PTXF64 b = ( ir::PTXF64 ) b0; ir::PTXB64 mask; unsigned int shift; getShiftAndMask<dim>(shift, mask, texture); if (texture.normalize) { b = b * texture.size.x; } if (texture.interpolation == ir::Texture::Nearest) { ir::PTXF64 index = (ir::PTXF64)b; unsigned int windex = wrap(index, texture.size.x, texture.addressMode[0]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXU32 result = channelRead<ir::PTXU32>(texture, shift, mask, windex); d = result; break; } case ir::Texture::Signed: { ir::PTXS32 result = channelRead<ir::PTXS32>(texture, shift, mask, windex); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, windex); d = result; break; } default: assert("Invalid texture data type" == 0); } } else { b -= 0.5f; ir::PTXF64 low = floor(b); ir::PTXF64 high = floor(b + 1); unsigned int wlow = wrap(low, texture.size.x, texture.addressMode[0]); unsigned int whigh = wrap(high, texture.size.x, texture.addressMode[0]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXU64 result = channelRead<ir::PTXU32>(texture, shift, mask, wlow) * (high - b); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh) * (b - low); d = result; break; } case ir::Texture::Signed: { ir::PTXS64 result = channelRead<ir::PTXS32>(texture, shift, mask, wlow) * (high - b); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh) * (b - low); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, wlow) * (high - b); result += channelReadF32(texture, shift, mask, whigh) * (b - low); d = result; break; } default: assert("Invalid texture data type" == 0); } } if(texture.normalizedFloat) { ir::PTXF32 f = ( d + 0.0 ) / (mask + 1); d = hydrazine::bit_cast< D >( f ); } return d; } /*! 
\brief sample in 2 dimensions */ template<unsigned int dim, typename D, typename B> D sample(const ir::Texture& texture, B b0, B b1) { D d = 0; ir::PTXF64 b[2] = { ( ir::PTXF64 ) b0, ( ir::PTXF64 ) b1 }; ir::PTXB64 mask; unsigned int shift; getShiftAndMask<dim>(shift, mask, texture); if (texture.normalize) { b[0] = b[0] * texture.size.x; b[1] = b[1] * texture.size.y; } if (texture.interpolation == ir::Texture::Nearest) { ir::PTXF64 index[2] = { ( ir::PTXF64 )(ir::PTXS64)b[0], ( ir::PTXF64 )(ir::PTXS64)b[1] }; unsigned int windex[2]; windex[0] = wrap(index[0], texture.size.x, texture.addressMode[0]); windex[1] = wrap(index[1], texture.size.y, texture.addressMode[1]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXU32 result = channelRead<ir::PTXU32>(texture, shift, mask, windex[0] + windex[1] * texture.size.x); d = result; break; } case ir::Texture::Signed: { ir::PTXS32 result = channelRead<ir::PTXS32>(texture, shift, mask, windex[0] + windex[1] * texture.size.x); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, windex[0] + windex[1] * texture.size.x); d = result; break; } default: assert("Invalid texture data type" == 0); } } else { b[0] -= 0.5f; b[1] -= 0.5f; ir::PTXF64 low[2] = {floor(b[0]), floor(b[1])}; ir::PTXF64 high[2] = {floor(b[0] + 1), floor(b[1] + 1)}; unsigned int wlow[2]; unsigned int whigh[2]; wlow[0] = wrap(low[0], texture.size.x, texture.addressMode[0]); wlow[1] = wrap(low[1], texture.size.y, texture.addressMode[1]); whigh[0] = wrap(high[0], texture.size.x, texture.addressMode[0]); whigh[1] = wrap(high[1], texture.size.y, texture.addressMode[1]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXF64 result = channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1]) * (high[0] - b[0]) * (high[1] - b[1]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1]) * (b[0] - low[0]) * (b[1] - low[1]); result += 
channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1]) * (high[0] - b[0]) * (b[1] - low[1]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * wlow[1]) * (b[0] - low[0]) * (high[1] - b[1]); d = result; break; } case ir::Texture::Signed: { ir::PTXF64 result = channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1]) * (high[0] - b[0]) * (high[1] - b[1]); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1]) * (b[0] - low[0]) * (b[1] - low[1]); result += channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1]) * (high[0] - b[0]) * (b[1] - low[1]); result += channelRead<ir::PTXS32>(texture, shift, mask, high[0] + texture.size.x * low[1]) * (b[0] - low[0]) * (high[1] - b[1]); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * wlow[1]) * (high[0] - b[0]) * (high[1] - b[1]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * whigh[1]) * (b[0] - low[0]) * (b[1] - low[1]); result += channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * whigh[1]) * (high[0] - b[0]) * (b[1] - low[1]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * wlow[1]) * (b[0] - low[0]) * (high[1] - b[1]); d = result; break; } default: assert("Invalid texture data type" == 0); } } if(texture.normalizedFloat) { ir::PTXF32 f = ( d + 0.0 ) / (mask + 1); d = hydrazine::bit_cast< D >( f ); } return d; } template<unsigned int dim, typename D, typename B> D sample(const ir::Texture& texture, B b0, B b1, B b2) { D d = 0; ir::PTXF64 b[3] = {( ir::PTXF64 ) b0, ( ir::PTXF64 ) b1, ( ir::PTXF64 ) b2}; ir::PTXB64 mask; unsigned int shift; getShiftAndMask<dim>(shift, mask, texture); if (texture.normalize) { b[0] = b[0] * texture.size.x; b[1] = b[1] * texture.size.y; b[2] = b[2] * texture.size.z; } if (texture.interpolation == 
ir::Texture::Nearest) { ir::PTXF64 index[3] = { (ir::PTXF64)b[0], (ir::PTXF64)b[1], (ir::PTXF64)b[2]}; unsigned int windex[3]; windex[0] = wrap(index[0], texture.size.x, texture.addressMode[0]); windex[1] = wrap(index[1], texture.size.y, texture.addressMode[1]); windex[2] = wrap(index[2], texture.size.z, texture.addressMode[2]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXU32 result = channelRead<ir::PTXU32>(texture, shift, mask, windex[0] + windex[1]*texture.size.x + index[2]*texture.size.x*texture.size.y); d = result; break; } case ir::Texture::Signed: { ir::PTXS32 result = channelRead<ir::PTXS32>(texture, shift, mask, windex[0] + windex[1]*texture.size.x + windex[2]*texture.size.x*texture.size.y); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, windex[0] + windex[1]*texture.size.x + windex[2]*texture.size.x*texture.size.y); d = result; break; } default: assert("Invalid texture data type" == 0); } } else { b[0] -= 0.5f; b[1] -= 0.5f; b[2] -= 0.5f; ir::PTXF64 low[3] = {floor(b[0]), floor(b[1]), floor(b[2])}; ir::PTXF64 high[3] = {floor(b[0] + 1), floor(b[1] + 1), floor(b[2] + 1)}; unsigned int wlow[3]; unsigned int whigh[3]; wlow[0] = wrap(low[0], texture.size.x, texture.addressMode[0]); wlow[1] = wrap(low[1], texture.size.y, texture.addressMode[1]); wlow[2] = wrap(low[2], texture.size.z, texture.addressMode[2]); whigh[0] = wrap(high[0], texture.size.x, texture.addressMode[0]); whigh[1] = wrap(high[1], texture.size.y, texture.addressMode[1]); whigh[2] = wrap(high[2], texture.size.z, texture.addressMode[2]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXF64 result = channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * wlow[2]) * (high[0] - b[0]) * (high[1] - b[1]) * (high[2] - b[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * 
whigh[2]) * (high[0] - b[0]) * (high[1] - b[1]) * (b[2] - low[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * wlow[2]) * (high[0] - b[0]) * (b[1] - low[1]) * (high[2] - b[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * whigh[2]) * (high[0] - b[0]) * (b[1] - low[1]) * (b[2] - low[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * wlow[2]) * (b[0] - low[0]) * (high[1] - b[1]) * (high[2] - b[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * whigh[2]) * (b[0] - low[0]) * (high[1] - b[1]) * (b[2] - low[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * wlow[2]) * (b[0] - low[0]) * (b[1] - low[1]) * (high[2] - b[2]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * whigh[2]) * (b[0] - low[0]) * (b[1] - low[1]) * (b[2] - low[2]); d = result; break; } case ir::Texture::Signed: { ir::PTXF64 result = channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * wlow[2]) * (high[0] - b[0]) * (high[1] - b[1]) * (high[2] - b[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * whigh[2]) * (high[0] - b[0]) * (high[1] - b[1]) * (b[2] - low[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * wlow[2]) * (high[0] - b[0]) * (b[1] - low[1]) * (high[2] - b[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * 
whigh[2]) * (high[0] - b[0]) * (b[1] - low[1]) * (b[2] - low[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * wlow[2]) * (b[0] - low[0]) * (high[1] - b[1]) * (high[2] - b[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * whigh[2]) * (b[0] - low[0]) * (high[1] - b[1]) * (b[2] - low[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * wlow[2]) * (b[0] - low[0]) * (b[1] - low[1]) * (high[2] - b[2]); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * whigh[2]) * (b[0] - low[0]) * (b[1] - low[1]) * (b[2] - low[2]); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * wlow[2]) * (high[0] - b[0]) * (high[1] - b[1]) * (high[2] - b[2]); result += channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * whigh[2]) * (high[0] - b[0]) * (high[1] - b[1]) * (b[2] - low[2]); result += channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * wlow[2]) * (high[0] - b[0]) * (b[1] - low[1]) * (high[2] - b[2]); result += channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * whigh[2]) * (high[0] - b[0]) * (b[1] - low[1]) * (b[2] - low[2]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * wlow[2]) * (b[0] - low[0]) * (high[1] - b[1]) * (high[2] - b[2]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * wlow[1] + texture.size.x * texture.size.y * whigh[2]) * (b[0] - low[0]) * (high[1] - b[1]) * (b[2] 
- low[2]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * wlow[2]) * (b[0] - low[0]) * (b[1] - low[1]) * (high[2] - b[2]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * whigh[1] + texture.size.x * texture.size.y * whigh[2]) * (b[0] - low[0]) * (b[1] - low[1]) * (b[2] - low[2]); d = result; break; } default: assert("Invalid texture data type" == 0); } } if( texture.normalizedFloat ) { ir::PTXF32 f = ( d + 0.0 ) / (mask + 1); d = hydrazine::bit_cast< D >( f ); } return d; } template <typename T> T fabs(T t) { if (t < 0) return -t; return t; } template <typename T> T signum(T t) { if (t < 0) return -1; if (t > 0) return 1; return 0; } template<unsigned int dim, typename D> D sampleCube(const ir::Texture& texture, float b0, float b1, float b2) { D d = 0; ir::PTXF32 b[3] = {b0,b1,b2}; ir::PTXB64 mask; unsigned int shift; getShiftAndMask<dim>(shift, mask, texture); // find which is largest in magnitude float magnitudes[3] = {fabs(b[0]), fabs(b[1]), fabs(b[2])}; float signs[3] = {signum(b[0]), signum(b[1]), signum(b[2])}; int maxDim = 0; for (int i = 1; i < 3; i++) { if (magnitudes[i] > magnitudes[maxDim]) { maxDim = i; } } int textureIndex = maxDim*2 + (signs[maxDim] < 0 ? 
1 : 0); float layerCoords[3] = {0, 0}; layerCoords[0] = magnitudes[maxDim]; switch (textureIndex) { case 0: layerCoords[1] = -b[2]; layerCoords[2] = -b[1]; break; case 1: layerCoords[1] = b[2]; layerCoords[2] = -b[1]; break; case 2: layerCoords[1] = b[0]; layerCoords[2] = b[2]; break; case 3: layerCoords[1] = b[0]; layerCoords[2] = -b[2]; break; case 4: layerCoords[1] = b[0]; layerCoords[2] = -b[1]; break; case 5: layerCoords[1] = -b[0]; layerCoords[2] = -b[1]; break; default: break; } b[0] = (layerCoords[1] / layerCoords[0] + 1.0f) / 2.0f; b[1] = (layerCoords[2] / layerCoords[0] + 1.0f) / 2.0f; if (texture.normalize) { b[0] = b[0] * texture.size.x; b[1] = b[1] * texture.size.y; } if (texture.interpolation == ir::Texture::Nearest) { ir::PTXF64 index[2] = { (ir::PTXF64)b[0], (ir::PTXF64)b[1]}; unsigned int windex[2]; windex[0] = wrap(index[0], texture.size.x, texture.addressMode[0]); windex[1] = wrap(index[1], texture.size.y, texture.addressMode[1]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXU32 result = channelRead<ir::PTXU32>(texture, shift, mask, windex[0] + windex[1]*texture.size.x + texture.size.x*texture.size.y*textureIndex); d = result; break; } case ir::Texture::Signed: { ir::PTXS32 result = channelRead<ir::PTXS32>(texture, shift, mask, windex[0] + windex[1]*texture.size.x + texture.size.x*texture.size.y*textureIndex); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, windex[0] + windex[1]*texture.size.x + texture.size.x*texture.size.y*textureIndex); d = result; break; } default: assert("Invalid texture data type" == 0); } } else if (texture.interpolation == ir::Texture::Linear) { b[0] -= 0.5f; b[1] -= 0.5f; ir::PTXF64 low[2] = {floor(b[0]), floor(b[1])}; ir::PTXF64 high[2] = {floor(b[0] + 1), floor(b[1] + 1)}; unsigned int wlow[3]; unsigned int whigh[3]; wlow[0] = wrap(low[0], texture.size.x, texture.addressMode[0]); wlow[1] = wrap(low[1], texture.size.y, texture.addressMode[1]); 
whigh[0] = wrap(high[0], texture.size.x, texture.addressMode[0]); whigh[1] = wrap(high[1], texture.size.y, texture.addressMode[1]); switch (texture.type) { case ir::Texture::Unsigned: { ir::PTXF64 result = channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1]) * (high[0] - b[0]) * (high[1] - b[1]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1]) * (b[0] - low[0]) * (b[1] - low[1]); result += channelRead<ir::PTXU32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1]) * (high[0] - b[0]) * (b[1] - low[1]); result += channelRead<ir::PTXU32>(texture, shift, mask, whigh[0] + texture.size.x * wlow[1]) * (b[0] - low[0]) * (high[1] - b[1]); d = result; break; } case ir::Texture::Signed: { ir::PTXF64 result = channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * wlow[1]) * (high[0] - b[0]) * (high[1] - b[1]); result += channelRead<ir::PTXS32>(texture, shift, mask, whigh[0] + texture.size.x * whigh[1]) * (b[0] - low[0]) * (b[1] - low[1]); result += channelRead<ir::PTXS32>(texture, shift, mask, wlow[0] + texture.size.x * whigh[1]) * (high[0] - b[0]) * (b[1] - low[1]); result += channelRead<ir::PTXS32>(texture, shift, mask, high[0] + texture.size.x * low[1]) * (b[0] - low[0]) * (high[1] - b[1]); d = result; break; } case ir::Texture::Float: { ir::PTXF32 result = channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * wlow[1]) * (high[0] - b[0]) * (high[1] - b[1]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * whigh[1]) * (b[0] - low[0]) * (b[1] - low[1]); result += channelReadF32(texture, shift, mask, wlow[0] + texture.size.x * whigh[1]) * (high[0] - b[0]) * (b[1] - low[1]); result += channelReadF32(texture, shift, mask, whigh[0] + texture.size.x * wlow[1]) * (b[0] - low[0]) * (high[1] - b[1]); d = result; break; } default: assert("Invalid texture data type" == 0); } } else { assert(0 && "invalid texture interpolation mode"); } if( 
texture.normalizedFloat ) { ir::PTXF32 f = ( d + 0.0 ) / (mask + 1); d = hydrazine::bit_cast< D >( f ); } return d; } void addresses( const ir::Texture& texture, ir::PTXF64 b0, trace::TraceEvent::U64Vector& ); void addresses( const ir::Texture& texture, ir::PTXF64 b0, ir::PTXF64 b1, trace::TraceEvent::U64Vector& ); void addresses( const ir::Texture& texture, ir::PTXF64 b0, ir::PTXF64 b1, ir::PTXF64 b2, trace::TraceEvent::U64Vector& ); } } #endif
gtcasl/gpuocelot
ocelot/ocelot/executive/interface/TextureOperations.h
C
bsd-3-clause
23,162
<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="fi_FI"> <context> <name>MainUI</name> <message> <location filename="../MainUI.ui" line="14"/> <source>Insight</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="51"/> <source>Open Multimedia Player</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="54"/> <location filename="../MainUI.ui" line="67"/> <location filename="../MainUI.ui" line="80"/> <location filename="../MainUI.ui" line="136"/> <location filename="../MainUI.ui" line="143"/> <location filename="../MainUI.ui" line="215"/> <location filename="../MainUI.ui" line="225"/> <location filename="../MainUI.ui" line="271"/> <location filename="../MainUI.ui" line="281"/> <location filename="../MainUI.ui" line="309"/> <location filename="../MainUI.ui" line="337"/> <source>...</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="64"/> <source>View Slideshow</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="77"/> <source>Restore File(s)</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="106"/> <source>Playlist</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="116"/> <location filename="../MainUI.ui" line="268"/> <source>Go to Next</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="127"/> <source>TextLabel</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="212"/> <source>Go to Beginning</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="222"/> <source>Go to 
Previous</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="278"/> <source>Go to End</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="356"/> <source>Name</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="361"/> <source>Date Modified</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="366"/> <source>Size</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="371"/> <source>Owner</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="381"/> <source>Restore entire directory</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="384"/> <source>Restore All</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="394"/> <source>Restore Selected Item</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="397"/> <source>Restore Selection</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="420"/> <source>Overwrite Existing Files</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="443"/> <source>File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="451"/> <source>Edit</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="457"/> <source>View</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="465"/> 
<source>Bookmarks</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="476"/> <source>toolBar</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="497"/> <source>New &amp;Tab</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="502"/> <source>Close Tab</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="507"/> <source>E&amp;xit</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="512"/> <source>&amp;Preferences</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="523"/> <source>Shortcuts</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="534"/> <source>Music Player</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="545"/> <source>Image Viewer</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="550"/> <source>UpDir</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="553"/> <source>Go up one directory</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="558"/> <source>Home</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="561"/> <source>Go to your home directory</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="572"/> <source>View Hidden Files</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" 
line="577"/> <source>Back</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="580"/> <source>Back to directory</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="585"/> <source>Refresh</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="588"/> <source>Refresh Directory</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="591"/> <source>F5</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="596"/> <source>Bookmark</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="599"/> <source>Bookmark this directory</source> <translation type="unfinished"></translation> </message> </context> </TS>
krytarowski/lumina
src-qt5/desktop-utils/lumina-fm/i18n/lumina-fm_fi.ts
TypeScript
bsd-3-clause
8,458
// Copyright 2015, Google Inc. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package grpcvtgateservice provides the gRPC glue for vtgate package grpcvtgateservice import ( "google.golang.org/grpc" mproto "github.com/youtube/vitess/go/mysql/proto" "github.com/youtube/vitess/go/vt/callerid" "github.com/youtube/vitess/go/vt/callinfo" "github.com/youtube/vitess/go/vt/key" "github.com/youtube/vitess/go/vt/servenv" tproto "github.com/youtube/vitess/go/vt/tabletserver/proto" "github.com/youtube/vitess/go/vt/topo" "github.com/youtube/vitess/go/vt/vtgate" "github.com/youtube/vitess/go/vt/vtgate/proto" "github.com/youtube/vitess/go/vt/vtgate/vtgateservice" "golang.org/x/net/context" pb "github.com/youtube/vitess/go/vt/proto/vtgate" pbs "github.com/youtube/vitess/go/vt/proto/vtgateservice" ) // VTGate is the public structure that is exported via gRPC type VTGate struct { server vtgateservice.VTGateService } // Execute is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Execute(ctx context.Context, request *pb.ExecuteRequest) (response *pb.ExecuteResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.Execute(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteShards is the RPC version of vtgateservice.VTGateService method func 
(vtg *VTGate) ExecuteShards(ctx context.Context, request *pb.ExecuteShardsRequest) (response *pb.ExecuteShardsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteShards(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, request.Shards, request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteShardsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteKeyspaceIds is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteKeyspaceIds(ctx context.Context, request *pb.ExecuteKeyspaceIdsRequest) (response *pb.ExecuteKeyspaceIdsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteKeyspaceIds(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, key.ProtoToKeyspaceIds(request.KeyspaceIds), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteKeyspaceIdsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteKeyRanges is 
the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteKeyRanges(ctx context.Context, request *pb.ExecuteKeyRangesRequest) (response *pb.ExecuteKeyRangesResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteKeyRanges(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, key.ProtoToKeyRanges(request.KeyRanges), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteKeyRangesResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteEntityIds is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteEntityIds(ctx context.Context, request *pb.ExecuteEntityIdsRequest) (response *pb.ExecuteEntityIdsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteEntityIds(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, request.EntityColumnName, proto.ProtoToEntityIds(request.EntityKeyspaceIds), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteEntityIdsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return 
response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteBatchShards is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteBatchShards(ctx context.Context, request *pb.ExecuteBatchShardsRequest) (response *pb.ExecuteBatchShardsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResultList) executeErr := vtg.server.ExecuteBatchShards(ctx, proto.ProtoToBoundShardQueries(request.Queries), request.TabletType, request.AsTransaction, proto.ProtoToSession(request.Session), reply) response = &pb.ExecuteBatchShardsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Results = tproto.QueryResultListToProto3(reply.List) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteBatchKeyspaceIds is the RPC version of // vtgateservice.VTGateService method func (vtg *VTGate) ExecuteBatchKeyspaceIds(ctx context.Context, request *pb.ExecuteBatchKeyspaceIdsRequest) (response *pb.ExecuteBatchKeyspaceIdsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResultList) executeErr := vtg.server.ExecuteBatchKeyspaceIds(ctx, proto.ProtoToBoundKeyspaceIdQueries(request.Queries), request.TabletType, request.AsTransaction, proto.ProtoToSession(request.Session), reply) response = &pb.ExecuteBatchKeyspaceIdsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Results = tproto.QueryResultListToProto3(reply.List) response.Session = proto.SessionToProto(reply.Session) return response, nil } if 
*vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // StreamExecute is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) StreamExecute(request *pb.StreamExecuteRequest, stream pbs.Vitess_StreamExecuteServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecute(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // StreamExecuteShards is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) StreamExecuteShards(request *pb.StreamExecuteShardsRequest, stream pbs.Vitess_StreamExecuteShardsServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecuteShards(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, request.Shards, request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteShardsResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // StreamExecuteKeyspaceIds is the RPC version of // vtgateservice.VTGateService method func (vtg *VTGate) StreamExecuteKeyspaceIds(request *pb.StreamExecuteKeyspaceIdsRequest, stream pbs.Vitess_StreamExecuteKeyspaceIdsServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecuteKeyspaceIds(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), 
request.Keyspace, key.ProtoToKeyspaceIds(request.KeyspaceIds), request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteKeyspaceIdsResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // StreamExecuteKeyRanges is the RPC version of // vtgateservice.VTGateService method func (vtg *VTGate) StreamExecuteKeyRanges(request *pb.StreamExecuteKeyRangesRequest, stream pbs.Vitess_StreamExecuteKeyRangesServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecuteKeyRanges(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, key.ProtoToKeyRanges(request.KeyRanges), request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteKeyRangesResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // Begin is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Begin(ctx context.Context, request *pb.BeginRequest) (response *pb.BeginResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) outSession := new(proto.Session) beginErr := vtg.server.Begin(ctx, outSession) response = &pb.BeginResponse{ Error: vtgate.VtGateErrorToVtRPCError(beginErr, ""), } if beginErr == nil { response.Session = proto.SessionToProto(outSession) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, beginErr } // Commit is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Commit(ctx context.Context, request *pb.CommitRequest) (response *pb.CommitResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc 
client")) commitErr := vtg.server.Commit(ctx, proto.ProtoToSession(request.Session)) response = &pb.CommitResponse{ Error: vtgate.VtGateErrorToVtRPCError(commitErr, ""), } if commitErr == nil { return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, commitErr } // Rollback is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Rollback(ctx context.Context, request *pb.RollbackRequest) (response *pb.RollbackResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) rollbackErr := vtg.server.Rollback(ctx, proto.ProtoToSession(request.Session)) response = &pb.RollbackResponse{ Error: vtgate.VtGateErrorToVtRPCError(rollbackErr, ""), } if rollbackErr == nil { return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, rollbackErr } // SplitQuery is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) SplitQuery(ctx context.Context, request *pb.SplitQueryRequest) (response *pb.SplitQueryResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.SplitQueryResult) if err := vtg.server.SplitQuery(ctx, request.Keyspace, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.SplitColumn, int(request.SplitCount), reply); err != nil { return nil, err } return proto.SplitQueryPartsToProto(reply.Splits), nil } // GetSrvKeyspace is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) GetSrvKeyspace(ctx context.Context, request *pb.GetSrvKeyspaceRequest) (response *pb.GetSrvKeyspaceResponse, err error) { defer vtg.server.HandlePanic(&err) sk, err := vtg.server.GetSrvKeyspace(ctx, request.Keyspace) if err != nil { return nil, err } return &pb.GetSrvKeyspaceResponse{ 
SrvKeyspace: topo.SrvKeyspaceToProto(sk), }, nil } func init() { vtgate.RegisterVTGates = append(vtgate.RegisterVTGates, func(vtGate vtgateservice.VTGateService) { if servenv.GRPCCheckServiceMap("vtgateservice") { pbs.RegisterVitessServer(servenv.GRPCServer, &VTGate{vtGate}) } }) } // RegisterForTest registers the gRPC implementation on the gRPC // server. Useful for unit tests only, for real use, the init() // function does the registration. func RegisterForTest(s *grpc.Server, service vtgateservice.VTGateService) { pbs.RegisterVitessServer(s, &VTGate{service}) }
skyportsystems/vitess
go/vt/vtgate/grpcvtgateservice/server.go
GO
bsd-3-clause
15,051
/* * Copyright (c) 2017, The OpenThread Authors. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** * @file * This file implements the OpenThread platform abstraction for the alarm. 
* */ #include <stdbool.h> #include <stdint.h> #include <openthread/config.h> #include <openthread/platform/alarm-milli.h> #include <openthread/platform/diag.h> #include "utils/code_utils.h" #include "em_core.h" #include "rail.h" static uint32_t sTimerHi = 0; static uint32_t sTimerLo = 0; static uint32_t sAlarmT0 = 0; static uint32_t sAlarmDt = 0; static bool sIsRunning = false; void efr32AlarmInit(void) { } uint32_t otPlatAlarmMilliGetNow(void) { uint32_t timer_lo; uint32_t timer_ms; CORE_DECLARE_IRQ_STATE; CORE_ENTER_CRITICAL(); timer_lo = RAIL_GetTime(); if (timer_lo < sTimerLo) { sTimerHi++; } sTimerLo = timer_lo; timer_ms = (((uint64_t)sTimerHi << 32) | sTimerLo) / 1000; CORE_EXIT_CRITICAL(); return timer_ms; } void otPlatAlarmMilliStartAt(otInstance *aInstance, uint32_t t0, uint32_t dt) { (void)aInstance; sAlarmT0 = t0; sAlarmDt = dt; sIsRunning = true; } void otPlatAlarmMilliStop(otInstance *aInstance) { (void)aInstance; sIsRunning = false; } void efr32AlarmProcess(otInstance *aInstance) { uint32_t now = otPlatAlarmMilliGetNow(); uint32_t expires; bool fire = false; otEXPECT(sIsRunning); expires = sAlarmT0 + sAlarmDt; if (sAlarmT0 <= now) { fire = (expires >= sAlarmT0 && expires <= now); } else { fire = (expires >= sAlarmT0 || expires <= now); } if (fire) { sIsRunning = false; #if OPENTHREAD_ENABLE_DIAG if (otPlatDiagModeGet()) { otPlatDiagAlarmFired(aInstance); } else #endif { otPlatAlarmMilliFired(aInstance); } } exit: return; } void RAILCb_TimerExpired(void) { }
georgecpr/openthread
examples/platforms/efr32/alarm.c
C
bsd-3-clause
3,479
/** * Copyright (C) 2016 Turi * All rights reserved. * * This software may be modified and distributed under the terms * of the BSD license. See the LICENSE file for details. */ #ifndef GRAPHLAB_SFRAME_QUERY_OPTIMIZATION_APPEND_TRANSFORMS_H_ #define GRAPHLAB_SFRAME_QUERY_OPTIMIZATION_APPEND_TRANSFORMS_H_ #include <sframe_query_engine/planning/optimizations/optimization_transforms.hpp> #include <sframe_query_engine/planning/optimization_engine.hpp> #include <sframe_query_engine/operators/all_operators.hpp> #include <sframe_query_engine/planning/optimization_node_info.hpp> #include <sframe_query_engine/operators/operator_properties.hpp> #include <flexible_type/flexible_type.hpp> #include <array> namespace graphlab { namespace query_eval { class opt_append_transform : public opt_transform { bool transform_applies(planner_node_type t) { return (t == planner_node_type::APPEND_NODE); } }; /** Transform append(source, source) --> source */ class opt_append_on_source : public opt_append_transform { std::string description() { return "append(source, source) -> source"; } std::pair<bool, sframe> try_sframe_append(cnode_info_ptr n) { sframe new_sf; for (const auto& input: n->inputs) { if (input->type != planner_node_type::SFRAME_SOURCE_NODE) return {false, new_sf}; auto begin = input->p("begin_index"); auto end = input->p("end_index"); const auto& sf = input->any_p<sframe>("sframe"); if (begin == 0 && end == sf.size()) { // stupidly we need the names to match for the append to work... 
for (size_t i = 0; i < new_sf.num_columns(); ++i) { new_sf.set_column_name(i, sf.column_name(i)); } if(begin != end) new_sf = new_sf.append(sf); } else { return {false, new_sf}; } } if(new_sf.num_rows() == 0) { new_sf = n->inputs[0]->any_p<sframe>("sframe"); } return {true, new_sf}; } std::pair<bool, sarray<flexible_type> > try_sarray_append(cnode_info_ptr n) { sarray<flexible_type> new_sa; for (const auto& input: n->inputs) { if (input->type != planner_node_type::SARRAY_SOURCE_NODE) return {false, new_sa}; auto begin = input->p("begin_index"); auto end = input->p("end_index"); auto sa_ptr = input->any_p<std::shared_ptr<sarray<flexible_type> > >("sarray"); const auto& sa = *sa_ptr; if (begin == 0 && end == sa.size()) { if(begin != end) new_sa = new_sa.append(sa); } else { return {false, new_sa}; } } if(new_sa.size() == 0) { new_sa = *(n->inputs[0]->any_p<std::shared_ptr<sarray<flexible_type> > >("sarray")); } return {true, new_sa}; } bool apply_transform(optimization_engine *opt_manager, cnode_info_ptr n) { // only source nodes accepted // and all have the same begin and end positions ASSERT_NE(n->inputs.size(), 0); // Quickly fail if not dealing with two sframe/sarray sources if(! ((n->inputs[0]->type == planner_node_type::SFRAME_SOURCE_NODE || n->inputs[0]->type == planner_node_type::SARRAY_SOURCE_NODE) && (n->inputs[1]->type == planner_node_type::SFRAME_SOURCE_NODE || n->inputs[1]->type == planner_node_type::SARRAY_SOURCE_NODE))) { return false; } // Try append as sframe auto sframe_append_result = try_sframe_append(n); if (sframe_append_result.first) { auto& new_sf = sframe_append_result.second; // we can rewrite the current node. pnode_ptr new_pnode = op_sframe_source::make_planner_node(new_sf, 0, new_sf.num_rows()); opt_manager->replace_node(n, new_pnode); return true; } // Try append as sarray auto sarray_append_result = try_sarray_append(n); if (sarray_append_result.first) { auto& new_sa = sarray_append_result.second; // we can rewrite the current node. 
pnode_ptr new_pnode = op_sarray_source::make_planner_node(std::make_shared<sarray<flexible_type> >(new_sa), 0, new_sa.size()); opt_manager->replace_node(n, new_pnode); return true; } return false; } }; /** Eliminated by optimization to prune off an append of an empty * transform. */ class opt_eliminate_empty_append : public opt_append_transform { std::string description() { return "append(source, empty_source) -> source"; } bool apply_transform(optimization_engine *opt_manager, cnode_info_ptr n) { if(n->inputs[1]->length() == 0) { opt_manager->replace_node(n, n->inputs[0]->pnode); return true; } if(n->inputs[0]->length() == 0) { opt_manager->replace_node(n, n->inputs[1]->pnode); return true; } return false; } }; }} #endif /* _APPEND_TRANSFORMS_H_ */
ylow/SFrame
oss_src/sframe_query_engine/planning/optimizations/append_transforms.hpp
C++
bsd-3-clause
5,035
// Copyright 2013 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // This file implements initialization and assignment checks. package types import ( "go/ast" "go/token" ) // assignment reports whether x can be assigned to a variable of type T, // if necessary by attempting to convert untyped values to the appropriate // type. context describes the context in which the assignment takes place. // Use T == nil to indicate assignment to an untyped blank identifier. // x.mode is set to invalid if the assignment failed. func (check *Checker) assignment(x *operand, T Type, context string) { check.singleValue(x) switch x.mode { case invalid: return // error reported before case constant_, variable, mapindex, value, commaok: // ok default: unreachable() } if isUntyped(x.typ) { target := T // spec: "If an untyped constant is assigned to a variable of interface // type or the blank identifier, the constant is first converted to type // bool, rune, int, float64, complex128 or string respectively, depending // on whether the value is a boolean, rune, integer, floating-point, complex, // or string constant." if T == nil || IsInterface(T) { if T == nil && x.typ == Typ[UntypedNil] { check.errorf(x.pos(), "use of untyped nil in %s", context) x.mode = invalid return } target = Default(x.typ) } check.convertUntyped(x, target) if x.mode == invalid { return } } // x.typ is typed // spec: "If a left-hand side is the blank identifier, any typed or // non-constant value except for the predeclared identifier nil may // be assigned to it." 
if T == nil { return } if reason := ""; !x.assignableTo(check.conf, T, &reason) { if reason != "" { check.errorf(x.pos(), "cannot use %s as %s value in %s: %s", x, T, context, reason) } else { check.errorf(x.pos(), "cannot use %s as %s value in %s", x, T, context) } x.mode = invalid } } func (check *Checker) initConst(lhs *Const, x *operand) { if x.mode == invalid || x.typ == Typ[Invalid] || lhs.typ == Typ[Invalid] { if lhs.typ == nil { lhs.typ = Typ[Invalid] } return } // rhs must be a constant if x.mode != constant_ { check.errorf(x.pos(), "%s is not constant", x) if lhs.typ == nil { lhs.typ = Typ[Invalid] } return } assert(isConstType(x.typ)) // If the lhs doesn't have a type yet, use the type of x. if lhs.typ == nil { lhs.typ = x.typ } check.assignment(x, lhs.typ, "constant declaration") if x.mode == invalid { return } lhs.val = x.val } func (check *Checker) initVar(lhs *Var, x *operand, context string) Type { if x.mode == invalid || x.typ == Typ[Invalid] || lhs.typ == Typ[Invalid] { if lhs.typ == nil { lhs.typ = Typ[Invalid] } return nil } // If the lhs doesn't have a type yet, use the type of x. if lhs.typ == nil { typ := x.typ if isUntyped(typ) { // convert untyped types to default types if typ == Typ[UntypedNil] { check.errorf(x.pos(), "use of untyped nil in %s", context) lhs.typ = Typ[Invalid] return nil } typ = Default(typ) } lhs.typ = typ } check.assignment(x, lhs.typ, context) if x.mode == invalid { return nil } return x.typ } func (check *Checker) assignVar(lhs ast.Expr, x *operand) Type { if x.mode == invalid || x.typ == Typ[Invalid] { return nil } // Determine if the lhs is a (possibly parenthesized) identifier. ident, _ := unparen(lhs).(*ast.Ident) // Don't evaluate lhs if it is the blank identifier. 
if ident != nil && ident.Name == "_" { check.recordDef(ident, nil) check.assignment(x, nil, "assignment to _ identifier") if x.mode == invalid { return nil } return x.typ } // If the lhs is an identifier denoting a variable v, this assignment // is not a 'use' of v. Remember current value of v.used and restore // after evaluating the lhs via check.expr. var v *Var var v_used bool if ident != nil { if _, obj := check.scope.LookupParent(ident.Name, token.NoPos); obj != nil { v, _ = obj.(*Var) if v != nil { v_used = v.used } } } var z operand check.expr(&z, lhs) if v != nil { v.used = v_used // restore v.used } if z.mode == invalid || z.typ == Typ[Invalid] { return nil } // spec: "Each left-hand side operand must be addressable, a map index // expression, or the blank identifier. Operands may be parenthesized." switch z.mode { case invalid: return nil case variable, mapindex: // ok default: if sel, ok := z.expr.(*ast.SelectorExpr); ok { var op operand check.expr(&op, sel.X) if op.mode == mapindex { check.errorf(z.pos(), "cannot assign to struct field %s in map", ExprString(z.expr)) return nil } } check.errorf(z.pos(), "cannot assign to %s", &z) return nil } check.assignment(x, z.typ, "assignment") if x.mode == invalid { return nil } return x.typ } // If returnPos is valid, initVars is called to type-check the assignment of // return expressions, and returnPos is the position of the return statement. 
func (check *Checker) initVars(lhs []*Var, rhs []ast.Expr, returnPos token.Pos) { l := len(lhs) get, r, commaOk := unpack(func(x *operand, i int) { check.multiExpr(x, rhs[i]) }, len(rhs), l == 2 && !returnPos.IsValid()) if get == nil || l != r { // invalidate lhs and use rhs for _, obj := range lhs { if obj.typ == nil { obj.typ = Typ[Invalid] } } if get == nil { return // error reported by unpack } check.useGetter(get, r) if returnPos.IsValid() { check.errorf(returnPos, "wrong number of return values (want %d, got %d)", l, r) return } check.errorf(rhs[0].Pos(), "cannot initialize %d variables with %d values", l, r) return } context := "assignment" if returnPos.IsValid() { context = "return statement" } var x operand if commaOk { var a [2]Type for i := range a { get(&x, i) a[i] = check.initVar(lhs[i], &x, context) } check.recordCommaOkTypes(rhs[0], a) return } for i, lhs := range lhs { get(&x, i) check.initVar(lhs, &x, context) } } func (check *Checker) assignVars(lhs, rhs []ast.Expr) { l := len(lhs) get, r, commaOk := unpack(func(x *operand, i int) { check.multiExpr(x, rhs[i]) }, len(rhs), l == 2) if get == nil { return // error reported by unpack } if l != r { check.useGetter(get, r) check.errorf(rhs[0].Pos(), "cannot assign %d values to %d variables", r, l) return } var x operand if commaOk { var a [2]Type for i := range a { get(&x, i) a[i] = check.assignVar(lhs[i], &x) } check.recordCommaOkTypes(rhs[0], a) return } for i, lhs := range lhs { get(&x, i) check.assignVar(lhs, &x) } } func (check *Checker) shortVarDecl(pos token.Pos, lhs, rhs []ast.Expr) { scope := check.scope // collect lhs variables var newVars []*Var var lhsVars = make([]*Var, len(lhs)) for i, lhs := range lhs { var obj *Var if ident, _ := lhs.(*ast.Ident); ident != nil { // Use the correct obj if the ident is redeclared. The // variable's scope starts after the declaration; so we // must use Scope.Lookup here and call Scope.Insert // (via check.declare) later. 
name := ident.Name if alt := scope.Lookup(name); alt != nil { // redeclared object must be a variable if alt, _ := alt.(*Var); alt != nil { obj = alt } else { check.errorf(lhs.Pos(), "cannot assign to %s", lhs) } check.recordUse(ident, alt) } else { // declare new variable, possibly a blank (_) variable obj = NewVar(ident.Pos(), check.pkg, name, nil) if name != "_" { newVars = append(newVars, obj) } check.recordDef(ident, obj) } } else { check.errorf(lhs.Pos(), "cannot declare %s", lhs) } if obj == nil { obj = NewVar(lhs.Pos(), check.pkg, "_", nil) // dummy variable } lhsVars[i] = obj } check.initVars(lhsVars, rhs, token.NoPos) // declare new variables if len(newVars) > 0 { // spec: "The scope of a constant or variable identifier declared inside // a function begins at the end of the ConstSpec or VarSpec (ShortVarDecl // for short variable declarations) and ends at the end of the innermost // containing block." scopePos := rhs[len(rhs)-1].End() for _, obj := range newVars { check.declare(scope, nil, obj, scopePos) // recordObject already called } } else { check.softErrorf(pos, "no new variables on left side of :=") } }
vsekhar/elastic-go
src/go/types/assignments.go
GO
bsd-3-clause
8,455
import six import sys from optparse import make_option, NO_DEFAULT from django.core.management.base import BaseCommand, CommandError from django.conf import settings from django_extensions.management.modelviz import generate_dot try: import pygraphviz HAS_PYGRAPHVIZ = True except ImportError: HAS_PYGRAPHVIZ = False try: import pydot HAS_PYDOT = True except ImportError: HAS_PYDOT = False class Command(BaseCommand): graph_models_options = ( make_option('--pygraphviz', action='store_true', dest='pygraphviz', help='Use PyGraphViz to generate the image.'), make_option('--pydot', action='store_true', dest='pydot', help='Use PyDot to generate the image.'), make_option('--disable-fields', '-d', action='store_true', dest='disable_fields', help='Do not show the class member fields'), make_option('--group-models', '-g', action='store_true', dest='group_models', help='Group models together respective to their application'), make_option('--all-applications', '-a', action='store_true', dest='all_applications', help='Automatically include all applications from INSTALLED_APPS'), make_option('--output', '-o', action='store', dest='outputfile', help='Render output file. Type of output dependend on file extensions. Use png or jpg to render graph to image.'), make_option('--layout', '-l', action='store', dest='layout', default='dot', help='Layout to be used by GraphViz for visualization. Layouts: circo dot fdp neato nop nop1 nop2 twopi'), make_option('--verbose-names', '-n', action='store_true', dest='verbose_names', help='Use verbose_name of models and fields'), make_option('--language', '-L', action='store', dest='language', help='Specify language used for verbose_name localization'), make_option('--exclude-columns', '-x', action='store', dest='exclude_columns', help='Exclude specific column(s) from the graph. Can also load exclude list from file.'), make_option('--exclude-models', '-X', action='store', dest='exclude_models', help='Exclude specific model(s) from the graph. 
Can also load exclude list from file.'), make_option('--include-models', '-I', action='store', dest='include_models', help='Restrict the graph to specified models.'), make_option('--inheritance', '-e', action='store_true', dest='inheritance', default=True, help='Include inheritance arrows (default)'), make_option('--no-inheritance', '-E', action='store_false', dest='inheritance', help='Do not include inheritance arrows'), make_option('--hide-relations-from-fields', '-R', action='store_false', dest="relations_as_fields", default=True, help="Do not show relations as fields in the graph."), make_option('--disable-sort-fields', '-S', action="store_false", dest="sort_fields", default=True, help="Do not sort fields"), ) option_list = BaseCommand.option_list + graph_models_options help = "Creates a GraphViz dot file for the specified app names. You can pass multiple app names and they will all be combined into a single model. Output is usually directed to a dot file." args = "[appname]" label = 'application name' requires_model_validation = True can_import_settings = True def handle(self, *args, **options): self.options_from_settings(options) if len(args) < 1 and not options['all_applications']: raise CommandError("need one or more arguments for appname") use_pygraphviz = options.get('pygraphviz', False) use_pydot = options.get('pydot', False) cli_options = ' '.join(sys.argv[2:]) dotdata = generate_dot(args, cli_options=cli_options, **options) dotdata = dotdata.encode('utf-8') if options['outputfile']: if not use_pygraphviz and not use_pydot: if HAS_PYGRAPHVIZ: use_pygraphviz = True elif HAS_PYDOT: use_pydot = True if use_pygraphviz: self.render_output_pygraphviz(dotdata, **options) elif use_pydot: self.render_output_pydot(dotdata, **options) else: raise CommandError("Neither pygraphviz nor pydot could be found to generate the image") else: self.print_output(dotdata) def options_from_settings(self, options): defaults = getattr(settings, 'GRAPH_MODELS', None) if defaults: 
for option in self.graph_models_options: long_opt = option._long_opts[0] if long_opt: long_opt = long_opt.lstrip("-").replace("-", "_") if long_opt in defaults: default_value = None if not option.default == NO_DEFAULT: default_value = option.default if options[option.dest] == default_value: options[option.dest] = defaults[long_opt] def print_output(self, dotdata): if six.PY3 and isinstance(dotdata, six.binary_type): dotdata = dotdata.decode() print(dotdata) def render_output_pygraphviz(self, dotdata, **kwargs): """Renders the image using pygraphviz""" if not HAS_PYGRAPHVIZ: raise CommandError("You need to install pygraphviz python module") version = pygraphviz.__version__.rstrip("-svn") try: if tuple(int(v) for v in version.split('.')) < (0, 36): # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version) import tempfile tmpfile = tempfile.NamedTemporaryFile() tmpfile.write(dotdata) tmpfile.seek(0) dotdata = tmpfile.name except ValueError: pass graph = pygraphviz.AGraph(dotdata) graph.layout(prog=kwargs['layout']) graph.draw(kwargs['outputfile']) def render_output_pydot(self, dotdata, **kwargs): """Renders the image using pydot""" if not HAS_PYDOT: raise CommandError("You need to install pydot python module") graph = pydot.graph_from_dot_data(dotdata) if not graph: raise CommandError("pydot returned an error") output_file = kwargs['outputfile'] formats = ['bmp', 'canon', 'cmap', 'cmapx', 'cmapx_np', 'dot', 'dia', 'emf', 'em', 'fplus', 'eps', 'fig', 'gd', 'gd2', 'gif', 'gv', 'imap', 'imap_np', 'ismap', 'jpe', 'jpeg', 'jpg', 'metafile', 'pdf', 'pic', 'plain', 'plain-ext', 'png', 'pov', 'ps', 'ps2', 'svg', 'svgz', 'tif', 'tiff', 'tk', 'vml', 'vmlz', 'vrml', 'wbmp', 'xdot'] ext = output_file[output_file.rfind('.') + 1:] format = ext if ext in formats else 'raw' graph.write(output_file, format=format)
WillisXChen/django-oscar
oscar/lib/python2.7/site-packages/django_extensions/management/commands/graph_models.py
Python
bsd-3-clause
7,278
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef UI_GL_ANDROID_SURFACE_TEXTURE_H_ #define UI_GL_ANDROID_SURFACE_TEXTURE_H_ #include <jni.h> #include "base/android/scoped_java_ref.h" #include "base/callback.h" #include "base/macros.h" #include "base/memory/ref_counted.h" #include "ui/gl/gl_export.h" struct ANativeWindow; namespace gfx { // This class serves as a bridge for native code to call java functions inside // android SurfaceTexture class. class GL_EXPORT SurfaceTexture : public base::RefCountedThreadSafe<SurfaceTexture>{ public: static scoped_refptr<SurfaceTexture> Create(int texture_id); static scoped_refptr<SurfaceTexture> CreateSingleBuffered(int texture_id); // Set the listener callback, which will be invoked on the same thread that // is being called from here for registration. // Note: Since callbacks come in from Java objects that might outlive objects // being referenced from the callback, the only robust way here is to create // the callback from a weak pointer to your object. void SetFrameAvailableCallback(const base::Closure& callback); // Set the listener callback, but allow it to be invoked on any thread. The // same caveats apply as SetFrameAvailableCallback, plus whatever other issues // show up due to multithreading (e.g., don't bind the Closure to a method // via a weak ref). void SetFrameAvailableCallbackOnAnyThread(const base::Closure& callback); // Update the texture image to the most recent frame from the image stream. void UpdateTexImage(); // Release the texture content. This is needed only in single buffered mode // to allow the image content producer to take ownership // of the image buffer. // This is *only* supported on SurfaceTexture instantiated via // |CreateSingleBuffered(...)|. 
void ReleaseTexImage(); // Retrieve the 4x4 texture coordinate transform matrix associated with the // texture image set by the most recent call to updateTexImage. void GetTransformMatrix(float mtx[16]); // Attach the SurfaceTexture to the texture currently bound to // GL_TEXTURE_EXTERNAL_OES. void AttachToGLContext(); // Detaches the SurfaceTexture from the context that owns its current GL // texture. Must be called with that context current on the calling thread. void DetachFromGLContext(); // Creates a native render surface for this surface texture. // The caller must release the underlying reference when done with the handle // by calling ANativeWindow_release(). ANativeWindow* CreateSurface(); const base::android::JavaRef<jobject>& j_surface_texture() const { return j_surface_texture_; } // This should only be used to guard the SurfaceTexture instantiated via // |CreateSingleBuffered(...)| static bool IsSingleBufferModeSupported(); static bool RegisterSurfaceTexture(JNIEnv* env); protected: explicit SurfaceTexture( const base::android::ScopedJavaLocalRef<jobject>& j_surface_texture); private: friend class base::RefCountedThreadSafe<SurfaceTexture>; ~SurfaceTexture(); // Java SurfaceTexture instance. base::android::ScopedJavaGlobalRef<jobject> j_surface_texture_; DISALLOW_COPY_AND_ASSIGN(SurfaceTexture); }; } // namespace gfx #endif // UI_GL_ANDROID_SURFACE_TEXTURE_H_
XiaosongWei/chromium-crosswalk
ui/gl/android/surface_texture.h
C
bsd-3-clause
3,401
/**************************************************************************** ** ** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies). ** All rights reserved. ** Contact: Nokia Corporation (qt-info@nokia.com) ** ** This file is part of the QtCore module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** GNU Lesser General Public License Usage ** This file may be used under the terms of the GNU Lesser General Public ** License version 2.1 as published by the Free Software Foundation and ** appearing in the file LICENSE.LGPL included in the packaging of this ** file. Please review the following information to ensure the GNU Lesser ** General Public License version 2.1 requirements will be met: ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Nokia gives you certain additional ** rights. These rights are described in the Nokia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU General ** Public License version 3.0 as published by the Free Software Foundation ** and appearing in the file LICENSE.GPL included in the packaging of this ** file. Please review the following information to ensure the GNU General ** Public License version 3.0 requirements will be met: ** http://www.gnu.org/copyleft/gpl.html. ** ** Other Usage ** Alternatively, this file may be used in accordance with the terms and ** conditions contained in a signed written agreement between you and Nokia. 
** ** ** ** ** ** $QT_END_LICENSE$ ** ****************************************************************************/ #include <QtCore/qmath.h> QT_BEGIN_NAMESPACE const qreal qt_sine_table[QT_SINE_TABLE_SIZE] = { qreal(0.0), qreal(0.024541228522912288), qreal(0.049067674327418015), qreal(0.073564563599667426), qreal(0.098017140329560604), qreal(0.1224106751992162), qreal(0.14673047445536175), qreal(0.17096188876030122), qreal(0.19509032201612825), qreal(0.2191012401568698), qreal(0.24298017990326387), qreal(0.26671275747489837), qreal(0.29028467725446233), qreal(0.31368174039889152), qreal(0.33688985339222005), qreal(0.35989503653498811), qreal(0.38268343236508978), qreal(0.40524131400498986), qreal(0.42755509343028208), qreal(0.44961132965460654), qreal(0.47139673682599764), qreal(0.49289819222978404), qreal(0.51410274419322166), qreal(0.53499761988709715), qreal(0.55557023301960218), qreal(0.57580819141784534), qreal(0.59569930449243336), qreal(0.61523159058062682), qreal(0.63439328416364549), qreal(0.65317284295377676), qreal(0.67155895484701833), qreal(0.68954054473706683), qreal(0.70710678118654746), qreal(0.72424708295146689), qreal(0.74095112535495911), qreal(0.75720884650648446), qreal(0.77301045336273699), qreal(0.78834642762660623), qreal(0.80320753148064483), qreal(0.81758481315158371), qreal(0.83146961230254524), qreal(0.84485356524970701), qreal(0.85772861000027212), qreal(0.87008699110871135), qreal(0.88192126434835494), qreal(0.89322430119551532), qreal(0.90398929312344334), qreal(0.91420975570353069), qreal(0.92387953251128674), qreal(0.93299279883473885), qreal(0.94154406518302081), qreal(0.94952818059303667), qreal(0.95694033573220894), qreal(0.96377606579543984), qreal(0.97003125319454397), qreal(0.97570213003852857), qreal(0.98078528040323043), qreal(0.98527764238894122), qreal(0.98917650996478101), qreal(0.99247953459870997), qreal(0.99518472667219682), qreal(0.99729045667869021), qreal(0.99879545620517241), qreal(0.99969881869620425), 
qreal(1.0), qreal(0.99969881869620425), qreal(0.99879545620517241), qreal(0.99729045667869021), qreal(0.99518472667219693), qreal(0.99247953459870997), qreal(0.98917650996478101), qreal(0.98527764238894122), qreal(0.98078528040323043), qreal(0.97570213003852857), qreal(0.97003125319454397), qreal(0.96377606579543984), qreal(0.95694033573220894), qreal(0.94952818059303667), qreal(0.94154406518302081), qreal(0.93299279883473885), qreal(0.92387953251128674), qreal(0.91420975570353069), qreal(0.90398929312344345), qreal(0.89322430119551521), qreal(0.88192126434835505), qreal(0.87008699110871146), qreal(0.85772861000027212), qreal(0.84485356524970723), qreal(0.83146961230254546), qreal(0.81758481315158371), qreal(0.80320753148064494), qreal(0.78834642762660634), qreal(0.7730104533627371), qreal(0.75720884650648468), qreal(0.74095112535495899), qreal(0.72424708295146689), qreal(0.70710678118654757), qreal(0.68954054473706705), qreal(0.67155895484701855), qreal(0.65317284295377664), qreal(0.63439328416364549), qreal(0.61523159058062693), qreal(0.59569930449243347), qreal(0.57580819141784545), qreal(0.55557023301960218), qreal(0.53499761988709715), qreal(0.51410274419322177), qreal(0.49289819222978415), qreal(0.47139673682599786), qreal(0.44961132965460687), qreal(0.42755509343028203), qreal(0.40524131400498992), qreal(0.38268343236508989), qreal(0.35989503653498833), qreal(0.33688985339222033), qreal(0.31368174039889141), qreal(0.29028467725446239), qreal(0.26671275747489848), qreal(0.24298017990326407), qreal(0.21910124015687005), qreal(0.19509032201612861), qreal(0.17096188876030122), qreal(0.1467304744553618), qreal(0.12241067519921635), qreal(0.098017140329560826), qreal(0.073564563599667732), qreal(0.049067674327417966), qreal(0.024541228522912326), qreal(0.0), qreal(-0.02454122852291208), qreal(-0.049067674327417724), qreal(-0.073564563599667496), qreal(-0.09801714032956059), qreal(-0.1224106751992161), qreal(-0.14673047445536158), qreal(-0.17096188876030097), 
qreal(-0.19509032201612836), qreal(-0.2191012401568698), qreal(-0.24298017990326382), qreal(-0.26671275747489825), qreal(-0.29028467725446211), qreal(-0.31368174039889118), qreal(-0.33688985339222011), qreal(-0.35989503653498811), qreal(-0.38268343236508967), qreal(-0.40524131400498969), qreal(-0.42755509343028181), qreal(-0.44961132965460665), qreal(-0.47139673682599764), qreal(-0.49289819222978393), qreal(-0.51410274419322155), qreal(-0.53499761988709693), qreal(-0.55557023301960196), qreal(-0.57580819141784534), qreal(-0.59569930449243325), qreal(-0.61523159058062671), qreal(-0.63439328416364527), qreal(-0.65317284295377653), qreal(-0.67155895484701844), qreal(-0.68954054473706683), qreal(-0.70710678118654746), qreal(-0.72424708295146678), qreal(-0.74095112535495888), qreal(-0.75720884650648423), qreal(-0.77301045336273666), qreal(-0.78834642762660589), qreal(-0.80320753148064505), qreal(-0.81758481315158382), qreal(-0.83146961230254524), qreal(-0.84485356524970701), qreal(-0.85772861000027201), qreal(-0.87008699110871135), qreal(-0.88192126434835494), qreal(-0.89322430119551521), qreal(-0.90398929312344312), qreal(-0.91420975570353047), qreal(-0.92387953251128652), qreal(-0.93299279883473896), qreal(-0.94154406518302081), qreal(-0.94952818059303667), qreal(-0.95694033573220882), qreal(-0.96377606579543984), qreal(-0.97003125319454397), qreal(-0.97570213003852846), qreal(-0.98078528040323032), qreal(-0.98527764238894111), qreal(-0.9891765099647809), qreal(-0.99247953459871008), qreal(-0.99518472667219693), qreal(-0.99729045667869021), qreal(-0.99879545620517241), qreal(-0.99969881869620425), qreal(-1.0), qreal(-0.99969881869620425), qreal(-0.99879545620517241), qreal(-0.99729045667869021), qreal(-0.99518472667219693), qreal(-0.99247953459871008), qreal(-0.9891765099647809), qreal(-0.98527764238894122), qreal(-0.98078528040323043), qreal(-0.97570213003852857), qreal(-0.97003125319454397), qreal(-0.96377606579543995), qreal(-0.95694033573220894), 
qreal(-0.94952818059303679), qreal(-0.94154406518302092), qreal(-0.93299279883473907), qreal(-0.92387953251128663), qreal(-0.91420975570353058), qreal(-0.90398929312344334), qreal(-0.89322430119551532), qreal(-0.88192126434835505), qreal(-0.87008699110871146), qreal(-0.85772861000027223), qreal(-0.84485356524970723), qreal(-0.83146961230254546), qreal(-0.81758481315158404), qreal(-0.80320753148064528), qreal(-0.78834642762660612), qreal(-0.77301045336273688), qreal(-0.75720884650648457), qreal(-0.74095112535495911), qreal(-0.724247082951467), qreal(-0.70710678118654768), qreal(-0.68954054473706716), qreal(-0.67155895484701866), qreal(-0.65317284295377709), qreal(-0.63439328416364593), qreal(-0.61523159058062737), qreal(-0.59569930449243325), qreal(-0.57580819141784523), qreal(-0.55557023301960218), qreal(-0.53499761988709726), qreal(-0.51410274419322188), qreal(-0.49289819222978426), qreal(-0.47139673682599792), qreal(-0.44961132965460698), qreal(-0.42755509343028253), qreal(-0.40524131400499042), qreal(-0.38268343236509039), qreal(-0.359895036534988), qreal(-0.33688985339222), qreal(-0.31368174039889152), qreal(-0.2902846772544625), qreal(-0.26671275747489859), qreal(-0.24298017990326418), qreal(-0.21910124015687016), qreal(-0.19509032201612872), qreal(-0.17096188876030177), qreal(-0.14673047445536239), qreal(-0.12241067519921603), qreal(-0.098017140329560506), qreal(-0.073564563599667412), qreal(-0.049067674327418091), qreal(-0.024541228522912448) }; QT_END_NAMESPACE
blorenz/phantomjs
src/qt/src/corelib/kernel/qmath.cpp
C++
bsd-3-clause
10,087
/*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #ifndef ClippedSurfaceBoundsCalculator_h_included #define ClippedSurfaceBoundsCalculator_h_included #include "mitkImage.h" #include "mitkPlaneGeometry.h" #include <vector> /** * \brief Find image slices visible on a given plane. * * The class name is not helpful in finding this class. Good suggestions welcome. * * Given a PlaneGeometry (e.g. the 2D plane of a render window), this class * calculates which slices of an mitk::Image are visible on this plane. * Calculation is done for X, Y, and Z direction, the result is available in * form of a pair (minimum,maximum) slice index. * * Such calculations are useful if you want to display information about the * currently visible slice (overlays, statistics, ...) and you don't want to * depend on any prior information about hat the renderwindow is currently showing. * * \warning The interface attempts to look like an ITK filter but it is far from being one. 
*/ namespace mitk { class MITKCORE_EXPORT ClippedSurfaceBoundsCalculator { public: typedef std::vector<mitk::Point3D> PointListType; ClippedSurfaceBoundsCalculator(const mitk::PlaneGeometry* geometry = nullptr, mitk::Image::Pointer image = nullptr); ClippedSurfaceBoundsCalculator(const mitk::BaseGeometry* geometry, mitk::Image::Pointer image); ClippedSurfaceBoundsCalculator(const PointListType pointlist, mitk::Image::Pointer image); void InitializeOutput(); virtual ~ClippedSurfaceBoundsCalculator(); void SetInput(const mitk::PlaneGeometry* geometry, mitk::Image* image); void SetInput(const mitk::BaseGeometry *geometry, mitk::Image *image); void SetInput(const PointListType pointlist, mitk::Image *image); /** \brief Request calculation. How cut/visible slice indices are determined: 1. construct a bounding box of the image. This is the box that connect the outer voxel centers(!). 2. check the edges of this box. 3. intersect each edge with the plane geometry - if the intersection point is within the image box, we update the visible/cut slice indices for all dimensions. - else we ignore the cut */ void Update(); /** \brief Minimum (first) and maximum (second) slice index. */ typedef std::pair<int, int> OutputType; /** \brief What X coordinates (slice indices) are cut/visible in given plane. */ OutputType GetMinMaxSpatialDirectionX(); /** \brief What Y coordinates (slice indices) are cut/visible in given plane. */ OutputType GetMinMaxSpatialDirectionY(); /** \brief What Z coordinates (slice indices) are cut/visible in given plane. */ OutputType GetMinMaxSpatialDirectionZ(); protected: void CalculateIntersectionPoints(const mitk::PlaneGeometry* geometry); void CalculateIntersectionPoints( PointListType pointList ); /** * \brief Clips the resulting index-coordinates to make sure they do * not exceed the imagebounds. 
*/ void EnforceImageBounds(); mitk::PlaneGeometry::ConstPointer m_PlaneGeometry; mitk::BaseGeometry::ConstPointer m_Geometry3D; mitk::Image::Pointer m_Image; std::vector<mitk::Point3D> m_ObjectPointsInWorldCoordinates; std::vector< OutputType > m_MinMaxOutput; }; } //namespace mitk #endif
NifTK/MITK
Modules/Core/include/mitkClippedSurfaceBoundsCalculator.h
C
bsd-3-clause
3,927
#include "bli_config.h" #include "bli_system.h" #include "bli_type_defs.h" #include "bli_cblas.h" #ifdef BLIS_ENABLE_CBLAS /* * * cblas_ztrsm.c * This program is a C interface to ztrsm. * Written by Keita Teranishi * 4/8/1998 * */ #include "cblas.h" #include "cblas_f77.h" void cblas_ztrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, const int M, const int N, const void *alpha, const void *A, const int lda, void *B, const int ldb) { char UL, TA, SD, DI; #ifdef F77_CHAR F77_CHAR F77_TA, F77_UL, F77_SD, F77_DI; #else #define F77_TA &TA #define F77_UL &UL #define F77_SD &SD #define F77_DI &DI #endif #ifdef F77_INT F77_INT F77_M=M, F77_N=N, F77_lda=lda, F77_ldb=ldb; #else #define F77_M M #define F77_N N #define F77_lda lda #define F77_ldb ldb #endif extern int CBLAS_CallFromC; extern int RowMajorStrg; RowMajorStrg = 0; CBLAS_CallFromC = 1; if( Order == CblasColMajor ) { if( Side == CblasRight) SD='R'; else if ( Side == CblasLeft ) SD='L'; else { cblas_xerbla(2, "cblas_ztrsm", "Illegal Side setting, %d\n", Side); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } if( Uplo == CblasUpper) UL='U'; else if ( Uplo == CblasLower ) UL='L'; else { cblas_xerbla(3, "cblas_ztrsm", "Illegal Uplo setting, %d\n", Uplo); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } if( TransA == CblasTrans) TA ='T'; else if ( TransA == CblasConjTrans ) TA='C'; else if ( TransA == CblasNoTrans ) TA='N'; else { cblas_xerbla(4, "cblas_ztrsm", "Illegal Trans setting, %d\n", TransA); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } if( Diag == CblasUnit ) DI='U'; else if ( Diag == CblasNonUnit ) DI='N'; else { cblas_xerbla(5, "cblas_ztrsm", "Illegal Diag setting, %d\n", Diag); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } #ifdef F77_CHAR F77_UL = C2F_CHAR(&UL); F77_TA = C2F_CHAR(&TA); F77_SD = C2F_CHAR(&SD); F77_DI = C2F_CHAR(&DI); #endif F77_ztrsm(F77_SD, F77_UL, F77_TA, F77_DI, &F77_M, &F77_N, 
alpha, A, &F77_lda, B, &F77_ldb); } else if (Order == CblasRowMajor) { RowMajorStrg = 1; if( Side == CblasRight) SD='L'; else if ( Side == CblasLeft ) SD='R'; else { cblas_xerbla(2, "cblas_ztrsm", "Illegal Side setting, %d\n", Side); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } if( Uplo == CblasUpper) UL='L'; else if ( Uplo == CblasLower ) UL='U'; else { cblas_xerbla(3, "cblas_ztrsm", "Illegal Uplo setting, %d\n", Uplo); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } if( TransA == CblasTrans) TA ='T'; else if ( TransA == CblasConjTrans ) TA='C'; else if ( TransA == CblasNoTrans ) TA='N'; else { cblas_xerbla(4, "cblas_ztrsm", "Illegal Trans setting, %d\n", TransA); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } if( Diag == CblasUnit ) DI='U'; else if ( Diag == CblasNonUnit ) DI='N'; else { cblas_xerbla(5, "cblas_ztrsm", "Illegal Diag setting, %d\n", Diag); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } #ifdef F77_CHAR F77_UL = C2F_CHAR(&UL); F77_TA = C2F_CHAR(&TA); F77_SD = C2F_CHAR(&SD); F77_DI = C2F_CHAR(&DI); #endif F77_ztrsm(F77_SD, F77_UL, F77_TA, F77_DI, &F77_N, &F77_M, alpha, A, &F77_lda, B, &F77_ldb); } else cblas_xerbla(1, "cblas_ztrsm", "Illegal Order setting, %d\n", Order); CBLAS_CallFromC = 0; RowMajorStrg = 0; return; } #endif
xianyi/blis
frame/compat/cblas/src/cblas_ztrsm.c
C
bsd-3-clause
4,130
// Copyright (c) 2013, Facebook, Inc. All rights reserved. // This source code is licensed under the BSD-style license found in the // LICENSE file in the root directory of this source tree. An additional grant // of patent rights can be found in the PATENTS file in the same directory. // Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. #ifndef STORAGE_ROCKSDB_INCLUDE_OPTIONS_H_ #define STORAGE_ROCKSDB_INCLUDE_OPTIONS_H_ #include <stddef.h> #include <stdint.h> #include <string> #include <memory> #include <vector> #include <limits> #include <unordered_map> #include "rocksdb/version.h" #include "rocksdb/listener.h" #include "rocksdb/universal_compaction.h" #ifdef max #undef max #endif namespace rocksdb { class Cache; class CompactionFilter; class CompactionFilterFactory; class CompactionFilterFactoryV2; class Comparator; class Env; enum InfoLogLevel : unsigned char; class FilterPolicy; class Logger; class MergeOperator; class Snapshot; class TableFactory; class MemTableRepFactory; class TablePropertiesCollectorFactory; class RateLimiter; class DeleteScheduler; class Slice; class SliceTransform; class Statistics; class InternalKeyComparator; // DB contents are stored in a set of blocks, each of which holds a // sequence of key,value pairs. Each block may be compressed before // being stored in a file. The following enum describes which // compression method (if any) is used to compress a block. enum CompressionType : char { // NOTE: do not change the values of existing entries, as these are // part of the persistent format on disk. kNoCompression = 0x0, kSnappyCompression = 0x1, kZlibCompression = 0x2, kBZip2Compression = 0x3, kLZ4Compression = 0x4, kLZ4HCCompression = 0x5, // zstd format is not finalized yet so it's subject to changes. 
kZSTDNotFinalCompression = 0x40, }; enum CompactionStyle : char { // level based compaction style kCompactionStyleLevel = 0x0, // Universal compaction style // Not supported in ROCKSDB_LITE. kCompactionStyleUniversal = 0x1, // FIFO compaction style // Not supported in ROCKSDB_LITE kCompactionStyleFIFO = 0x2, // Disable background compaction. Compaction jobs are submitted // via CompactFiles(). // Not supported in ROCKSDB_LITE kCompactionStyleNone = 0x3, }; enum class WALRecoveryMode : char { // Original levelDB recovery // We tolerate incomplete record in trailing data on all logs // Use case : This is legacy behavior (default) kTolerateCorruptedTailRecords = 0x00, // Recover from clean shutdown // We don't expect to find any corruption in the WAL // Use case : This is ideal for unit tests and rare applications that // can require high consistency gaurantee kAbsoluteConsistency = 0x01, // Recover to point-in-time consistency // We stop the WAL playback on discovering WAL inconsistency // Use case : Ideal for systems that have disk controller cache like // hard disk, SSD without super capacitor that store related data kPointInTimeRecovery = 0x02, // Recovery after a disaster // We ignore any corruption in the WAL and try to salvage as much data as // possible // Use case : Ideal for last ditch effort to recover data or systems that // operate with low grade unrelated data kSkipAnyCorruptedRecords = 0x03, }; struct CompactionOptionsFIFO { // once the total sum of table files reaches this, we will delete the oldest // table file // Default: 1GB uint64_t max_table_files_size; CompactionOptionsFIFO() : max_table_files_size(1 * 1024 * 1024 * 1024) {} }; // Compression options for different compression algorithms like Zlib struct CompressionOptions { int window_bits; int level; int strategy; CompressionOptions() : window_bits(-14), level(-1), strategy(0) {} CompressionOptions(int wbits, int _lev, int _strategy) : window_bits(wbits), level(_lev), strategy(_strategy) {} }; 
enum UpdateStatus {     // Return status for the inplace update callback
  UPDATE_FAILED = 0,    // Nothing to update
  UPDATED_INPLACE = 1,  // Value updated inplace
  UPDATED = 2,          // No inplace update. Merged value set
};

// A DB storage path plus a soft cap on the total bytes of files kept under
// it (consumed by DBOptions::db_paths).
struct DbPath {
  std::string path;
  uint64_t target_size;  // Target size of total files under the path, in byte.

  DbPath() : target_size(0) {}
  DbPath(const std::string& p, uint64_t t) : path(p), target_size(t) {}
};

struct Options;

struct ColumnFamilyOptions {
  // Some functions that make it easier to optimize RocksDB

  // Use this if you don't need to keep the data sorted, i.e. you'll never use
  // an iterator, only Put() and Get() API calls
  //
  // Not supported in ROCKSDB_LITE
  ColumnFamilyOptions* OptimizeForPointLookup(
      uint64_t block_cache_size_mb);

  // Default values for some parameters in ColumnFamilyOptions are not
  // optimized for heavy workloads and big datasets, which means you might
  // observe write stalls under some conditions. As a starting point for tuning
  // RocksDB options, use the following two functions:
  // * OptimizeLevelStyleCompaction -- optimizes level style compaction
  // * OptimizeUniversalStyleCompaction -- optimizes universal style compaction
  // Universal style compaction is focused on reducing Write Amplification
  // Factor for big data sets, but increases Space Amplification. You can learn
  // more about the different styles here:
  // https://github.com/facebook/rocksdb/wiki/Rocksdb-Architecture-Guide
  // Make sure to also call IncreaseParallelism(), which will provide the
  // biggest performance gains.
  // Note: we might use more memory than memtable_memory_budget during high
  // write rate period
  //
  // OptimizeUniversalStyleCompaction is not supported in ROCKSDB_LITE
  ColumnFamilyOptions* OptimizeLevelStyleCompaction(
      uint64_t memtable_memory_budget = 512 * 1024 * 1024);
  ColumnFamilyOptions* OptimizeUniversalStyleCompaction(
      uint64_t memtable_memory_budget = 512 * 1024 * 1024);

  // -------------------
  // Parameters that affect behavior

  // Comparator used to define the order of keys in the table.
  // Default: a comparator that uses lexicographic byte-wise ordering
  //
  // REQUIRES: The client must ensure that the comparator supplied
  // here has the same name and orders keys *exactly* the same as the
  // comparator provided to previous open calls on the same DB.
  const Comparator* comparator;

  // REQUIRES: The client must provide a merge operator if Merge operation
  // needs to be accessed. Calling Merge on a DB without a merge operator
  // would result in Status::NotSupported. The client must ensure that the
  // merge operator supplied here has the same name and *exactly* the same
  // semantics as the merge operator provided to previous open calls on
  // the same DB. The only exception is reserved for upgrade, where a DB
  // previously without a merge operator is introduced to Merge operation
  // for the first time. It's necessary to specify a merge operator when
  // opening the DB in this case.
  // Default: nullptr
  std::shared_ptr<MergeOperator> merge_operator;

  // A single CompactionFilter instance to call into during compaction.
  // Allows an application to modify/delete a key-value during background
  // compaction.
  //
  // If the client requires a new compaction filter to be used for different
  // compaction runs, it can specify compaction_filter_factory instead of this
  // option. The client should specify only one of the two.
  // compaction_filter takes precedence over compaction_filter_factory if
  // client specifies both.
  //
  // If multithreaded compaction is being used, the supplied CompactionFilter
  // instance may be used from different threads concurrently and so should be
  // thread-safe.
  //
  // Default: nullptr
  const CompactionFilter* compaction_filter;

  // This is a factory that provides compaction filter objects which allow
  // an application to modify/delete a key-value during background compaction.
  //
  // A new filter will be created on each compaction run. If multithreaded
  // compaction is being used, each created CompactionFilter will only be used
  // from a single thread and so does not need to be thread-safe.
  //
  // Default: nullptr
  std::shared_ptr<CompactionFilterFactory> compaction_filter_factory;

  // This is deprecated. Talk to us if you depend on
  // compaction_filter_factory_v2 and we'll put it back
  // std::shared_ptr<CompactionFilterFactoryV2> compaction_filter_factory_v2;

  // -------------------
  // Parameters that affect performance

  // Amount of data to build up in memory (backed by an unsorted log
  // on disk) before converting to a sorted on-disk file.
  //
  // Larger values increase performance, especially during bulk loads.
  // Up to max_write_buffer_number write buffers may be held in memory
  // at the same time,
  // so you may wish to adjust this parameter to control memory usage.
  // Also, a larger write buffer will result in a longer recovery time
  // the next time the database is opened.
  //
  // Note that write_buffer_size is enforced per column family.
  // See db_write_buffer_size for sharing memory across column families.
  //
  // Default: 4MB
  //
  // Dynamically changeable through SetOptions() API
  size_t write_buffer_size;

  // The maximum number of write buffers that are built up in memory.
  // The default and the minimum number is 2, so that when 1 write buffer
  // is being flushed to storage, new writes can continue to the other
  // write buffer.
  //
  // Default: 2
  //
  // Dynamically changeable through SetOptions() API
  int max_write_buffer_number;

  // The minimum number of write buffers that will be merged together
  // before writing to storage. If set to 1, then
  // all write buffers are flushed to L0 as individual files and this increases
  // read amplification because a get request has to check in all of these
  // files. Also, an in-memory merge may result in writing less
  // data to storage if there are duplicate records in each of these
  // individual write buffers. Default: 1
  int min_write_buffer_number_to_merge;

  // The total maximum number of write buffers to maintain in memory including
  // copies of buffers that have already been flushed. Unlike
  // max_write_buffer_number, this parameter does not affect flushing.
  // This controls the minimum amount of write history that will be available
  // in memory for conflict checking when Transactions are used.
  // If this value is too low, some transactions may fail at commit time due
  // to not being able to determine whether there were any write conflicts.
  //
  // Setting this value to 0 will cause write buffers to be freed immediately
  // after they are flushed.
  // If this value is set to -1, 'max_write_buffer_number' will be used.
  //
  // Default:
  // If using an OptimisticTransactionDB, the default value will be set to the
  // value
  // of 'max_write_buffer_number' if it is not explicitly set by the user.
  // Otherwise, the default is 0.
  int max_write_buffer_number_to_maintain;

  // Compress blocks using the specified compression algorithm. This
  // parameter can be changed dynamically.
  //
  // Default: kSnappyCompression, if it's supported. If snappy is not linked
  // with the library, the default is kNoCompression.
  //
  // Typical speeds of kSnappyCompression on an Intel(R) Core(TM)2 2.4GHz:
  //    ~200-500MB/s compression
  //    ~400-800MB/s decompression
  // Note that these speeds are significantly faster than most
  // persistent storage speeds, and therefore it is typically never
  // worth switching to kNoCompression. Even if the input data is
  // incompressible, the kSnappyCompression implementation will
  // efficiently detect that and will switch to uncompressed mode.
  CompressionType compression;

  // Different levels can have different compression policies. There
  // are cases where most lower levels would like to use quick compression
  // algorithms while the higher levels (which have more data) use
  // compression algorithms that have better compression but could
  // be slower. This array, if non-empty, should have an entry for
  // each level of the database; these override the value specified in
  // the previous field 'compression'.
  //
  // NOTICE if level_compaction_dynamic_level_bytes=true,
  // compression_per_level[0] still determines L0, but other elements
  // of the array are based on base level (the level L0 files are merged
  // to), and may not match the level users see from info log for metadata.
  // If L0 files are merged to level-n, then, for i>0, compression_per_level[i]
  // determines compaction type for level n+i-1.
  // For example, if we have 5 levels, and we determine to merge L0
  // data to L4 (which means L1..L3 will be empty), then the new files go to
  // L4 uses compression type compression_per_level[1].
  // If now L0 is merged to L2. Data goes to L2 will be compressed
  // according to compression_per_level[1], L3 using compression_per_level[2]
  // and L4 using compression_per_level[3]. Compaction for each level can
  // change when data grows.
  std::vector<CompressionType> compression_per_level;

  // different options for compression algorithms
  CompressionOptions compression_opts;

  // If non-nullptr, use the specified function to determine the
  // prefixes for keys. These prefixes will be placed in the filter.
  // Depending on the workload, this can reduce the number of read-IOP
  // cost for scans when a prefix is passed via ReadOptions to
  // db.NewIterator(). For prefix filtering to work properly,
  // "prefix_extractor" and "comparator" must be such that the following
  // properties hold:
  //
  // 1) key.starts_with(prefix(key))
  // 2) Compare(prefix(key), key) <= 0.
  // 3) If Compare(k1, k2) <= 0, then Compare(prefix(k1), prefix(k2)) <= 0
  // 4) prefix(prefix(key)) == prefix(key)
  //
  // Default: nullptr
  std::shared_ptr<const SliceTransform> prefix_extractor;

  // Number of levels for this database
  int num_levels;

  // Number of files to trigger level-0 compaction. A value <0 means that
  // level-0 compaction will not be triggered by number of files at all.
  //
  // Default: 4
  //
  // Dynamically changeable through SetOptions() API
  int level0_file_num_compaction_trigger;

  // Soft limit on number of level-0 files. We start slowing down writes at this
  // point. A value <0 means that no writing slow down will be triggered by
  // number of files in level-0.
  //
  // Dynamically changeable through SetOptions() API
  int level0_slowdown_writes_trigger;

  // Maximum number of level-0 files.  We stop writes at this point.
  //
  // Dynamically changeable through SetOptions() API
  int level0_stop_writes_trigger;

  // This does not do anything anymore. Deprecated.
  int max_mem_compaction_level;

  // Target file size for compaction.
  // target_file_size_base is per-file size for level-1.
  // Target file size for level L can be calculated by
  // target_file_size_base * (target_file_size_multiplier ^ (L-1))
  // For example, if target_file_size_base is 2MB and
  // target_file_size_multiplier is 10, then each file on level-1 will
  // be 2MB, and each file on level 2 will be 20MB,
  // and each file on level-3 will be 200MB.
  //
  // Default: 2MB.
  //
  // Dynamically changeable through SetOptions() API
  uint64_t target_file_size_base;

  // By default target_file_size_multiplier is 1, which means
  // by default files in different levels will have similar size.
  //
  // Dynamically changeable through SetOptions() API
  int target_file_size_multiplier;

  // Control maximum total data size for a level.
  // max_bytes_for_level_base is the max total for level-1.
  // Maximum number of bytes for level L can be calculated as
  // (max_bytes_for_level_base) * (max_bytes_for_level_multiplier ^ (L-1))
  // For example, if max_bytes_for_level_base is 20MB, and if
  // max_bytes_for_level_multiplier is 10, total data size for level-1
  // will be 20MB, total file size for level-2 will be 200MB,
  // and total file size for level-3 will be 2GB.
  //
  // Default: 10MB.
  //
  // Dynamically changeable through SetOptions() API
  uint64_t max_bytes_for_level_base;

  // If true, RocksDB will pick target size of each level dynamically.
  // We will pick a base level b >= 1. L0 will be directly merged into level b,
  // instead of always into level 1. Level 1 to b-1 need to be empty.
  // We try to pick b and its target size so that
  // 1. target size is in the range of
  //   (max_bytes_for_level_base / max_bytes_for_level_multiplier,
  //    max_bytes_for_level_base]
  // 2. target size of the last level (level num_levels-1) equals to extra size
  //    of the level.
  // At the same time max_bytes_for_level_multiplier and
  // max_bytes_for_level_multiplier_additional are still satisfied.
  //
  // With this option on, from an empty DB, we make last level the base level,
  // which means merging L0 data into the last level, until it exceeds
  // max_bytes_for_level_base. And then we make the second last level to be
  // base level, to start to merge L0 data to second last level, with its
  // target size to be 1/max_bytes_for_level_multiplier of the last level's
  // extra size. After the data accumulates more so that we need to move the
  // base level to the third last one, and so on.
  //
  // For example, assume max_bytes_for_level_multiplier=10, num_levels=6,
  // and max_bytes_for_level_base=10MB.
  // Target sizes of level 1 to 5 starts with:
  // [- - - - 10MB]
  // with the base level being level 5. Target sizes of level 1 to 4 are not
  // applicable because they will not be used.
  // Until the size of Level 5 grows to more than 10MB, say 11MB, we make
  // base target to level 4 and now the targets looks like:
  // [- - - 1.1MB 11MB]
  // While data are accumulated, size targets are tuned based on actual data
  // of level 5. When level 5 has 50MB of data, the target is like:
  // [- - - 5MB 50MB]
  // Until level 5's actual size is more than 100MB, say 101MB. Now if we keep
  // level 4 to be the base level, its target size needs to be 10.1MB, which
  // doesn't satisfy the target size range. So now we make level 3 the target
  // size and the target sizes of the levels look like:
  // [- - 1.01MB 10.1MB 101MB]
  // In the same way, while level 5 further grows, all levels' targets grow,
  // like
  // [- - 5MB 50MB 500MB]
  // Until level 5 exceeds 1000MB and becomes 1001MB, we make level 2 the
  // base level and make levels' target sizes like this:
  // [- 1.001MB 10.01MB 100.1MB 1001MB]
  // and go on...
  //
  // By doing it, we give max_bytes_for_level_multiplier a priority against
  // max_bytes_for_level_base, for a more predictable LSM tree shape. It is
  // useful to limit worst case space amplification.
  //
  // max_bytes_for_level_multiplier_additional is ignored with this flag on.
  //
  // Turning this feature on or off for an existing DB can cause unexpected
  // LSM tree structure so it's not recommended.
  //
  // NOTE: this option is experimental
  //
  // Default: false
  bool level_compaction_dynamic_level_bytes;

  // Default: 10.
  //
  // Dynamically changeable through SetOptions() API
  int max_bytes_for_level_multiplier;

  // Different max-size multipliers for different levels.
  // These are multiplied by max_bytes_for_level_multiplier to arrive
  // at the max-size of each level.
  //
  // Default: 1
  //
  // Dynamically changeable through SetOptions() API
  std::vector<int> max_bytes_for_level_multiplier_additional;

  // Maximum number of bytes in all compacted files.  We avoid expanding
  // the lower level file set of a compaction if it would make the
  // total compaction cover more than
  // (expanded_compaction_factor * targetFileSizeLevel()) many bytes.
  //
  // Dynamically changeable through SetOptions() API
  int expanded_compaction_factor;

  // Maximum number of bytes in all source files to be compacted in a
  // single compaction run. We avoid picking too many files in the
  // source level so that we do not exceed the total source bytes
  // for compaction to exceed
  // (source_compaction_factor * targetFileSizeLevel()) many bytes.
  // Default: 1, i.e. pick maxfilesize amount of data as the source of
  // a compaction.
  //
  // Dynamically changeable through SetOptions() API
  int source_compaction_factor;

  // Control maximum bytes of overlaps in grandparent (i.e., level+2) before we
  // stop building a single file in a level->level+1 compaction.
  //
  // Dynamically changeable through SetOptions() API
  int max_grandparent_overlap_factor;

  // Puts are delayed to options.delayed_write_rate when any level has a
  // compaction score that exceeds soft_rate_limit. This is ignored when == 0.0.
  // CONSTRAINT: soft_rate_limit <= hard_rate_limit. If this constraint does not
  // hold, RocksDB will set soft_rate_limit = hard_rate_limit
  //
  // Default: 0 (disabled)
  //
  // Dynamically changeable through SetOptions() API
  double soft_rate_limit;

  // DEPRECATED -- this option is no longer used
  double hard_rate_limit;

  // DEPRECATED -- this option is no longer used
  unsigned int rate_limit_delay_max_milliseconds;

  // size of one block in arena memory allocation.
  // If <= 0, a proper value is automatically calculated (usually 1/10 of
  // writer_buffer_size).
  //
  // There are two additional restrictions on the specified size:
  // (1) size should be in the range of [4096, 2 << 30] and
  // (2) be the multiple of the CPU word (which helps with the memory
  // alignment).
  //
  // We'll automatically check and adjust the size number to make sure it
  // conforms to the restrictions.
  //
  // Default: 0
  //
  // Dynamically changeable through SetOptions() API
  size_t arena_block_size;

  // Disable automatic compactions. Manual compactions can still
  // be issued on this column family
  //
  // Dynamically changeable through SetOptions() API
  bool disable_auto_compactions;

  // DEPRECATED
  // Does not have any effect.
  bool purge_redundant_kvs_while_flush;

  // The compaction style. Default: kCompactionStyleLevel
  CompactionStyle compaction_style;

  // If true, compaction will verify checksum on every read that happens
  // as part of compaction
  //
  // Default: true
  //
  // Dynamically changeable through SetOptions() API
  bool verify_checksums_in_compaction;

  // The options needed to support Universal Style compactions
  CompactionOptionsUniversal compaction_options_universal;

  // The options for FIFO compaction style
  CompactionOptionsFIFO compaction_options_fifo;

  // Use KeyMayExist API to filter deletes when this is true.
  // If KeyMayExist returns false, i.e. the key definitely does not exist, then
  // the delete is a noop. KeyMayExist only incurs in-memory look up.
  // This optimization avoids writing the delete to storage when appropriate.
  //
  // Default: false
  //
  // Dynamically changeable through SetOptions() API
  bool filter_deletes;

  // An iteration->Next() sequentially skips over keys with the same
  // user-key unless this option is set. This number specifies the number
  // of keys (with the same userkey) that will be sequentially
  // skipped before a reseek is issued.
  //
  // Default: 8
  //
  // Dynamically changeable through SetOptions() API
  uint64_t max_sequential_skip_in_iterations;

  // This is a factory that provides MemTableRep objects.
  // Default: a factory that provides a skip-list-based implementation of
  // MemTableRep.
  std::shared_ptr<MemTableRepFactory> memtable_factory;

  // This is a factory that provides TableFactory objects.
  // Default: a block-based table factory that provides a default
  // implementation of TableBuilder and TableReader with default
  // BlockBasedTableOptions.
  std::shared_ptr<TableFactory> table_factory;

  // Block-based table related options are moved to BlockBasedTableOptions.
  // Related options that were originally here but now moved include:
  //   no_block_cache
  //   block_cache
  //   block_cache_compressed
  //   block_size
  //   block_size_deviation
  //   block_restart_interval
  //   filter_policy
  //   whole_key_filtering
  // If you'd like to customize some of these options, you will need to
  // use NewBlockBasedTableFactory() to construct a new table factory.

  // This option allows users to collect their own interested statistics of
  // the tables.
  // Default: empty vector -- no user-defined statistics collection will be
  // performed.
  typedef std::vector<std::shared_ptr<TablePropertiesCollectorFactory>>
      TablePropertiesCollectorFactories;
  TablePropertiesCollectorFactories table_properties_collector_factories;

  // Allows thread-safe inplace updates. If this is true, there is no way to
  // achieve point-in-time consistency using snapshot or iterator (assuming
  // concurrent updates). Hence iterator and multi-get will return results
  // which are not consistent as of any point-in-time.
  // If inplace_callback function is not set,
  //   Put(key, new_value) will update inplace the existing_value iff
  //   * key exists in current memtable
  //   * new sizeof(new_value) <= sizeof(existing_value)
  //   * existing_value for that key is a put i.e. kTypeValue
  // If inplace_callback function is set, check doc for inplace_callback.
  // Default: false.
  bool inplace_update_support;

  // Number of locks used for inplace update
  // Default: 10000, if inplace_update_support = true, else 0.
  //
  // Dynamically changeable through SetOptions() API
  size_t inplace_update_num_locks;

  // existing_value - pointer to previous value (from both memtable and sst).
  //                  nullptr if key doesn't exist
  // existing_value_size - pointer to size of existing_value).
  //                       nullptr if key doesn't exist
  // delta_value - Delta value to be merged with the existing_value.
  //               Stored in transaction logs.
  // merged_value - Set when delta is applied on the previous value.
  //
  // Applicable only when inplace_update_support is true,
  // this callback function is called at the time of updating the memtable
  // as part of a Put operation, lets say Put(key, delta_value). It allows the
  // 'delta_value' specified as part of the Put operation to be merged with
  // an 'existing_value' of the key in the database.
  //
  // If the merged value is smaller in size than the 'existing_value',
  // then this function can update the 'existing_value' buffer inplace and
  // the corresponding 'existing_value'_size pointer, if it wishes to.
  // The callback should return UpdateStatus::UPDATED_INPLACE.
  // In this case. (In this case, the snapshot-semantics of the rocksdb
  // Iterator is not atomic anymore).
  //
  // If the merged value is larger in size than the 'existing_value' or the
  // application does not wish to modify the 'existing_value' buffer inplace,
  // then the merged value should be returned via *merged_value. It is set by
  // merging the 'existing_value' and the Put 'delta_value'. The callback should
  // return UpdateStatus::UPDATED in this case. This merged value will be added
  // to the memtable.
  //
  // If merging fails or the application does not wish to take any action,
  // then the callback should return UpdateStatus::UPDATE_FAILED.
  //
  // Please remember that the original call from the application is Put(key,
  // delta_value). So the transaction log (if enabled) will still contain (key,
  // delta_value). The 'merged_value' is not stored in the transaction log.
  // Hence the inplace_callback function should be consistent across db reopens.
  //
  // Default: nullptr
  UpdateStatus (*inplace_callback)(char* existing_value,
                                   uint32_t* existing_value_size,
                                   Slice delta_value,
                                   std::string* merged_value);

  // if prefix_extractor is set and bloom_bits is not 0, create prefix bloom
  // for memtable
  //
  // Dynamically changeable through SetOptions() API
  uint32_t memtable_prefix_bloom_bits;

  // number of hash probes per key
  //
  // Dynamically changeable through SetOptions() API
  uint32_t memtable_prefix_bloom_probes;

  // Page size for huge page TLB for bloom in memtable. If <=0, not allocate
  // from huge page TLB but from malloc.
  // Need to reserve huge pages for it to be allocated. For example:
  //      sysctl -w vm.nr_hugepages=20
  // See linux doc Documentation/vm/hugetlbpage.txt
  //
  // Dynamically changeable through SetOptions() API
  size_t memtable_prefix_bloom_huge_page_tlb_size;

  // Control locality of bloom filter probes to improve cache miss rate.
  // This option only applies to memtable prefix bloom and plaintable
  // prefix bloom. It essentially limits every bloom checking to one cache line.
  // This optimization is turned off when set to 0, and positive number to turn
  // it on.
  // Default: 0
  uint32_t bloom_locality;

  // Maximum number of successive merge operations on a key in the memtable.
  //
  // When a merge operation is added to the memtable and the maximum number of
  // successive merges is reached, the value of the key will be calculated and
  // inserted into the memtable instead of the merge operation. This will
  // ensure that there are never more than max_successive_merges merge
  // operations in the memtable.
  //
  // Default: 0 (disabled)
  //
  // Dynamically changeable through SetOptions() API
  size_t max_successive_merges;

  // The number of partial merge operands to accumulate before partial
  // merge will be performed. Partial merge will not be called
  // if the list of values to merge is less than min_partial_merge_operands.
  //
  // If min_partial_merge_operands < 2, then it will be treated as 2.
  //
  // Default: 2
  uint32_t min_partial_merge_operands;

  // This flag specifies that the implementation should optimize the filters
  // mainly for cases where keys are found rather than also optimize for keys
  // missed. This would be used in cases where the application knows that
  // there are very few misses or the performance in the case of misses is not
  // important.
  //
  // For now, this flag allows us to not store filters for the last level i.e
  // the largest level which contains data of the LSM store. For keys which
  // are hits, the filters in this level are not useful because we will search
  // for the data anyway. NOTE: the filters in other levels are still useful
  // even for key hit because they tell us whether to look in that level or go
  // to the higher level.
  //
  // Default: false
  bool optimize_filters_for_hits;

  // After writing every SST file, reopen it and read all the keys.
  // Default: false
  bool paranoid_file_checks;

  // Measure IO stats in compactions, if true.
  // Default: false
  bool compaction_measure_io_stats;

  // Create ColumnFamilyOptions with default values for all fields
  ColumnFamilyOptions();
  // Create ColumnFamilyOptions from Options
  explicit ColumnFamilyOptions(const Options& options);

  void Dump(Logger* log) const;
};

struct DBOptions {
  // Some functions that make it easier to optimize RocksDB

#ifndef ROCKSDB_LITE
  // By default, RocksDB uses only one background thread for flush and
  // compaction. Calling this function will set it up such that total of
  // `total_threads` is used. Good value for `total_threads` is the number of
  // cores. You almost definitely want to call this function if your system is
  // bottlenecked by RocksDB.
  DBOptions* IncreaseParallelism(int total_threads = 16);
#endif  // ROCKSDB_LITE

  // If true, the database will be created if it is missing.
  // Default: false
  bool create_if_missing;

  // If true, missing column families will be automatically created.
  // Default: false
  bool create_missing_column_families;

  // If true, an error is raised if the database already exists.
  // Default: false
  bool error_if_exists;

  // If true, RocksDB will aggressively check consistency of the data.
  // Also, if any of the  writes to the database fails (Put, Delete, Merge,
  // Write), the database will switch to read-only mode and fail all other
  // Write operations.
  // In most cases you want this to be set to true.
  // Default: true
  bool paranoid_checks;

  // Use the specified object to interact with the environment,
  // e.g. to read/write files, schedule background work, etc.
  // Default: Env::Default()
  Env* env;

  // Use to control write rate of flush and compaction. Flush has higher
  // priority than compaction. Rate limiting is disabled if nullptr.
  // If rate limiter is enabled, bytes_per_sync is set to 1MB by default.
  // Default: nullptr
  std::shared_ptr<RateLimiter> rate_limiter;

  // Use to control files deletion rate, can be used among multiple
  // RocksDB instances. delete_scheduler is only used to delete table files that
  // need to be deleted from the first db_path (db_name if db_paths is empty),
  // other file types and other db_paths won't be affected by delete_scheduler.
  // Default: nullptr (disabled)
  std::shared_ptr<DeleteScheduler> delete_scheduler;

  // Any internal progress/error information generated by the db will
  // be written to info_log if it is non-nullptr, or to a file stored
  // in the same directory as the DB contents if info_log is nullptr.
  // Default: nullptr
  std::shared_ptr<Logger> info_log;

  InfoLogLevel info_log_level;

  // Number of open files that can be used by the DB.  You may need to
  // increase this if your database has a large working set. Value -1 means
  // files opened are always kept open. You can estimate number of files based
  // on target_file_size_base and target_file_size_multiplier for level-based
  // compaction. For universal-style compaction, you can usually set it to -1.
  // Default: 5000
  int max_open_files;

  // If max_open_files is -1, DB will open all files on DB::Open(). You can
  // use this option to increase the number of threads used to open the files.
  // Default: 1
  int max_file_opening_threads;

  // Once write-ahead logs exceed this size, we will start forcing the flush of
  // column families whose memtables are backed by the oldest live WAL file
  // (i.e. the ones that are causing all the space amplification). If set to 0
  // (default), we will dynamically choose the WAL size limit to be
  // [sum of all write_buffer_size * max_write_buffer_number] * 4
  // Default: 0
  uint64_t max_total_wal_size;

  // If non-null, then we should collect metrics about database operations
  // Statistics objects should not be shared between DB instances as
  // it does not use any locks to prevent concurrent updates.
  std::shared_ptr<Statistics> statistics;

  // If true, then the contents of manifest and data files are not synced
  // to stable storage. Their contents remain in the OS buffers till the
  // OS decides to flush them. This option is good for bulk-loading
  // of data. Once the bulk-loading is complete, please issue a
  // sync to the OS to flush all dirty buffers to stable storage.
  // Default: false
  bool disableDataSync;

  // If true, then every store to stable storage will issue a fsync.
  // If false, then every store to stable storage will issue a fdatasync.
  // This parameter should be set to true while storing data to
  // filesystem like ext3 that can lose files after a reboot.
  // Default: false
  bool use_fsync;

  // A list of paths where SST files can be put into, with its target size.
  // Newer data is placed into paths specified earlier in the vector while
  // older data gradually moves to paths specified later in the vector.
  //
  // For example, you have a flash device with 10GB allocated for the DB,
  // as well as a hard drive of 2TB, you should config it to be:
  //   [{"/flash_path", 10GB}, {"/hard_drive", 2TB}]
  //
  // The system will try to guarantee data under each path is close to but
  // not larger than the target size. But current and future file sizes used
  // by determining where to place a file are based on best-effort estimation,
  // which means there is a chance that the actual size under the directory
  // is slightly more than target size under some workloads. User should give
  // some buffer room for those cases.
  //
  // If none of the paths has sufficient room to place a file, the file will
  // be placed to the last path anyway, despite the target size.
  //
  // Placing newer data to earlier paths is also best-effort. User should
  // expect user files to be placed in higher levels in some extreme cases.
  //
  // If left empty, only one path will be used, which is db_name passed when
  // opening the DB.
  // Default: empty
  std::vector<DbPath> db_paths;

  // This specifies the info LOG dir.
  // If it is empty, the log files will be in the same dir as data.
  // If it is non empty, the log files will be in the specified dir,
  // and the db data dir's absolute path will be used as the log file
  // name's prefix.
  std::string db_log_dir;

  // This specifies the absolute dir path for write-ahead logs (WAL).
  // If it is empty, the log files will be in the same dir as data,
  //   dbname is used as the data dir by default
  // If it is non empty, the log files will be in kept the specified dir.
  // When destroying the db,
  //   all log files in wal_dir and the dir itself is deleted
  std::string wal_dir;

  // The periodicity when obsolete files get deleted. The default
  // value is 6 hours. The files that get out of scope by compaction
  // process will still get automatically deleted on every compaction,
  // regardless of this setting
  uint64_t delete_obsolete_files_period_micros;

  // Maximum number of concurrent background compaction jobs, submitted to
  // the default LOW priority thread pool.
  // If you're increasing this, also consider increasing number of threads in
  // LOW priority thread pool. For more information, see
  // Env::SetBackgroundThreads
  // Default: 1
  int max_background_compactions;

  // This integer represents the maximum number of threads that will
  // concurrently perform a level-based compaction from L0 to L1. A value
  // of 1 means there is no parallelism, and a greater number enables a
  // multi-threaded version of the L0-L1 compaction that divides the compaction
  // into multiple, smaller ones that are run simultaneously. This is still
  // under development and is only available for level-based compaction.
  // Default: 1
  uint32_t max_subcompactions;

  // Maximum number of concurrent background memtable flush jobs, submitted to
  // the HIGH priority thread pool.
  //
  // By default, all background jobs (major compaction and memtable flush) go
  // to the LOW priority pool. If this option is set to a positive number,
  // memtable flush jobs will be submitted to the HIGH priority pool.
  // It is important when the same Env is shared by multiple db instances.
  // Without a separate pool, long running major compaction jobs could
  // potentially block memtable flush jobs of other db instances, leading to
  // unnecessary Put stalls.
  //
  // If you're increasing this, also consider increasing number of threads in
  // HIGH priority thread pool. For more information, see
  // Env::SetBackgroundThreads
  // Default: 1
  int max_background_flushes;

  // Specify the maximal size of the info log file. If the log file
  // is larger than `max_log_file_size`, a new info log file will
  // be created.
  // If max_log_file_size == 0, all logs will be written to one
  // log file.
  size_t max_log_file_size;

  // Time for the info log file to roll (in seconds).
  // If specified with non-zero value, log file will be rolled
  // if it has been active longer than `log_file_time_to_roll`.
  // Default: 0 (disabled)
  size_t log_file_time_to_roll;

  // Maximal info log files to be kept.
  // Default: 1000
  size_t keep_log_file_num;

  // manifest file is rolled over on reaching this limit.
  // The older manifest file will be deleted.
  // The default value is MAX_INT so that roll-over does not take place.
  uint64_t max_manifest_file_size;

  // Number of shards used for table cache.
  int table_cache_numshardbits;

  // DEPRECATED
  // int table_cache_remove_scan_count_limit;

  // The following two fields affect how archived logs will be deleted.
  // 1. If both set to 0, logs will be deleted asap and will not get into
  //    the archive.
  // 2. If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0,
  //    WAL files will be checked every 10 min and if total size is greater
  //    than WAL_size_limit_MB, they will be deleted starting with the
  //    earliest until size_limit is met. All empty files will be deleted.
  // 3. If WAL_ttl_seconds is not 0 and WAL_size_limit_MB is 0, then
  //    WAL files will be checked every WAL_ttl_seconds / 2 and those that
  //    are older than WAL_ttl_seconds will be deleted.
  // 4. If both are not 0, WAL files will be checked every 10 min and both
  //    checks will be performed with ttl being first.
  uint64_t WAL_ttl_seconds;
  uint64_t WAL_size_limit_MB;

  // Number of bytes to preallocate (via fallocate) the manifest
  // files.  Default is 4mb, which is reasonable to reduce random IO
  // as well as prevent overallocation for mounts that preallocate
  // large amounts of data (such as xfs's allocsize option).
  size_t manifest_preallocation_size;

  // Data being read from file storage may be buffered in the OS
  // Default: true
  bool allow_os_buffer;

  // Allow the OS to mmap file for reading sst tables. Default: false
  bool allow_mmap_reads;

  // Allow the OS to mmap file for writing.
  // DB::SyncWAL() only works if this is set to false.
  // Default: false
  bool allow_mmap_writes;

  // Disable child process inherit open files. Default: true
  bool is_fd_close_on_exec;

  // DEPRECATED -- this option is no longer used
  bool skip_log_error_on_recovery;

  // if not zero, dump rocksdb.stats to LOG every stats_dump_period_sec
  // Default: 600 (10 min)
  unsigned int stats_dump_period_sec;

  // If set true, will hint the underlying file system that the file
  // access pattern is random, when a sst file is opened.
  // Default: true
  bool advise_random_on_open;

  // Amount of data to build up in memtables across all column
  // families before writing to disk.
  //
  // This is distinct from write_buffer_size, which enforces a limit
  // for a single memtable.
  //
  // This feature is disabled by default. Specify a non-zero value
  // to enable it.
  //
  // Default: 0 (disabled)
  size_t db_write_buffer_size;

  // Specify the file access pattern once a compaction is started.
  // It will be applied to all input files of a compaction.
  // Default: NORMAL
  enum AccessHint {
      NONE,
      NORMAL,
      SEQUENTIAL,
      WILLNEED
  };
  AccessHint access_hint_on_compaction_start;

  // If true, always create a new file descriptor and new table reader
  // for compaction inputs. Turn this parameter on may introduce extra
  // memory usage in the table reader, if it allocates extra memory
  // for indexes. This will allow file descriptor prefetch options
  // to be set for compaction input files and not to impact file
  // descriptors for the same file used by user queries.
  // Suggest to enable BlockBasedTableOptions.cache_index_and_filter_blocks
  // for this mode if using block-based table.
  //
  // Default: false
  bool new_table_reader_for_compaction_inputs;

  // If non-zero, we perform bigger reads when doing compaction. If you're
  // running RocksDB on spinning disks, you should set this to at least 2MB.
  // That way RocksDB's compaction is doing sequential instead of random reads.
  //
  // When non-zero, we also force new_table_reader_for_compaction_inputs to
  // true.
  //
  // Default: 0
  size_t compaction_readahead_size;

  // Use adaptive mutex, which spins in the user space before resorting
  // to kernel. This could reduce context switch when the mutex is not
  // heavily contended. However, if the mutex is hot, we could end up
  // wasting spin time.
  // Default: false
  bool use_adaptive_mutex;

  // Create DBOptions with default values for all fields
  DBOptions();
  // Create DBOptions from Options
  explicit DBOptions(const Options& options);

  void Dump(Logger* log) const;

  // Allows OS to incrementally sync files to disk while they are being
  // written, asynchronously, in the background. This operation can be used
  // to smooth out write I/Os over time. Users shouldn't rely on it for
  // persistency guarantee.
  // Issue one request for every bytes_per_sync written. 0 turns it off.
  // Default: 0
  //
  // You may consider using rate_limiter to regulate write rate to device.
  // When rate limiter is enabled, it automatically enables bytes_per_sync
  // to 1MB.
  //
  // This option applies to table files
  uint64_t bytes_per_sync;

  // Same as bytes_per_sync, but applies to WAL files
  // Default: 0, turned off
  uint64_t wal_bytes_per_sync;

  // A vector of EventListeners which call-back functions will be called
  // when specific RocksDB event happens.
  std::vector<std::shared_ptr<EventListener>> listeners;

  // If true, then the status of the threads involved in this DB will
  // be tracked and available via GetThreadList() API.
  //
  // Default: false
  bool enable_thread_tracking;

  // The limited write rate to DB if soft_rate_limit or
  // level0_slowdown_writes_trigger is triggered. It is calculated using
  // size of user write requests before compression.
  // Unit: byte per second.
  //
  // Default: 1MB/s
  uint64_t delayed_write_rate;

  // If true, then DB::Open() will not update the statistics used to optimize
  // compaction decision by loading table properties from many files.
// Turning off this feature will improve DBOpen time espcially in // disk environment. // // Default: false bool skip_stats_update_on_db_open; // Recovery mode to control the consistency while replaying WAL // Default: kTolerateCorruptedTailRecords WALRecoveryMode wal_recovery_mode; // A global cache for table-level rows. // Default: nullptr (disabled) // Not supported in ROCKSDB_LITE mode! std::shared_ptr<Cache> row_cache; }; // Options to control the behavior of a database (passed to DB::Open) struct Options : public DBOptions, public ColumnFamilyOptions { // Create an Options object with default values for all fields. Options() : DBOptions(), ColumnFamilyOptions() {} Options(const DBOptions& db_options, const ColumnFamilyOptions& column_family_options) : DBOptions(db_options), ColumnFamilyOptions(column_family_options) {} void Dump(Logger* log) const; void DumpCFOptions(Logger* log) const; // Set appropriate parameters for bulk loading. // The reason that this is a function that returns "this" instead of a // constructor is to enable chaining of multiple similar calls in the future. // // All data will be in level 0 without any automatic compaction. // It's recommended to manually call CompactRange(NULL, NULL) before reading // from the database, because otherwise the read can be very slow. Options* PrepareForBulkLoad(); }; // // An application can issue a read request (via Get/Iterators) and specify // if that read should process data that ALREADY resides on a specified cache // level. For example, if an application specifies kBlockCacheTier then the // Get call will process data that is already processed in the memtable or // the block cache. It will not page in data from the OS cache or data that // resides in storage. 
enum ReadTier { kReadAllTier = 0x0, // data in memtable, block cache, OS cache or storage kBlockCacheTier = 0x1 // data in memtable or block cache }; // Options that control read operations struct ReadOptions { // If true, all data read from underlying storage will be // verified against corresponding checksums. // Default: true bool verify_checksums; // Should the "data block"/"index block"/"filter block" read for this // iteration be cached in memory? // Callers may wish to set this field to false for bulk scans. // Default: true bool fill_cache; // If this option is set and memtable implementation allows, Seek // might only return keys with the same prefix as the seek-key // // ! DEPRECATED: prefix_seek is on by default when prefix_extractor // is configured // bool prefix_seek; // If "snapshot" is non-nullptr, read as of the supplied snapshot // (which must belong to the DB that is being read and which must // not have been released). If "snapshot" is nullptr, use an impliicit // snapshot of the state at the beginning of this read operation. // Default: nullptr const Snapshot* snapshot; // If "prefix" is non-nullptr, and ReadOptions is being passed to // db.NewIterator, only return results when the key begins with this // prefix. This field is ignored by other calls (e.g., Get). // Options.prefix_extractor must also be set, and // prefix_extractor.InRange(prefix) must be true. The iterator // returned by NewIterator when this option is set will behave just // as if the underlying store did not contain any non-matching keys, // with two exceptions. Seek() only accepts keys starting with the // prefix, and SeekToLast() is not supported. prefix filter with this // option will sometimes reduce the number of read IOPs. // Default: nullptr // // ! DEPRECATED // const Slice* prefix; // "iterate_upper_bound" defines the extent upto which the forward iterator // can returns entries. Once the bound is reached, Valid() will be false. 
// "iterate_upper_bound" is exclusive ie the bound value is // not a valid entry. If iterator_extractor is not null, the Seek target // and iterator_upper_bound need to have the same prefix. // This is because ordering is not guaranteed outside of prefix domain. // There is no lower bound on the iterator. If needed, that can be easily // implemented // // Default: nullptr const Slice* iterate_upper_bound; // Specify if this read request should process data that ALREADY // resides on a particular cache. If the required data is not // found at the specified cache, then Status::Incomplete is returned. // Default: kReadAllTier ReadTier read_tier; // Specify to create a tailing iterator -- a special iterator that has a // view of the complete database (i.e. it can also be used to read newly // added data) and is optimized for sequential reads. It will return records // that were inserted into the database after the creation of the iterator. // Default: false // Not supported in ROCKSDB_LITE mode! bool tailing; // Specify to create a managed iterator -- a special iterator that // uses less resources by having the ability to free its underlying // resources on request. // Default: false // Not supported in ROCKSDB_LITE mode! bool managed; // Enable a total order seek regardless of index format (e.g. hash index) // used in the table. Some table format (e.g. plain table) may not support // this option. bool total_order_seek; ReadOptions(); ReadOptions(bool cksum, bool cache); }; // Options that control write operations struct WriteOptions { // If true, the write will be flushed from the operating system // buffer cache (by calling WritableFile::Sync()) before the write // is considered complete. If this flag is true, writes will be // slower. // // If this flag is false, and the machine crashes, some recent // writes may be lost. Note that if it is just the process that // crashes (i.e., the machine does not reboot), no writes will be // lost even if sync==false. 
// // In other words, a DB write with sync==false has similar // crash semantics as the "write()" system call. A DB write // with sync==true has similar crash semantics to a "write()" // system call followed by "fdatasync()". // // Default: false bool sync; // If true, writes will not first go to the write ahead log, // and the write may got lost after a crash. bool disableWAL; // The option is deprecated. It's not used anymore. uint64_t timeout_hint_us; // If true and if user is trying to write to column families that don't exist // (they were dropped), ignore the write (don't return an error). If there // are multiple writes in a WriteBatch, other writes will succeed. // Default: false bool ignore_missing_column_families; WriteOptions() : sync(false), disableWAL(false), timeout_hint_us(0), ignore_missing_column_families(false) {} }; // Options that control flush operations struct FlushOptions { // If true, the flush will wait until the flush is done. // Default: true bool wait; FlushOptions() : wait(true) {} }; // Get options based on some guidelines. Now only tune parameter based on // flush/compaction and fill default parameters for other parameters. // total_write_buffer_limit: budget for memory spent for mem tables // read_amplification_threshold: comfortable value of read amplification // write_amplification_threshold: comfortable value of write amplification. // target_db_size: estimated total DB size. extern Options GetOptions(size_t total_write_buffer_limit, int read_amplification_threshold = 8, int write_amplification_threshold = 32, uint64_t target_db_size = 68719476736 /* 64GB */); // CompactionOptions are used in CompactFiles() call. struct CompactionOptions { // Compaction output compression type // Default: snappy CompressionType compression; // Compaction will create files of size `output_file_size_limit`. 
// Default: MAX, which means that compaction will create a single file uint64_t output_file_size_limit; CompactionOptions() : compression(kSnappyCompression), output_file_size_limit(std::numeric_limits<uint64_t>::max()) {} }; // For level based compaction, we can configure if we want to skip/force // bottommost level compaction. enum class BottommostLevelCompaction { // Skip bottommost level compaction kSkip, // Only compact bottommost level if there is a compaction filter // This is the default option kIfHaveCompactionFilter, // Always compact bottommost level kForce, }; // CompactRangeOptions is used by CompactRange() call. struct CompactRangeOptions { // If true, compacted files will be moved to the minimum level capable // of holding the data or given level (specified non-negative target_level). bool change_level = false; // If change_level is true and target_level have non-negative value, compacted // files will be moved to target_level. int target_level = -1; // Compaction outputs will be placed in options.db_paths[target_path_id]. // Behavior is undefined if target_path_id is out of range. uint32_t target_path_id = 0; // By default level based compaction will only compact the bottommost level // if there is a compaction filter BottommostLevelCompaction bottommost_level_compaction = BottommostLevelCompaction::kIfHaveCompactionFilter; }; } // namespace rocksdb #endif // STORAGE_ROCKSDB_INCLUDE_OPTIONS_H_
lgscofield/rocksdb
include/rocksdb/options.h
C
bsd-3-clause
55,557
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/component_updater/recovery_component_installer.h" #include <stdint.h> #include <memory> #include <string> #include "base/base_paths.h" #include "base/bind.h" #include "base/command_line.h" #include "base/files/file_path.h" #include "base/files/file_util.h" #include "base/json/json_file_value_serializer.h" #include "base/logging.h" #include "base/metrics/histogram.h" #include "base/path_service.h" #include "base/process/kill.h" #include "base/process/launch.h" #include "base/process/process.h" #include "base/threading/worker_pool.h" #include "build/build_config.h" #include "chrome/common/chrome_switches.h" #include "chrome/common/pref_names.h" #include "components/component_updater/component_updater_paths.h" #include "components/component_updater/component_updater_service.h" #include "components/component_updater/pref_names.h" #include "components/prefs/pref_registry_simple.h" #include "components/prefs/pref_service.h" #include "components/update_client/update_client.h" #include "content/public/browser/browser_thread.h" using content::BrowserThread; namespace component_updater { namespace { // CRX hash. The extension id is: npdjjkjlcidkjlamlmmdelcjbcpdjocm. const uint8_t kSha2Hash[] = {0xdf, 0x39, 0x9a, 0x9b, 0x28, 0x3a, 0x9b, 0x0c, 0xbc, 0xc3, 0x4b, 0x29, 0x12, 0xf3, 0x9e, 0x2c, 0x19, 0x7a, 0x71, 0x4b, 0x0a, 0x7c, 0x80, 0x1c, 0xf6, 0x29, 0x7c, 0x0a, 0x5f, 0xea, 0x67, 0xb7}; // File name of the recovery binary on different platforms. const base::FilePath::CharType kRecoveryFileName[] = #if defined(OS_WIN) FILE_PATH_LITERAL("ChromeRecovery.exe"); #else // OS_LINUX, OS_MACOSX, etc. FILE_PATH_LITERAL("ChromeRecovery"); #endif const char kRecoveryManifestName[] = "ChromeRecovery"; // ChromeRecovery process exit codes. 
enum ChromeRecoveryExitCode { EXIT_CODE_RECOVERY_SUCCEEDED = 0, EXIT_CODE_RECOVERY_SKIPPED = 1, EXIT_CODE_ELEVATION_NEEDED = 2, }; enum RecoveryComponentEvent { RCE_RUNNING_NON_ELEVATED = 0, RCE_ELEVATION_NEEDED = 1, RCE_FAILED = 2, RCE_SUCCEEDED = 3, RCE_SKIPPED = 4, RCE_RUNNING_ELEVATED = 5, RCE_ELEVATED_FAILED = 6, RCE_ELEVATED_SUCCEEDED = 7, RCE_ELEVATED_SKIPPED = 8, RCE_COMPONENT_DOWNLOAD_ERROR = 9, RCE_COUNT }; void RecordRecoveryComponentUMAEvent(RecoveryComponentEvent event) { UMA_HISTOGRAM_ENUMERATION("RecoveryComponent.Event", event, RCE_COUNT); } #if !defined(OS_CHROMEOS) // Checks if elevated recovery simulation switch was present on the command // line. This is for testing purpose. bool SimulatingElevatedRecovery() { return base::CommandLine::ForCurrentProcess()->HasSwitch( switches::kSimulateElevatedRecovery); } #endif // !defined(OS_CHROMEOS) base::CommandLine GetRecoveryInstallCommandLine( const base::FilePath& command, const base::DictionaryValue& manifest, bool is_deferred_run, const Version& version) { base::CommandLine command_line(command); // Add a flag to for re-attempted install with elevated privilege so that the // recovery executable can report back accordingly. 
if (is_deferred_run) command_line.AppendArg("/deferredrun"); std::string arguments; if (manifest.GetStringASCII("x-recovery-args", &arguments)) command_line.AppendArg(arguments); std::string add_version; if (manifest.GetStringASCII("x-recovery-add-version", &add_version) && add_version == "yes") { std::string version_string = "/version "; version_string += version.GetString(); command_line.AppendArg(version_string); } return command_line; } #if defined(OS_WIN) std::unique_ptr<base::DictionaryValue> ReadManifest( const base::FilePath& manifest) { JSONFileValueDeserializer deserializer(manifest); std::string error; return base::DictionaryValue::From(deserializer.Deserialize(NULL, &error)); } void WaitForElevatedInstallToComplete(base::Process process) { int installer_exit_code = 0; const base::TimeDelta kMaxWaitTime = base::TimeDelta::FromSeconds(600); if (process.WaitForExitWithTimeout(kMaxWaitTime, &installer_exit_code)) { if (installer_exit_code == EXIT_CODE_RECOVERY_SUCCEEDED) { RecordRecoveryComponentUMAEvent(RCE_ELEVATED_SUCCEEDED); } else { RecordRecoveryComponentUMAEvent(RCE_ELEVATED_SKIPPED); } } else { RecordRecoveryComponentUMAEvent(RCE_ELEVATED_FAILED); } } void DoElevatedInstallRecoveryComponent(const base::FilePath& path) { const base::FilePath main_file = path.Append(kRecoveryFileName); const base::FilePath manifest_file = path.Append(FILE_PATH_LITERAL("manifest.json")); if (!base::PathExists(main_file) || !base::PathExists(manifest_file)) return; std::unique_ptr<base::DictionaryValue> manifest(ReadManifest(manifest_file)); std::string name; manifest->GetStringASCII("name", &name); if (name != kRecoveryManifestName) return; std::string proposed_version; manifest->GetStringASCII("version", &proposed_version); const Version version(proposed_version.c_str()); if (!version.IsValid()) return; const bool is_deferred_run = true; const auto cmdline = GetRecoveryInstallCommandLine( main_file, *manifest, is_deferred_run, version); 
RecordRecoveryComponentUMAEvent(RCE_RUNNING_ELEVATED); base::LaunchOptions options; options.start_hidden = true; base::Process process = base::LaunchElevatedProcess(cmdline, options); base::WorkerPool::PostTask( FROM_HERE, base::Bind(&WaitForElevatedInstallToComplete, base::Passed(&process)), true); } void ElevatedInstallRecoveryComponent(const base::FilePath& installer_path) { base::WorkerPool::PostTask( FROM_HERE, base::Bind(&DoElevatedInstallRecoveryComponent, installer_path), true); } #endif // defined(OS_WIN) } // namespace // Component installer that is responsible to repair the chrome installation // or repair the Google update installation. This is a last resort safety // mechanism. // For user Chrome, recovery component just installs silently. For machine // Chrome, elevation may be needed. If that happens, the installer will set // preference flag prefs::kRecoveryComponentNeedsElevation to request that. // There is a global error service monitors this flag and will pop up // bubble if the flag is set to true. // See chrome/browser/recovery/recovery_install_global_error.cc for details. 
class RecoveryComponentInstaller : public update_client::CrxInstaller { public: RecoveryComponentInstaller(const Version& version, PrefService* prefs); // ComponentInstaller implementation: void OnUpdateError(int error) override; bool Install(const base::DictionaryValue& manifest, const base::FilePath& unpack_path) override; bool GetInstalledFile(const std::string& file, base::FilePath* installed_file) override; bool Uninstall() override; private: ~RecoveryComponentInstaller() override {} bool RunInstallCommand(const base::CommandLine& cmdline, const base::FilePath& installer_folder) const; Version current_version_; PrefService* prefs_; }; void SimulateElevatedRecoveryHelper(PrefService* prefs) { prefs->SetBoolean(prefs::kRecoveryComponentNeedsElevation, true); } void RecoveryRegisterHelper(ComponentUpdateService* cus, PrefService* prefs) { DCHECK_CURRENTLY_ON(BrowserThread::UI); Version version(prefs->GetString(prefs::kRecoveryComponentVersion)); if (!version.IsValid()) { NOTREACHED(); return; } update_client::CrxComponent recovery; recovery.name = "recovery"; recovery.installer = new RecoveryComponentInstaller(version, prefs); recovery.version = version; recovery.pk_hash.assign(kSha2Hash, &kSha2Hash[sizeof(kSha2Hash)]); if (!cus->RegisterComponent(recovery)) { NOTREACHED() << "Recovery component registration failed."; } } void RecoveryUpdateVersionHelper(const Version& version, PrefService* prefs) { DCHECK_CURRENTLY_ON(BrowserThread::UI); prefs->SetString(prefs::kRecoveryComponentVersion, version.GetString()); } void SetPrefsForElevatedRecoveryInstall(const base::FilePath& unpack_path, PrefService* prefs) { DCHECK_CURRENTLY_ON(BrowserThread::UI); prefs->SetFilePath(prefs::kRecoveryComponentUnpackPath, unpack_path); prefs->SetBoolean(prefs::kRecoveryComponentNeedsElevation, true); } RecoveryComponentInstaller::RecoveryComponentInstaller(const Version& version, PrefService* prefs) : current_version_(version), prefs_(prefs) { DCHECK(version.IsValid()); } void 
RecoveryComponentInstaller::OnUpdateError(int error) { RecordRecoveryComponentUMAEvent(RCE_COMPONENT_DOWNLOAD_ERROR); NOTREACHED() << "Recovery component update error: " << error; } #if defined(OS_WIN) void WaitForInstallToComplete(base::Process process, const base::FilePath& installer_folder, PrefService* prefs) { int installer_exit_code = 0; const base::TimeDelta kMaxWaitTime = base::TimeDelta::FromSeconds(600); if (process.WaitForExitWithTimeout(kMaxWaitTime, &installer_exit_code)) { if (installer_exit_code == EXIT_CODE_ELEVATION_NEEDED) { RecordRecoveryComponentUMAEvent(RCE_ELEVATION_NEEDED); BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&SetPrefsForElevatedRecoveryInstall, installer_folder, prefs)); } else if (installer_exit_code == EXIT_CODE_RECOVERY_SUCCEEDED) { RecordRecoveryComponentUMAEvent(RCE_SUCCEEDED); } else if (installer_exit_code == EXIT_CODE_RECOVERY_SKIPPED) { RecordRecoveryComponentUMAEvent(RCE_SKIPPED); } } else { RecordRecoveryComponentUMAEvent(RCE_FAILED); } } bool RecoveryComponentInstaller::RunInstallCommand( const base::CommandLine& cmdline, const base::FilePath& installer_folder) const { RecordRecoveryComponentUMAEvent(RCE_RUNNING_NON_ELEVATED); base::LaunchOptions options; options.start_hidden = true; base::Process process = base::LaunchProcess(cmdline, options); if (!process.IsValid()) return false; // Let worker pool thread wait for us so we don't block Chrome shutdown. base::WorkerPool::PostTask( FROM_HERE, base::Bind(&WaitForInstallToComplete, base::Passed(&process), installer_folder, prefs_), true); // Returns true regardless of install result since from updater service // perspective the install is done, even we may need to do elevated // install later. 
return true; } #else bool RecoveryComponentInstaller::RunInstallCommand( const base::CommandLine& cmdline, const base::FilePath&) const { return base::LaunchProcess(cmdline, base::LaunchOptions()).IsValid(); } #endif // defined(OS_WIN) #if defined(OS_POSIX) // Sets the POSIX executable permissions on a file bool SetPosixExecutablePermission(const base::FilePath& path) { int permissions = 0; if (!base::GetPosixFilePermissions(path, &permissions)) return false; const int kExecutableMask = base::FILE_PERMISSION_EXECUTE_BY_USER | base::FILE_PERMISSION_EXECUTE_BY_GROUP | base::FILE_PERMISSION_EXECUTE_BY_OTHERS; if ((permissions & kExecutableMask) == kExecutableMask) return true; // No need to update return base::SetPosixFilePermissions(path, permissions | kExecutableMask); } #endif // defined(OS_POSIX) bool RecoveryComponentInstaller::Install(const base::DictionaryValue& manifest, const base::FilePath& unpack_path) { std::string name; manifest.GetStringASCII("name", &name); if (name != kRecoveryManifestName) return false; std::string proposed_version; manifest.GetStringASCII("version", &proposed_version); Version version(proposed_version.c_str()); if (!version.IsValid()) return false; if (current_version_.CompareTo(version) >= 0) return false; // Passed the basic tests. Copy the installation to a permanent directory. base::FilePath path; if (!PathService::Get(DIR_RECOVERY_BASE, &path)) return false; if (!base::PathExists(path) && !base::CreateDirectory(path)) return false; path = path.AppendASCII(version.GetString()); if (base::PathExists(path) && !base::DeleteFile(path, true)) return false; if (!base::Move(unpack_path, path)) { DVLOG(1) << "Recovery component move failed."; return false; } base::FilePath main_file = path.Append(kRecoveryFileName); if (!base::PathExists(main_file)) return false; #if defined(OS_POSIX) // The current version of the CRX unzipping does not restore // correctly the executable flags/permissions. 
See https://crbug.com/555011 if (!SetPosixExecutablePermission(main_file)) { DVLOG(1) << "Recovery component failed to set the executable " "permission on the file: " << main_file.value(); return false; } #endif // Run the recovery component. const bool is_deferred_run = false; const auto cmdline = GetRecoveryInstallCommandLine( main_file, manifest, is_deferred_run, current_version_); if (!RunInstallCommand(cmdline, path)) { return false; } current_version_ = version; if (prefs_) { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&RecoveryUpdateVersionHelper, version, prefs_)); } return true; } bool RecoveryComponentInstaller::GetInstalledFile( const std::string& file, base::FilePath* installed_file) { return false; } bool RecoveryComponentInstaller::Uninstall() { return false; } void RegisterRecoveryComponent(ComponentUpdateService* cus, PrefService* prefs) { #if !defined(OS_CHROMEOS) if (SimulatingElevatedRecovery()) { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&SimulateElevatedRecoveryHelper, prefs)); } // We delay execute the registration because we are not required in // the critical path during browser startup. 
BrowserThread::PostDelayedTask( BrowserThread::UI, FROM_HERE, base::Bind(&RecoveryRegisterHelper, cus, prefs), base::TimeDelta::FromSeconds(6)); #endif // !defined(OS_CHROMEOS) } void RegisterPrefsForRecoveryComponent(PrefRegistrySimple* registry) { registry->RegisterStringPref(prefs::kRecoveryComponentVersion, "0.0.0.0"); registry->RegisterFilePathPref(prefs::kRecoveryComponentUnpackPath, base::FilePath()); registry->RegisterBooleanPref(prefs::kRecoveryComponentNeedsElevation, false); } void AcceptedElevatedRecoveryInstall(PrefService* prefs) { DCHECK_CURRENTLY_ON(BrowserThread::UI); #if defined(OS_WIN) ElevatedInstallRecoveryComponent( prefs->GetFilePath(prefs::kRecoveryComponentUnpackPath)); #endif // OS_WIN prefs->SetBoolean(prefs::kRecoveryComponentNeedsElevation, false); } void DeclinedElevatedRecoveryInstall(PrefService* prefs) { DCHECK_CURRENTLY_ON(BrowserThread::UI); prefs->SetBoolean(prefs::kRecoveryComponentNeedsElevation, false); } } // namespace component_updater
axinging/chromium-crosswalk
chrome/browser/component_updater/recovery_component_installer.cc
C++
bsd-3-clause
15,443
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/zf2 for the canonical source repository * @copyright Copyright (c) 2005-2013 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace ZendTest\Db\Sql\Ddl\Column; use Zend\Db\Sql\Ddl\Column\Text; class TextTest extends \PHPUnit_Framework_TestCase { /** * @covers Zend\Db\Sql\Ddl\Column\Text::getExpressionData */ public function testGetExpressionData() { $column = new Text('foo'); $this->assertEquals( array(array('%s TEXT %s %s', array('foo', 'NOT NULL', ''), array($column::TYPE_IDENTIFIER, $column::TYPE_LITERAL, $column::TYPE_LITERAL, $column::TYPE_LITERAL))), $column->getExpressionData() ); } }
emilgeorgiev89/ZFTEST
tests/ZendTest/Db/Sql/Ddl/Column/TextTest.php
PHP
bsd-3-clause
863
<body> <p>Test simple shared worker sharing cases. Should print several PASS lines followed by DONE.</p> <div id=result></div> <script> function log(message) { document.getElementById("result").innerHTML += message + "<br>"; } if (window.testRunner) { testRunner.dumpAsText(); testRunner.waitUntilDone(); } // Load two workers simultaneously, to ensure that simultaneous loads also yield the same instance. // Loading a worker named "name" tests that workers shutdown when the parent document exits, because other tests also create workers with that same name but with different URLs. var worker = new SharedWorker('resources/shared-worker-common.js', 'name'); var worker2 = new SharedWorker('resources/shared-worker-common.js', 'name'); try { new SharedWorker('resources/some-other-url.js', 'name'); log("FAIL: Creating SharedWorker with different URLs but the same name should fail"); } catch (ex) { log("PASS: Exception thrown when creating SharedWorker with different URLs but same name: " + ex); } // Set something in global context in one worker, read value back on other worker, to make sure they are truly shared. worker.port.postMessage("eval self.foo"); worker.port.onmessage = function(event) { log((event.data == "self.foo: undefined" ? "PASS: " : "FAIL: ") + "Accessing new instance of shared worker: " + event.data); worker.port.postMessage("eval self.foo = 1234"); worker.port.onmessage = function(event) { log((event.data == "self.foo = 1234: 1234" ? "PASS: " : "FAIL: ") + "Setting global variable in shared worker: " + event.data); worker2.port.postMessage("eval self.foo"); worker2.port.onmessage = function(event) { log((event.data == "self.foo: 1234" ? "PASS: " : "FAIL: ") + "Accessing simultaneously-loaded instance of shared worker: " + event.data); testNewWorker(); } } } function testNewWorker() { // New name, so should be a distinct worker from the previous one. 
var worker3 = new SharedWorker('resources/shared-worker-common.js', 'name2'); worker3.port.postMessage("eval self.foo"); worker3.port.onmessage = function(event) { log((event.data == "self.foo: undefined" ? "PASS: " : "FAIL: ") + "Accessing new instance of shared worker: " + event.data); testAlreadyLoaded(); }; } function testAlreadyLoaded() { // Make sure that referencing a worker that is already loaded yields the same instance. var worker4 = new SharedWorker('resources/shared-worker-common.js', 'name'); worker4.port.postMessage("eval self.foo"); worker4.port.onmessage = function(event) { log((event.data == "self.foo: 1234" ? "PASS: " : "FAIL: ") + "Accessing already-loaded instance of shared worker: " + event.data); done(); }; } function done() { log("DONE"); if (window.testRunner) testRunner.notifyDone(); } </script> </body>
leighpauls/k2cro4
content/test/data/layout_tests/LayoutTests/fast/workers/shared-worker-shared.html
HTML
bsd-3-clause
2,943
import json from tempfile import mkdtemp from os.path import join, basename from shutil import rmtree from distutils.dir_util import copy_tree from twisted.trial import unittest from twisted.internet.defer import inlineCallbacks from slyd.projectspec import create_project_resource from slyd.projectspec import convert_template from .utils import TestSite, test_spec_manager from .settings import SPEC_DATA_DIR class CrawlerSpecTest(unittest.TestCase): spider = """ { "exclude_patterns": [], "follow_patterns": [ ".+MobileHomePark.php?key=d+" ], "links_to_follow": "patterns", "respect_nofollow": true, "start_urls": [ "http://www.mhvillage.com/" ], "templates": [] } """ def setUp(self): sm = test_spec_manager() spec_resource = create_project_resource(sm) self.temp_project_dir = mkdtemp(dir=SPEC_DATA_DIR, prefix='test-run-') self.project = basename(self.temp_project_dir) self.specsite = TestSite(spec_resource, project=self.project) test_project_dir = join(SPEC_DATA_DIR, 'test') copy_tree(test_project_dir, self.temp_project_dir) @inlineCallbacks def _get_check_resource(self, resource, converter=None): result = yield self.specsite.get(resource) ffile = join(self.temp_project_dir, resource + ".json") fdata = json.load(open(ffile)) if converter: converter(fdata) rdata = json.loads(result.value()) self.assertEqual(fdata, rdata) def test_get_resource(self): self._get_check_resource("project") self._get_check_resource("spiders/pinterest.com", convert_template) @inlineCallbacks def post_command(self, spider, cmd, *args, **kwargs): obj = {'cmd': cmd, 'args': args} result = yield self.specsite.post(spider, data=json.dumps(obj)) self.assertEqual(result.responseCode, kwargs.get('expect', 200)) @inlineCallbacks def test_updating(self): result = yield self.specsite.post('spiders/testpost', data=self.spider) self.assertEqual(result.responseCode, 200) result = yield self.specsite.get('spiders/testpost') self.assertEqual(json.loads(result.value()), json.loads(self.spider)) # should fail - 
missing required fields result = yield self.specsite.post('spiders/testpost', data='{}') self.assertEqual(result.responseCode, 400) @inlineCallbacks def test_commands(self): self.post_command('spiders', 'unknown', expect=400) self.post_command('spiders', 'mv', expect=400) self.post_command('spiders', 'mv', '../notallowed', 'whatever', expect=400) self.post_command('spiders', 'mv', 'notallowedexists', 'whatever', expect=404) self.post_command('spiders', 'rm', 'notexists', expect=404) # TODO: mv to existing spider - 400 yield self.specsite.post('spiders/c', data=self.spider) self._get_check_resource('spiders/c') self.post_command('spiders', 'mv', 'c', 'c2') result = yield self.specsite.get('spiders/c') self.assertEqual(result.value(), '{}\n') self._get_check_resource('spiders/c2') yield self.specsite.post('spiders/c3', data=self.spider) # overwrites self.post_command('spiders', 'mv', 'c2', 'c3') result = yield self.specsite.get('spiders/c2') self.assertEqual(result.value(), '{}\n') self.post_command('spiders', 'rm', 'c3') result = yield self.specsite.get('spiders/c3') self.assertEqual(result.value(), '{}\n') def tearDown(self): rmtree(self.temp_project_dir)
CENDARI/portia
slyd/tests/test_spec.py
Python
bsd-3-clause
3,884
# -*- coding: utf-8 -*- """ Created on Sat Aug 24 15:08:01 2013 @author: steve """ import numpy as np import mdptoolbox from .utils import SMALLNUM, P_forest, R_forest, P_small, R_small, P_sparse from .utils import P_forest_sparse, R_forest_sparse def test_ValueIterationGS_small(): sdp = mdptoolbox.mdp.ValueIterationGS(P_small, R_small, 0.9) sdp.run() p = (1, 0) itr = 28 # from Octave MDPtoolbox v = np.matrix('42.27744026138212, 35.89524504047155') assert sdp.iter == itr assert sdp.policy == p assert (np.absolute(np.array(sdp.V) - v) < SMALLNUM).all() def test_ValueIterationGS_small_sparse(): sdp = mdptoolbox.mdp.ValueIterationGS(P_sparse, R_small, 0.9) sdp.run() p = (1, 0) itr = 28 # from Octave MDPtoolbox v = np.matrix('42.27744026138212, 35.89524504047155') assert sdp.iter == itr assert sdp.policy == p assert (np.absolute(np.array(sdp.V) - v) < SMALLNUM).all() def test_ValueIterationGS_forest(): sdp = mdptoolbox.mdp.ValueIterationGS(P_forest, R_forest, 0.96) sdp.run() p = (0, 0, 0) v = np.matrix('69.98910821400665, 73.46560194552877, 77.46560194552877') itr = 63 # from Octave MDPtoolbox assert sdp.max_iter == 63 assert sdp.policy == p assert sdp.iter == itr assert (np.absolute(np.array(sdp.V) - v) < SMALLNUM).all() def test_ValueIterationGS_forest_sparse(): sdp = mdptoolbox.mdp.ValueIterationGS(P_forest_sparse, R_forest_sparse, 0.96) sdp.run() p = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) itr = 16 # from Octave MDPtoolbox assert sdp.policy == p assert sdp.iter == itr
silgon/pymdptoolbox
src/tests/test_ValueIterationGS.py
Python
bsd-3-clause
1,658
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@zend.com so we can send you a copy immediately. * * @category Zend * @package Zend_Gdata * @subpackage Photos * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id: Id.php 24593 2012-01-05 20:35:02Z matthew $ */ /** * @see Zend_Gdata_Extension */ require_once 'Zend/Gdata/Extension.php'; /** * @see Zend_Gdata_Photos */ require_once 'Zend/Gdata/Photos.php'; /** * Represents the gphoto:id element used by the API. This class * represents the unique ID assigned to an element by the servers. * * @category Zend * @package Zend_Gdata * @subpackage Photos * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ class Zend_Gdata_Photos_Extension_Id extends Zend_Gdata_Extension { protected $_rootNamespace = 'gphoto'; protected $_rootElement = 'id'; /** * Constructs a new Zend_Gdata_Photos_Extension_Id object. * * @param string $text (optional) The ID being represented. */ public function __construct($text = null) { $this->registerAllNamespaces(Zend_Gdata_Photos::$namespaces); parent::__construct(); $this->setText($text); } }
Riges/KawaiViewModel
www/libs/Zend/Gdata/Photos/Extension/Id.php
PHP
bsd-3-clause
1,846
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/web_applications/preinstalled_web_apps/google_chat.h" #include "chrome/browser/web_applications/web_app_id_constants.h" namespace web_app { ExternalInstallOptions GetConfigForGoogleChat() { ExternalInstallOptions options( /*install_url=*/GURL( "https://mail.google.com/chat/download?usp=chrome_default"), /*user_display_mode=*/DisplayMode::kStandalone, /*install_source=*/ExternalInstallSource::kExternalDefault); // Exclude managed users until we have a way for admins to block the app. options.user_type_allowlist = {"unmanaged"}; options.only_for_new_users = true; options.expected_app_id = kGoogleChatAppId; return options; } } // namespace web_app
chromium/chromium
chrome/browser/web_applications/preinstalled_web_apps/google_chat.cc
C++
bsd-3-clause
891
/*================================================================================ Copyright (c) 2013 Steve Jin. All Rights Reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of VMware, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
================================================================================*/ package com.vmware.vim25; /** * @author Steve Jin (http://www.doublecloud.org) * @version 5.1 */ @SuppressWarnings("all") public class CustomizationUserData extends DynamicData { public String fullName; public String orgName; public CustomizationName computerName; public String productId; public String getFullName() { return this.fullName; } public String getOrgName() { return this.orgName; } public CustomizationName getComputerName() { return this.computerName; } public String getProductId() { return this.productId; } public void setFullName(String fullName) { this.fullName = fullName; } public void setOrgName(String orgName) { this.orgName = orgName; } public void setComputerName(CustomizationName computerName) { this.computerName = computerName; } public void setProductId(String productId) { this.productId = productId; } }
n4ybn/yavijava
src/main/java/com/vmware/vim25/CustomizationUserData.java
Java
bsd-3-clause
2,636
/* Copyright 2010 Larry Gritz and the other authors and contributors. All Rights Reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the software's owners nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
(This is the Modified BSD License) */ #include "sgi_pvt.h" #include "OpenImageIO/dassert.h" OIIO_PLUGIN_NAMESPACE_BEGIN // Obligatory material to make this a recognizeable imageio plugin: OIIO_PLUGIN_EXPORTS_BEGIN OIIO_EXPORT int sgi_imageio_version = OIIO_PLUGIN_VERSION; OIIO_EXPORT ImageInput *sgi_input_imageio_create () { return new SgiInput; } OIIO_EXPORT const char *sgi_input_extensions[] = { "sgi", "rgb", "rgba", "bw", "int", "inta", NULL }; OIIO_PLUGIN_EXPORTS_END bool SgiInput::valid_file (const std::string &filename) const { FILE *fd = Filesystem::fopen (filename, "rb"); if (!fd) return false; int16_t magic; bool ok = (::fread (&magic, sizeof(magic), 1, fd) == 1) && (magic == sgi_pvt::SGI_MAGIC); fclose (fd); return ok; } bool SgiInput::open (const std::string &name, ImageSpec &spec) { // saving name for later use m_filename = name; m_fd = Filesystem::fopen (m_filename, "rb"); if (!m_fd) { error ("Could not open file \"%s\"", name.c_str()); return false; } if (! read_header ()) return false; if (m_sgi_header.magic != sgi_pvt::SGI_MAGIC) { error ("\"%s\" is not a SGI file, magic number doesn't match", m_filename.c_str()); close (); return false; } int height = 0; int nchannels = 0; switch (m_sgi_header.dimension) { case sgi_pvt::ONE_SCANLINE_ONE_CHANNEL: height = 1; nchannels = 1; break; case sgi_pvt::MULTI_SCANLINE_ONE_CHANNEL: height = m_sgi_header.ysize; nchannels = 1; break; case sgi_pvt::MULTI_SCANLINE_MULTI_CHANNEL: height = m_sgi_header.ysize; nchannels = m_sgi_header.zsize; break; default: error ("Bad dimension: %d", m_sgi_header.dimension); close (); return false; } if (m_sgi_header.colormap == sgi_pvt::COLORMAP || m_sgi_header.colormap == sgi_pvt::SCREEN) { error ("COLORMAP and SCREEN color map types aren't supported"); close (); return false; } m_spec = ImageSpec (m_sgi_header.xsize, height, nchannels, m_sgi_header.bpc == 1 ? 
TypeDesc::UINT8 : TypeDesc::UINT16); if (strlen (m_sgi_header.imagename)) m_spec.attribute("ImageDescription", m_sgi_header.imagename); if (m_sgi_header.storage == sgi_pvt::RLE) { m_spec.attribute("compression", "rle"); if (! read_offset_tables ()) return false; } spec = m_spec; return true; } bool SgiInput::read_native_scanline (int y, int z, void *data) { if (y < 0 || y > m_spec.height) return false; y = m_spec.height - y - 1; int bpc = m_sgi_header.bpc; std::vector<std::vector<unsigned char> > channeldata (m_spec.nchannels); if (m_sgi_header.storage == sgi_pvt::RLE) { // reading and uncompressing first channel (red in RGBA images) for (int c = 0; c < m_spec.nchannels; ++c) { int off = y + c*m_spec.height; // offset for this scanline/channel int scanline_offset = start_tab[off]; int scanline_length = length_tab[off]; channeldata[c].resize (m_spec.width * bpc); uncompress_rle_channel (scanline_offset, scanline_length, &(channeldata[c][0])); } } else { // non-RLE case -- just read directly into our channel data for (int c = 0; c < m_spec.nchannels; ++c) { int off = y + c*m_spec.height; // offset for this scanline/channel int scanline_offset = sgi_pvt::SGI_HEADER_LEN + off * m_spec.width * bpc; fseek (m_fd, scanline_offset, SEEK_SET); channeldata[c].resize (m_spec.width * bpc); if (! 
fread (&(channeldata[c][0]), 1, m_spec.width * bpc)) return false; } } if (m_spec.nchannels == 1) { // If just one channel, no interleaving is necessary, just memcpy memcpy (data, &(channeldata[0][0]), channeldata[0].size()); } else { unsigned char *cdata = (unsigned char *)data; for (int x = 0; x < m_spec.width; ++x) { for (int c = 0; c < m_spec.nchannels; ++c) { *cdata++ = channeldata[c][x*bpc]; if (bpc == 2) *cdata++ = channeldata[c][x*bpc+1]; } } } // Swap endianness if needed if (bpc == 2 && littleendian()) swap_endian ((unsigned short *)data, m_spec.width*m_spec.nchannels); return true; } bool SgiInput::uncompress_rle_channel(int scanline_off, int scanline_len, unsigned char *out) { int bpc = m_sgi_header.bpc; std::vector<unsigned char> rle_scanline (scanline_len); fseek (m_fd, scanline_off, SEEK_SET); if (! fread (&rle_scanline[0], 1, scanline_len)) return false; int limit = m_spec.width; int i = 0; if (bpc == 1) { // 1 bit per channel while (i < scanline_len) { // Read a byte, it is the count. unsigned char value = rle_scanline[i++]; int count = value & 0x7F; // If the count is zero, we're done if (! count) break; // If the high bit is set, we just copy the next 'count' values if (value & 0x80) { while (count--) { DASSERT (i < scanline_len && limit > 0); *(out++) = rle_scanline[i++]; --limit; } } // If the high bit is zero, we copy the NEXT value, count times else { value = rle_scanline[i++]; while (count--) { DASSERT (limit > 0); *(out++) = value; --limit; } } } } else { // 2 bits per channel ASSERT (bpc == 2); while (i < scanline_len) { // Read a byte, it is the count. unsigned short value = (rle_scanline[i] << 8) | rle_scanline[i+1]; i += 2; int count = value & 0x7F; // If the count is zero, we're done if (! 
count) break; // If the high bit is set, we just copy the next 'count' values if (value & 0x80) { while (count--) { DASSERT (i+1 < scanline_len && limit > 0); *(out++) = rle_scanline[i++]; *(out++) = rle_scanline[i++]; --limit; } } // If the high bit is zero, we copy the NEXT value, count times else { while (count--) { DASSERT (limit > 0); *(out++) = rle_scanline[i]; *(out++) = rle_scanline[i+1]; --limit; } i += 2; } } } if (i != scanline_len || limit != 0) { error ("Corrupt RLE data"); return false; } return true; } bool SgiInput::close() { if (m_fd) fclose (m_fd); init (); return true; } bool SgiInput::read_header() { if (!fread(&m_sgi_header.magic, sizeof(m_sgi_header.magic), 1) || !fread(&m_sgi_header.storage, sizeof(m_sgi_header.storage), 1) || !fread(&m_sgi_header.bpc, sizeof(m_sgi_header.bpc), 1) || !fread(&m_sgi_header.dimension, sizeof(m_sgi_header.dimension), 1) || !fread(&m_sgi_header.xsize, sizeof(m_sgi_header.xsize), 1) || !fread(&m_sgi_header.ysize, sizeof(m_sgi_header.ysize), 1) || !fread(&m_sgi_header.zsize, sizeof(m_sgi_header.zsize), 1) || !fread(&m_sgi_header.pixmin, sizeof(m_sgi_header.pixmin), 1) || !fread(&m_sgi_header.pixmax, sizeof(m_sgi_header.pixmax), 1) || !fread(&m_sgi_header.dummy, sizeof(m_sgi_header.dummy), 1) || !fread(&m_sgi_header.imagename, sizeof(m_sgi_header.imagename), 1)) return false; m_sgi_header.imagename[79] = '\0'; if (! 
fread(&m_sgi_header.colormap, sizeof(m_sgi_header.colormap), 1)) return false; //don't read dummy bytes fseek (m_fd, 404, SEEK_CUR); if (littleendian()) { swap_endian(&m_sgi_header.magic); swap_endian(&m_sgi_header.dimension); swap_endian(&m_sgi_header.xsize); swap_endian(&m_sgi_header.ysize); swap_endian(&m_sgi_header.zsize); swap_endian(&m_sgi_header.pixmin); swap_endian(&m_sgi_header.pixmax); swap_endian(&m_sgi_header.colormap); } return true; } bool SgiInput::read_offset_tables () { int tables_size = m_sgi_header.ysize * m_sgi_header.zsize; start_tab.resize(tables_size); length_tab.resize(tables_size); if (!fread (&start_tab[0], sizeof(uint32_t), tables_size) || !fread (&length_tab[0], sizeof(uint32_t), tables_size)) return false; if (littleendian ()) { swap_endian (&length_tab[0], length_tab.size ()); swap_endian (&start_tab[0], start_tab.size()); } return true; } OIIO_PLUGIN_NAMESPACE_END
sambler/oiio
src/sgi.imageio/sgiinput.cpp
C++
bsd-3-clause
10,909
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef COMPONENTS_CLIENT_HINTS_COMMON_CLIENT_HINTS_H_ #define COMPONENTS_CLIENT_HINTS_COMMON_CLIENT_HINTS_H_ #include "components/content_settings/core/common/content_settings.h" namespace url { class Origin; } namespace blink { class EnabledClientHints; } namespace client_hints { const char kClientHintsSettingKey[] = "client_hints"; // Retrieves the persistent client hints that should be set when fetching a // resource from |url|. The method updates |client_hints| with the result. // |client_hints_rules| contains the content settings for the client hints. void GetAllowedClientHintsFromSource( const url::Origin& origin, const ContentSettingsForOneType& client_hints_rules, blink::EnabledClientHints* client_hints); } // namespace client_hints #endif // COMPONENTS_CLIENT_HINTS_COMMON_CLIENT_HINTS_H_
chromium/chromium
components/client_hints/common/client_hints.h
C
bsd-3-clause
997
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <link rel="shortcut icon" type="image/ico" href="http://www.datatables.net/favicon.ico"> <title>DataTables example - jQuery UI ThemeRoller</title> <link rel="stylesheet" type="text/css" href="//code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css"> <link rel="stylesheet" type="text/css" href= "//cdn.datatables.net/plug-ins/505bef35b56/integration/jqueryui/dataTables.jqueryui.css"> <link rel="stylesheet" type="text/css" href="../resources/syntax/shCore.css"> <link rel="stylesheet" type="text/css" href="../resources/demo.css"> <style type="text/css" class="init"> </style> <script type="text/javascript" language="javascript" src="../../media/js/jquery.js"></script> <script type="text/javascript" language="javascript" src="../../media/js/jquery.dataTables.js"></script> <script type="text/javascript" language="javascript" src= "//cdn.datatables.net/plug-ins/505bef35b56/integration/jqueryui/dataTables.jqueryui.js"></script> <script type="text/javascript" language="javascript" src="../resources/syntax/shCore.js"></script> <script type="text/javascript" language="javascript" src="../resources/demo.js"></script> <script type="text/javascript" language="javascript" class="init"> $(document).ready(function() { $('#example').dataTable(); } ); </script> </head> <body class="dt-example"> <div class="container"> <section> <h1>DataTables example <span>- jQuery UI ThemeRoller</span></h1> <div class="info"> <p>DataTables has the ability to integrate seamlessly with almost any styling library, and integration files are provided for several of the popular styling libraries, including <a href= "//jqueryui.com">jQuery UI</a>.</p> <p>To have your table styles integrate with jQuery UI's ThemeRoller styles, simply include the DataTables CSS and JS integration files for jQuery UI, as shown in this example. 
Note also that because jQuery doesn't provide table styles like some other CSS frameworks, the CSS integration file does add this styling information.</p> <p>Please note that DataTables 1.10- actually has built-in support for jQuery UI styling integration through the <a href="//datatables.net/reference/option/jQueryUI"><code class="option" title= "Initialisation option">jQueryUI</code></a> option. However, this option is deprecated in DataTables 1.10 and will be removed DataTables 1.11, matching how styling integration is provided for other libraries - a more modular, maintainable and extensible method. The method presented on this page is the method that will be use in future.</p> </div> <table id="example" class="display" cellspacing="0" width="100%"> <thead> <tr> <th>Name</th> <th>Position</th> <th>Office</th> <th>Age</th> <th>Start date</th> <th>Salary</th> </tr> </thead> <tfoot> <tr> <th>Name</th> <th>Position</th> <th>Office</th> <th>Age</th> <th>Start date</th> <th>Salary</th> </tr> </tfoot> <tbody> <tr> <td>Tiger Nixon</td> <td>System Architect</td> <td>Edinburgh</td> <td>61</td> <td>2011/04/25</td> <td>$320,800</td> </tr> <tr> <td>Garrett Winters</td> <td>Accountant</td> <td>Tokyo</td> <td>63</td> <td>2011/07/25</td> <td>$170,750</td> </tr> <tr> <td>Ashton Cox</td> <td>Junior Technical Author</td> <td>San Francisco</td> <td>66</td> <td>2009/01/12</td> <td>$86,000</td> </tr> <tr> <td>Cedric Kelly</td> <td>Senior Javascript Developer</td> <td>Edinburgh</td> <td>22</td> <td>2012/03/29</td> <td>$433,060</td> </tr> <tr> <td>Airi Satou</td> <td>Accountant</td> <td>Tokyo</td> <td>33</td> <td>2008/11/28</td> <td>$162,700</td> </tr> <tr> <td>Brielle Williamson</td> <td>Integration Specialist</td> <td>New York</td> <td>61</td> <td>2012/12/02</td> <td>$372,000</td> </tr> <tr> <td>Herrod Chandler</td> <td>Sales Assistant</td> <td>San Francisco</td> <td>59</td> <td>2012/08/06</td> <td>$137,500</td> </tr> <tr> <td>Rhona Davidson</td> <td>Integration Specialist</td> 
<td>Tokyo</td> <td>55</td> <td>2010/10/14</td> <td>$327,900</td> </tr> <tr> <td>Colleen Hurst</td> <td>Javascript Developer</td> <td>San Francisco</td> <td>39</td> <td>2009/09/15</td> <td>$205,500</td> </tr> <tr> <td>Sonya Frost</td> <td>Software Engineer</td> <td>Edinburgh</td> <td>23</td> <td>2008/12/13</td> <td>$103,600</td> </tr> <tr> <td>Jena Gaines</td> <td>Office Manager</td> <td>London</td> <td>30</td> <td>2008/12/19</td> <td>$90,560</td> </tr> <tr> <td>Quinn Flynn</td> <td>Support Lead</td> <td>Edinburgh</td> <td>22</td> <td>2013/03/03</td> <td>$342,000</td> </tr> <tr> <td>Charde Marshall</td> <td>Regional Director</td> <td>San Francisco</td> <td>36</td> <td>2008/10/16</td> <td>$470,600</td> </tr> <tr> <td>Haley Kennedy</td> <td>Senior Marketing Designer</td> <td>London</td> <td>43</td> <td>2012/12/18</td> <td>$313,500</td> </tr> <tr> <td>Tatyana Fitzpatrick</td> <td>Regional Director</td> <td>London</td> <td>19</td> <td>2010/03/17</td> <td>$385,750</td> </tr> <tr> <td>Michael Silva</td> <td>Marketing Designer</td> <td>London</td> <td>66</td> <td>2012/11/27</td> <td>$198,500</td> </tr> <tr> <td>Paul Byrd</td> <td>Chief Financial Officer (CFO)</td> <td>New York</td> <td>64</td> <td>2010/06/09</td> <td>$725,000</td> </tr> <tr> <td>Gloria Little</td> <td>Systems Administrator</td> <td>New York</td> <td>59</td> <td>2009/04/10</td> <td>$237,500</td> </tr> <tr> <td>Bradley Greer</td> <td>Software Engineer</td> <td>London</td> <td>41</td> <td>2012/10/13</td> <td>$132,000</td> </tr> <tr> <td>Dai Rios</td> <td>Personnel Lead</td> <td>Edinburgh</td> <td>35</td> <td>2012/09/26</td> <td>$217,500</td> </tr> <tr> <td>Jenette Caldwell</td> <td>Development Lead</td> <td>New York</td> <td>30</td> <td>2011/09/03</td> <td>$345,000</td> </tr> <tr> <td>Yuri Berry</td> <td>Chief Marketing Officer (CMO)</td> <td>New York</td> <td>40</td> <td>2009/06/25</td> <td>$675,000</td> </tr> <tr> <td>Caesar Vance</td> <td>Pre-Sales Support</td> <td>New York</td> <td>21</td> 
<td>2011/12/12</td> <td>$106,450</td> </tr> <tr> <td>Doris Wilder</td> <td>Sales Assistant</td> <td>Sidney</td> <td>23</td> <td>2010/09/20</td> <td>$85,600</td> </tr> <tr> <td>Angelica Ramos</td> <td>Chief Executive Officer (CEO)</td> <td>London</td> <td>47</td> <td>2009/10/09</td> <td>$1,200,000</td> </tr> <tr> <td>Gavin Joyce</td> <td>Developer</td> <td>Edinburgh</td> <td>42</td> <td>2010/12/22</td> <td>$92,575</td> </tr> <tr> <td>Jennifer Chang</td> <td>Regional Director</td> <td>Singapore</td> <td>28</td> <td>2010/11/14</td> <td>$357,650</td> </tr> <tr> <td>Brenden Wagner</td> <td>Software Engineer</td> <td>San Francisco</td> <td>28</td> <td>2011/06/07</td> <td>$206,850</td> </tr> <tr> <td>Fiona Green</td> <td>Chief Operating Officer (COO)</td> <td>San Francisco</td> <td>48</td> <td>2010/03/11</td> <td>$850,000</td> </tr> <tr> <td>Shou Itou</td> <td>Regional Marketing</td> <td>Tokyo</td> <td>20</td> <td>2011/08/14</td> <td>$163,000</td> </tr> <tr> <td>Michelle House</td> <td>Integration Specialist</td> <td>Sidney</td> <td>37</td> <td>2011/06/02</td> <td>$95,400</td> </tr> <tr> <td>Suki Burks</td> <td>Developer</td> <td>London</td> <td>53</td> <td>2009/10/22</td> <td>$114,500</td> </tr> <tr> <td>Prescott Bartlett</td> <td>Technical Author</td> <td>London</td> <td>27</td> <td>2011/05/07</td> <td>$145,000</td> </tr> <tr> <td>Gavin Cortez</td> <td>Team Leader</td> <td>San Francisco</td> <td>22</td> <td>2008/10/26</td> <td>$235,500</td> </tr> <tr> <td>Martena Mccray</td> <td>Post-Sales support</td> <td>Edinburgh</td> <td>46</td> <td>2011/03/09</td> <td>$324,050</td> </tr> <tr> <td>Unity Butler</td> <td>Marketing Designer</td> <td>San Francisco</td> <td>47</td> <td>2009/12/09</td> <td>$85,675</td> </tr> <tr> <td>Howard Hatfield</td> <td>Office Manager</td> <td>San Francisco</td> <td>51</td> <td>2008/12/16</td> <td>$164,500</td> </tr> <tr> <td>Hope Fuentes</td> <td>Secretary</td> <td>San Francisco</td> <td>41</td> <td>2010/02/12</td> <td>$109,850</td> </tr> <tr> 
<td>Vivian Harrell</td> <td>Financial Controller</td> <td>San Francisco</td> <td>62</td> <td>2009/02/14</td> <td>$452,500</td> </tr> <tr> <td>Timothy Mooney</td> <td>Office Manager</td> <td>London</td> <td>37</td> <td>2008/12/11</td> <td>$136,200</td> </tr> <tr> <td>Jackson Bradshaw</td> <td>Director</td> <td>New York</td> <td>65</td> <td>2008/09/26</td> <td>$645,750</td> </tr> <tr> <td>Olivia Liang</td> <td>Support Engineer</td> <td>Singapore</td> <td>64</td> <td>2011/02/03</td> <td>$234,500</td> </tr> <tr> <td>Bruno Nash</td> <td>Software Engineer</td> <td>London</td> <td>38</td> <td>2011/05/03</td> <td>$163,500</td> </tr> <tr> <td>Sakura Yamamoto</td> <td>Support Engineer</td> <td>Tokyo</td> <td>37</td> <td>2009/08/19</td> <td>$139,575</td> </tr> <tr> <td>Thor Walton</td> <td>Developer</td> <td>New York</td> <td>61</td> <td>2013/08/11</td> <td>$98,540</td> </tr> <tr> <td>Finn Camacho</td> <td>Support Engineer</td> <td>San Francisco</td> <td>47</td> <td>2009/07/07</td> <td>$87,500</td> </tr> <tr> <td>Serge Baldwin</td> <td>Data Coordinator</td> <td>Singapore</td> <td>64</td> <td>2012/04/09</td> <td>$138,575</td> </tr> <tr> <td>Zenaida Frank</td> <td>Software Engineer</td> <td>New York</td> <td>63</td> <td>2010/01/04</td> <td>$125,250</td> </tr> <tr> <td>Zorita Serrano</td> <td>Software Engineer</td> <td>San Francisco</td> <td>56</td> <td>2012/06/01</td> <td>$115,000</td> </tr> <tr> <td>Jennifer Acosta</td> <td>Junior Javascript Developer</td> <td>Edinburgh</td> <td>43</td> <td>2013/02/01</td> <td>$75,650</td> </tr> <tr> <td>Cara Stevens</td> <td>Sales Assistant</td> <td>New York</td> <td>46</td> <td>2011/12/06</td> <td>$145,600</td> </tr> <tr> <td>Hermione Butler</td> <td>Regional Director</td> <td>London</td> <td>47</td> <td>2011/03/21</td> <td>$356,250</td> </tr> <tr> <td>Lael Greer</td> <td>Systems Administrator</td> <td>London</td> <td>21</td> <td>2009/02/27</td> <td>$103,500</td> </tr> <tr> <td>Jonas Alexander</td> <td>Developer</td> <td>San Francisco</td> 
<td>30</td> <td>2010/07/14</td> <td>$86,500</td> </tr> <tr> <td>Shad Decker</td> <td>Regional Director</td> <td>Edinburgh</td> <td>51</td> <td>2008/11/13</td> <td>$183,000</td> </tr> <tr> <td>Michael Bruce</td> <td>Javascript Developer</td> <td>Singapore</td> <td>29</td> <td>2011/06/27</td> <td>$183,000</td> </tr> <tr> <td>Donna Snider</td> <td>Customer Support</td> <td>New York</td> <td>27</td> <td>2011/01/25</td> <td>$112,000</td> </tr> </tbody> </table> <ul class="tabs"> <li class="active">Javascript</li> <li>HTML</li> <li>CSS</li> <li>Ajax</li> <li>Server-side script</li> </ul> <div class="tabs"> <div class="js"> <p>The Javascript shown below is used to initialise the table shown in this example:</p><code class="multiline brush: js;">$(document).ready(function() { $('#example').dataTable(); } );</code> <p>In addition to the above code, the following Javascript library files are loaded for use in this example:</p> <ul> <li><a href="../../media/js/jquery.js">../../media/js/jquery.js</a></li> <li><a href="../../media/js/jquery.dataTables.js">../../media/js/jquery.dataTables.js</a></li> <li><a href= "//cdn.datatables.net/plug-ins/505bef35b56/integration/jqueryui/dataTables.jqueryui.js">//cdn.datatables.net/plug-ins/505bef35b56/integration/jqueryui/dataTables.jqueryui.js</a></li> </ul> </div> <div class="table"> <p>The HTML shown below is the raw HTML table element, before it has been enhanced by DataTables:</p> </div> <div class="css"> <div> <p>This example uses a little bit of additional CSS beyond what is loaded from the library files (below), in order to correctly display the table. 
The additional CSS used is shown below:</p><code class="multiline brush: js;"></code> </div> <p>The following CSS library files are loaded for use in this example to provide the styling of the table:</p> <ul> <li><a href= "//code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css">//code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css</a></li> <li><a href= "//cdn.datatables.net/plug-ins/505bef35b56/integration/jqueryui/dataTables.jqueryui.css">//cdn.datatables.net/plug-ins/505bef35b56/integration/jqueryui/dataTables.jqueryui.css</a></li> </ul> </div> <div class="ajax"> <p>This table loads data by Ajax. The latest data that has been loaded is shown below. This data will update automatically as any additional data is loaded.</p> </div> <div class="php"> <p>The script used to perform the server-side processing for this table is shown below. Please note that this is just an example script using PHP. Server-side processing scripts can be written in any language, using <a href="//datatables.net/manual/server-side">the protocol described in the DataTables documentation</a>.</p> </div> </div> </section> </div> <section> <div class="footer"> <div class="gradient"></div> <div class="liner"> <h2>Other examples</h2> <div class="toc"> <div class="toc-group"> <h3><a href="../basic_init/index.html">Basic initialisation</a></h3> <ul class="toc"> <li><a href="../basic_init/zero_configuration.html">Zero configuration</a></li> <li><a href="../basic_init/filter_only.html">Feature enable / disable</a></li> <li><a href="../basic_init/table_sorting.html">Default ordering (sorting)</a></li> <li><a href="../basic_init/multi_col_sort.html">Multi-column ordering</a></li> <li><a href="../basic_init/multiple_tables.html">Multiple tables</a></li> <li><a href="../basic_init/hidden_columns.html">Hidden columns</a></li> <li><a href="../basic_init/complex_header.html">Complex headers (rowspan and colspan)</a></li> <li><a href="../basic_init/dom.html">DOM positioning</a></li> <li><a 
href="../basic_init/flexible_width.html">Flexible table width</a></li> <li><a href="../basic_init/state_save.html">State saving</a></li> <li><a href="../basic_init/alt_pagination.html">Alternative pagination</a></li> <li><a href="../basic_init/scroll_y.html">Scroll - vertical</a></li> <li><a href="../basic_init/scroll_x.html">Scroll - horizontal</a></li> <li><a href="../basic_init/scroll_xy.html">Scroll - horizontal and vertical</a></li> <li><a href="../basic_init/scroll_y_theme.html">Scroll - vertical with jQuery UI ThemeRoller</a></li> <li><a href="../basic_init/comma-decimal.html">Language - Comma decimal place</a></li> <li><a href="../basic_init/language.html">Language options</a></li> </ul> </div> <div class="toc-group"> <h3><a href="../advanced_init/index.html">Advanced initialisation</a></h3> <ul class="toc"> <li><a href="../advanced_init/events_live.html">DOM / jQuery events</a></li> <li><a href="../advanced_init/dt_events.html">DataTables events</a></li> <li><a href="../advanced_init/column_render.html">Column rendering</a></li> <li><a href="../advanced_init/length_menu.html">Page length options</a></li> <li><a href="../advanced_init/dom_multiple_elements.html">Multiple table control elements</a></li> <li><a href="../advanced_init/complex_header.html">Complex headers (rowspan / colspan)</a></li> <li><a href="../advanced_init/html5-data-attributes.html">HTML5 data-* attributes</a></li> <li><a href="../advanced_init/language_file.html">Language file</a></li> <li><a href="../advanced_init/defaults.html">Setting defaults</a></li> <li><a href="../advanced_init/row_callback.html">Row created callback</a></li> <li><a href="../advanced_init/row_grouping.html">Row grouping</a></li> <li><a href="../advanced_init/footer_callback.html">Footer callback</a></li> <li><a href="../advanced_init/dom_toolbar.html">Custom toolbar elements</a></li> <li><a href="../advanced_init/sort_direction_control.html">Order direction sequence control</a></li> </ul> </div> <div 
class="toc-group"> <h3><a href="./index.html">Styling</a></h3> <ul class="toc active"> <li><a href="./display.html">Base style</a></li> <li><a href="./no-classes.html">Base style - no styling classes</a></li> <li><a href="./row-border.html">Base style - row borders</a></li> <li><a href="./cell-border.html">Base style - cell borders</a></li> <li><a href="./hover.html">Base style - hover</a></li> <li><a href="./order-column.html">Base style - order-column</a></li> <li><a href="./stripe.html">Base style - stripe</a></li> <li class="active"><a href="./jqueryUI.html">jQuery UI ThemeRoller</a></li> <li><a href="./bootstrap.html">Bootstrap</a></li> <li><a href="./foundation.html">Foundation</a></li> </ul> </div> <div class="toc-group"> <h3><a href="../data_sources/index.html">Data sources</a></h3> <ul class="toc"> <li><a href="../data_sources/dom.html">HTML (DOM) sourced data</a></li> <li><a href="../data_sources/ajax.html">Ajax sourced data</a></li> <li><a href="../data_sources/js_array.html">Javascript sourced data</a></li> <li><a href="../data_sources/server_side.html">Server-side processing</a></li> </ul> </div> <div class="toc-group"> <h3><a href="../api/index.html">API</a></h3> <ul class="toc"> <li><a href="../api/add_row.html">Add rows</a></li> <li><a href="../api/multi_filter.html">Individual column filtering (text inputs)</a></li> <li><a href="../api/multi_filter_select.html">Individual column filtering (select inputs)</a></li> <li><a href="../api/highlight.html">Highlighting rows and columns</a></li> <li><a href="../api/row_details.html">Child rows (show extra / detailed information)</a></li> <li><a href="../api/select_row.html">Row selection (multiple rows)</a></li> <li><a href="../api/select_single_row.html">Row selection and deletion (single row)</a></li> <li><a href="../api/form.html">Form inputs</a></li> <li><a href="../api/counter_columns.html">Index column</a></li> <li><a href="../api/show_hide.html">Show / hide columns dynamically</a></li> <li><a 
href="../api/api_in_init.html">Using API in callbacks</a></li> <li><a href="../api/tabs_and_scrolling.html">Scrolling and jQuery UI tabs</a></li> <li><a href="../api/regex.html">Filtering API (regular expressions)</a></li> </ul> </div> <div class="toc-group"> <h3><a href="../ajax/index.html">Ajax</a></h3> <ul class="toc"> <li><a href="../ajax/simple.html">Ajax data source (arrays)</a></li> <li><a href="../ajax/objects.html">Ajax data source (objects)</a></li> <li><a href="../ajax/deep.html">Nested object data (objects)</a></li> <li><a href="../ajax/objects_subarrays.html">Nested object data (arrays)</a></li> <li><a href="../ajax/orthogonal-data.html">Orthogonal data</a></li> <li><a href="../ajax/null_data_source.html">Generated content for a column</a></li> <li><a href="../ajax/custom_data_property.html">Custom data source property</a></li> <li><a href="../ajax/custom_data_flat.html">Flat array data source</a></li> <li><a href="../ajax/defer_render.html">Deferred rendering for speed</a></li> </ul> </div> <div class="toc-group"> <h3><a href="../server_side/index.html">Server-side</a></h3> <ul class="toc"> <li><a href="../server_side/simple.html">Server-side processing</a></li> <li><a href="../server_side/custom_vars.html">Custom HTTP variables</a></li> <li><a href="../server_side/post.html">POST data</a></li> <li><a href="../server_side/ids.html">Automatic addition of row ID attributes</a></li> <li><a href="../server_side/object_data.html">Object data source</a></li> <li><a href="../server_side/row_details.html">Row details</a></li> <li><a href="../server_side/select_rows.html">Row selection</a></li> <li><a href="../server_side/jsonp.html">JSONP data source for remote domains</a></li> <li><a href="../server_side/defer_loading.html">Deferred loading of data</a></li> <li><a href="../server_side/pipeline.html">Pipelining data to reduce Ajax calls for paging</a></li> </ul> </div> <div class="toc-group"> <h3><a href="../plug-ins/index.html">Plug-ins</a></h3> <ul 
class="toc"> <li><a href="../plug-ins/api.html">API plug-in methods</a></li> <li><a href="../plug-ins/sorting_auto.html">Ordering plug-ins (with type detection)</a></li> <li><a href="../plug-ins/sorting_manual.html">Ordering plug-ins (no type detection)</a></li> <li><a href="../plug-ins/range_filtering.html">Custom filtering - range search</a></li> <li><a href="../plug-ins/dom_sort.html">Live DOM ordering</a></li> </ul> </div> </div> <div class="epilogue"> <p>Please refer to the <a href="http://www.datatables.net">DataTables documentation</a> for full information about its API properties and methods.<br> Additionally, there are a wide range of <a href="http://www.datatables.net/extras">extras</a> and <a href="http://www.datatables.net/plug-ins">plug-ins</a> which extend the capabilities of DataTables.</p> <p class="copyright">DataTables designed and created by <a href= "http://www.sprymedia.co.uk">SpryMedia Ltd</a> &#169; 2007-2014<br> DataTables is licensed under the <a href="http://www.datatables.net/mit">MIT license</a>.</p> </div> </div> </div> </section> </body> </html>
akaidrive2014/persseleb
themes/admin/tisa/assets/lib/DataTables/examples/styling/jqueryUI.html
HTML
bsd-3-clause
24,558
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>State types, algebras and operations</title> <link rel="stylesheet" href="../../../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.79.1"> <link rel="home" href="../../index.html" title="Chapter 1. Boost.Numeric.Odeint"> <link rel="up" href="../odeint_in_detail.html" title="odeint in detail"> <link rel="prev" href="iterators_and_ranges.html" title="Iterators and Ranges"> <link rel="next" href="using_boost__ref.html" title="Using boost::ref"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../logo.jpg"></td> <td align="center"><a href="../../../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="iterators_and_ranges.html"><img src="../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../odeint_in_detail.html"><img src="../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="using_boost__ref.html"><img src="../../../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h3 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations"></a><a class="link" href="state_types__algebras_and_operations.html" title="State types, 
algebras and operations">State types, algebras and operations</a> </h3></div></div></div> <div class="toc"><dl class="toc"> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing">Construction/Resizing</a></span></dt> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations">Algebras and Operations</a></span></dt> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.adapt_your_own_operations">Adapt your own operations</a></span></dt> </dl></div> <p> In odeint the stepper algorithms are implemented independently of the underlying fundamental mathematical operations. This is realized by giving the user full control over the state type and the mathematical operations for this state type. Technically, this is done by introducing three concepts: StateType, Algebra, Operations. Most of the steppers in odeint expect three class types fulfilling these concepts as template parameters. Note that these concepts are not fully independent of each other but rather a valid combination must be provided in order to make the steppers work. In the following we will give some examples on reasonable state_type-algebra-operations combinations. 
For the most common state types, like <code class="computeroutput"><span class="identifier">vector</span><span class="special">&lt;</span><span class="keyword">double</span><span class="special">&gt;</span></code> or <code class="computeroutput"><span class="identifier">array</span><span class="special">&lt;</span><span class="keyword">double</span><span class="special">,</span><span class="identifier">N</span><span class="special">&gt;</span></code> the default values range_algebra and default_operations are perfectly fine and odeint can be used as is without worrying about algebra/operations at all. </p> <div class="important"><table border="0" summary="Important"> <tr> <td rowspan="2" align="center" valign="top" width="25"><img alt="[Important]" src="../../../../../../../doc/src/images/important.png"></td> <th align="left">Important</th> </tr> <tr><td align="left" valign="top"><p> state_type, algebra and operations are not independent, a valid combination must be provided to make odeint work properly </p></td></tr> </table></div> <p> Moreover, as odeint handles the memory required for intermediate temporary objects itself, it also needs knowledge about how to create state_type objects and maybe how to allocate memory (resizing). All in all, the following things have to be taken care of when odeint is used with non-standard state types: </p> <div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; "> <li class="listitem"> construction/destruction </li> <li class="listitem"> resizing (if possible/required) </li> <li class="listitem"> algebraic operations </li> </ul></div> <p> Again, odeint already provides basic interfaces for most of the usual state types. 
So if you use a <code class="computeroutput"><span class="identifier">std</span><span class="special">::</span><span class="identifier">vector</span></code>, or a <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">array</span></code> as state type no additional work is required, they just work out of the box. </p> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing" title="Construction/Resizing">Construction/Resizing</a> </h4></div></div></div> <div class="toc"><dl class="toc"> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing.using_the_container_interface">Using the container interface</a></span></dt> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing.std__list">std::list</a></span></dt> </dl></div> <p> We distinguish between two basic state types: fixed sized and dynamically sized. For fixed size state types the default constructor <code class="computeroutput"><span class="identifier">state_type</span><span class="special">()</span></code> already allocates the required memory, prominent example is <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">array</span><span class="special">&lt;</span><span class="identifier">T</span><span class="special">,</span><span class="identifier">N</span><span class="special">&gt;</span></code>. 
Dynamically sized types have to be resized to make sure enough memory is allocated, the standard constructor does not take care of the resizing. Examples for this are the STL containers like <code class="computeroutput"><span class="identifier">vector</span><span class="special">&lt;</span><span class="keyword">double</span><span class="special">&gt;</span></code>. </p> <p> The most easy way of getting your own state type to work with odeint is to use a fixed size state, base calculations on the range_algebra and provide the following functionality: </p> <div class="informaltable"><table class="table"> <colgroup> <col> <col> <col> <col> </colgroup> <thead><tr> <th> <p> Name </p> </th> <th> <p> Expression </p> </th> <th> <p> Type </p> </th> <th> <p> Semantics </p> </th> </tr></thead> <tbody> <tr> <td> <p> Construct State </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">State</span> <span class="identifier">x</span><span class="special">()</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="keyword">void</span></code> </p> </td> <td> <p> Creates an instance of <code class="computeroutput"><span class="identifier">State</span></code> and allocates memory. 
</p> </td> </tr> <tr> <td> <p> Begin of the sequence </p> </td> <td> <p> boost::begin(x) </p> </td> <td> <p> Iterator </p> </td> <td> <p> Returns an iterator pointing to the begin of the sequence </p> </td> </tr> <tr> <td> <p> End of the sequence </p> </td> <td> <p> boost::end(x) </p> </td> <td> <p> Iterator </p> </td> <td> <p> Returns an iterator pointing to the end of the sequence </p> </td> </tr> </tbody> </table></div> <div class="warning"><table border="0" summary="Warning"> <tr> <td rowspan="2" align="center" valign="top" width="25"><img alt="[Warning]" src="../../../../../../../doc/src/images/warning.png"></td> <th align="left">Warning</th> </tr> <tr><td align="left" valign="top"><p> If your state type does not allocate memory by default construction, you <span class="bold"><strong>must define it as resizeable</strong></span> and provide resize functionality (see below). Otherwise segmentation faults will occur. </p></td></tr> </table></div> <p> So fixed sized arrays supported by <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> immediately work with odeint. For dynamically sized arrays one has to additionally supply the resize functionality. 
First, the state has to be tagged as resizeable by specializing the struct <code class="computeroutput"><span class="identifier">is_resizeable</span></code> which consists of one typedef and one bool value: </p> <div class="informaltable"><table class="table"> <colgroup> <col> <col> <col> <col> </colgroup> <thead><tr> <th> <p> Name </p> </th> <th> <p> Expression </p> </th> <th> <p> Type </p> </th> <th> <p> Semantics </p> </th> </tr></thead> <tbody> <tr> <td> <p> Resizability </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">is_resizeable</span><span class="special">&lt;</span><span class="identifier">State</span><span class="special">&gt;::</span><span class="identifier">type</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">true_type</span></code> or <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">false_type</span></code> </p> </td> <td> <p> Determines resizeability of the state type, returns <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">true_type</span></code> if the state is resizeable. </p> </td> </tr> <tr> <td> <p> Resizability </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">is_resizeable</span><span class="special">&lt;</span><span class="identifier">State</span><span class="special">&gt;::</span><span class="identifier">value</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="keyword">bool</span></code> </p> </td> <td> <p> Same as above, but with <code class="computeroutput"><span class="keyword">bool</span></code> value. 
</p> </td> </tr> </tbody> </table></div> <p> Defining <code class="computeroutput"><span class="identifier">type</span></code> to be <code class="computeroutput"><span class="identifier">true_type</span></code> and <code class="computeroutput"><span class="identifier">value</span></code> as <code class="computeroutput"><span class="keyword">true</span></code> tells odeint that your state is resizeable. By default, odeint now expects the support of <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">size</span><span class="special">(</span><span class="identifier">x</span><span class="special">)</span></code> and a <code class="computeroutput"><span class="identifier">x</span><span class="special">.</span><span class="identifier">resize</span><span class="special">(</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">size</span><span class="special">(</span><span class="identifier">y</span><span class="special">)</span> <span class="special">)</span></code> member function for resizing: </p> <div class="informaltable"><table class="table"> <colgroup> <col> <col> <col> <col> </colgroup> <thead><tr> <th> <p> Name </p> </th> <th> <p> Expression </p> </th> <th> <p> Type </p> </th> <th> <p> Semantics </p> </th> </tr></thead> <tbody> <tr> <td> <p> Get size </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">size</span><span class="special">(</span> <span class="identifier">x</span> <span class="special">)</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">size_type</span></code> </p> </td> <td> <p> Returns the current size of x. 
</p> </td> </tr> <tr> <td> <p> Resize </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">x</span><span class="special">.</span><span class="identifier">resize</span><span class="special">(</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">size</span><span class="special">(</span> <span class="identifier">y</span> <span class="special">)</span> <span class="special">)</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="keyword">void</span></code> </p> </td> <td> <p> Resizes x to have the same size as y. </p> </td> </tr> </tbody> </table></div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing.using_the_container_interface"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing.using_the_container_interface" title="Using the container interface">Using the container interface</a> </h5></div></div></div> <p> As a first example we take the most simple case and implement our own vector <code class="computeroutput"><span class="identifier">my_vector</span></code> which will provide a container interface. This makes <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> working out-of-box. We add a little functionality to our vector which makes it allocate some default capacity by construction. This is helpful when using resizing as then a resize can be assured to not require a new allocation. 
</p> <p> </p> <pre class="programlisting"><span class="keyword">template</span><span class="special">&lt;</span> <span class="identifier">size_t</span> <span class="identifier">MAX_N</span> <span class="special">&gt;</span> <span class="keyword">class</span> <span class="identifier">my_vector</span> <span class="special">{</span> <span class="keyword">typedef</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">vector</span><span class="special">&lt;</span> <span class="keyword">double</span> <span class="special">&gt;</span> <span class="identifier">vector</span><span class="special">;</span> <span class="keyword">public</span><span class="special">:</span> <span class="keyword">typedef</span> <span class="identifier">vector</span><span class="special">::</span><span class="identifier">iterator</span> <span class="identifier">iterator</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">vector</span><span class="special">::</span><span class="identifier">const_iterator</span> <span class="identifier">const_iterator</span><span class="special">;</span> <span class="keyword">public</span><span class="special">:</span> <span class="identifier">my_vector</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">size_t</span> <span class="identifier">N</span> <span class="special">)</span> <span class="special">:</span> <span class="identifier">m_v</span><span class="special">(</span> <span class="identifier">N</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">m_v</span><span class="special">.</span><span class="identifier">reserve</span><span class="special">(</span> <span class="identifier">MAX_N</span> <span class="special">);</span> <span class="special">}</span> <span class="identifier">my_vector</span><span class="special">()</span> <span class="special">:</span> <span 
class="identifier">m_v</span><span class="special">()</span> <span class="special">{</span> <span class="identifier">m_v</span><span class="special">.</span><span class="identifier">reserve</span><span class="special">(</span> <span class="identifier">MAX_N</span> <span class="special">);</span> <span class="special">}</span> <span class="comment">// ... [ implement container interface ]</span> </pre> <p> </p> <p> The only thing that has to be done other than defining is thus declaring my_vector as resizeable: </p> <p> </p> <pre class="programlisting"><span class="comment">// define my_vector as resizeable</span> <span class="keyword">namespace</span> <span class="identifier">boost</span> <span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">numeric</span> <span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">odeint</span> <span class="special">{</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="identifier">size_t</span> <span class="identifier">N</span><span class="special">&gt;</span> <span class="keyword">struct</span> <span class="identifier">is_resizeable</span><span class="special">&lt;</span> <span class="identifier">my_vector</span><span class="special">&lt;</span><span class="identifier">N</span><span class="special">&gt;</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">typedef</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">true_type</span> <span class="identifier">type</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="keyword">bool</span> <span class="identifier">value</span> <span class="special">=</span> <span class="identifier">type</span><span class="special">::</span><span class="identifier">value</span><span class="special">;</span> <span class="special">};</span> 
<span class="special">}</span> <span class="special">}</span> <span class="special">}</span> </pre> <p> </p> <p> If we wouldn't specialize the <code class="computeroutput"><span class="identifier">is_resizeable</span></code> template, the code would still compile but odeint would not adjust the size of temporary internal instances of my_vector and hence try to fill zero-sized vectors resulting in segmentation faults! The full example can be found in <a href="https://github.com/headmyshoulder/odeint-v2/blob/master/examples/my_vector.cpp" target="_top">my_vector.cpp</a> </p> </div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing.std__list"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.construction_resizing.std__list" title="std::list">std::list</a> </h5></div></div></div> <p> If your state type does work with <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a>, but handles resizing differently you are required to specialize two implementations used by odeint to check a state's size and to resize: </p> <div class="informaltable"><table class="table"> <colgroup> <col> <col> <col> <col> </colgroup> <thead><tr> <th> <p> Name </p> </th> <th> <p> Expression </p> </th> <th> <p> Type </p> </th> <th> <p> Semantics </p> </th> </tr></thead> <tbody> <tr> <td> <p> Check size </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">same_size_impl</span><span class="special">&lt;</span><span class="identifier">State</span><span class="special">,</span><span class="identifier">State</span><span class="special">&gt;::</span><span class="identifier">same_size</span><span class="special">(</span><span class="identifier">x</span> <span class="special">,</span> <span class="identifier">y</span><span 
class="special">)</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="keyword">bool</span></code> </p> </td> <td> <p> Returns true if the size of x equals the size of y. </p> </td> </tr> <tr> <td> <p> Resize </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">resize_impl</span><span class="special">&lt;</span><span class="identifier">State</span><span class="special">,</span><span class="identifier">State</span><span class="special">&gt;::</span><span class="identifier">resize</span><span class="special">(</span><span class="identifier">x</span> <span class="special">,</span> <span class="identifier">y</span><span class="special">)</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="keyword">void</span></code> </p> </td> <td> <p> Resizes x to have the same size as y. </p> </td> </tr> </tbody> </table></div> <p> As an example we will use a <code class="computeroutput"><span class="identifier">std</span><span class="special">::</span><span class="identifier">list</span></code> as state type in odeint. Because <code class="computeroutput"><span class="identifier">std</span><span class="special">::</span><span class="identifier">list</span></code> is not supported by <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">size</span></code> we have to replace the same_size and resize implementation to get list to work with odeint. 
The following code shows the required template specializations: </p> <p> </p> <pre class="programlisting"><span class="keyword">typedef</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">list</span><span class="special">&lt;</span> <span class="keyword">double</span> <span class="special">&gt;</span> <span class="identifier">state_type</span><span class="special">;</span> <span class="keyword">namespace</span> <span class="identifier">boost</span> <span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">numeric</span> <span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">odeint</span> <span class="special">{</span> <span class="keyword">template</span><span class="special">&lt;</span> <span class="special">&gt;</span> <span class="keyword">struct</span> <span class="identifier">is_resizeable</span><span class="special">&lt;</span> <span class="identifier">state_type</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="comment">// declare resizeability</span> <span class="keyword">typedef</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">true_type</span> <span class="identifier">type</span><span class="special">;</span> <span class="keyword">const</span> <span class="keyword">static</span> <span class="keyword">bool</span> <span class="identifier">value</span> <span class="special">=</span> <span class="identifier">type</span><span class="special">::</span><span class="identifier">value</span><span class="special">;</span> <span class="special">};</span> <span class="keyword">template</span><span class="special">&lt;</span> <span class="special">&gt;</span> <span class="keyword">struct</span> <span class="identifier">same_size_impl</span><span class="special">&lt;</span> <span class="identifier">state_type</span> <span class="special">,</span> <span 
class="identifier">state_type</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="comment">// define how to check size</span> <span class="keyword">static</span> <span class="keyword">bool</span> <span class="identifier">same_size</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">state_type</span> <span class="special">&amp;</span><span class="identifier">v1</span> <span class="special">,</span> <span class="keyword">const</span> <span class="identifier">state_type</span> <span class="special">&amp;</span><span class="identifier">v2</span> <span class="special">)</span> <span class="special">{</span> <span class="keyword">return</span> <span class="identifier">v1</span><span class="special">.</span><span class="identifier">size</span><span class="special">()</span> <span class="special">==</span> <span class="identifier">v2</span><span class="special">.</span><span class="identifier">size</span><span class="special">();</span> <span class="special">}</span> <span class="special">};</span> <span class="keyword">template</span><span class="special">&lt;</span> <span class="special">&gt;</span> <span class="keyword">struct</span> <span class="identifier">resize_impl</span><span class="special">&lt;</span> <span class="identifier">state_type</span> <span class="special">,</span> <span class="identifier">state_type</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="comment">// define how to resize</span> <span class="keyword">static</span> <span class="keyword">void</span> <span class="identifier">resize</span><span class="special">(</span> <span class="identifier">state_type</span> <span class="special">&amp;</span><span class="identifier">v1</span> <span class="special">,</span> <span class="keyword">const</span> <span class="identifier">state_type</span> <span class="special">&amp;</span><span class="identifier">v2</span> <span class="special">)</span> 
<span class="special">{</span> <span class="identifier">v1</span><span class="special">.</span><span class="identifier">resize</span><span class="special">(</span> <span class="identifier">v2</span><span class="special">.</span><span class="identifier">size</span><span class="special">()</span> <span class="special">);</span> <span class="special">}</span> <span class="special">};</span> <span class="special">}</span> <span class="special">}</span> <span class="special">}</span> </pre> <p> </p> <p> With these definitions odeint knows how to resize <code class="computeroutput"><span class="identifier">std</span><span class="special">::</span><span class="identifier">list</span></code>s and so they can be used as state types. A complete example can be found in <a href="https://github.com/headmyshoulder/odeint-v2/blob/master/examples/list_lattice.cpp" target="_top">list_lattice.cpp</a>. </p> </div> </div> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations" title="Algebras and Operations">Algebras and Operations</a> </h4></div></div></div> <div class="toc"><dl class="toc"> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.gsl_vector">GSL Vector</a></span></dt> <dt><span class="section"><a href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.vector_space_algebra">Vector Space Algebra</a></span></dt> <dt><span class="section"><a 
href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.point_type">Point type</a></span></dt> </dl></div> <p> To provide maximum flexibility odeint is implemented in a highly modularized way. This means it is possible to change the underlying mathematical operations without touching the integration algorithms. The fundamental mathematical operations are those of a vector space, that is addition of <code class="computeroutput"><span class="identifier">state_types</span></code> and multiplication of <code class="computeroutput"><span class="identifier">state_type</span></code>s with a scalar (<code class="computeroutput"><span class="identifier">time_type</span></code>). In odeint this is realized in two concepts: <span class="underline">Algebra</span> and <span class="underline">Operations</span>. The standard way this works is via the range algebra, which provides functions that apply a specific operation to each of the individual elements of a container based on the <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> library. If your state type is not supported by <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> there are several possibilities to tell odeint how to do algebraic operations: </p> <div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; "> <li class="listitem"> Implement <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">begin</span></code> and <code class="computeroutput"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">end</span></code> for your state type so it works with <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a>. 
</li> <li class="listitem"> Implement vector-vector addition operator <code class="computeroutput"><span class="special">+</span></code> and scalar-vector multiplication operator <code class="computeroutput"><span class="special">*</span></code> and use the non-standard <code class="computeroutput"><span class="identifier">vector_space_algebra</span></code>. </li> <li class="listitem"> Implement your own algebra that implements the required functions. </li> </ul></div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.gsl_vector"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.gsl_vector" title="GSL Vector">GSL Vector</a> </h5></div></div></div> <p> In the following example we will try to use the <code class="computeroutput"><span class="identifier">gsl_vector</span></code> type from <a href="http://www.gsl.org" target="_top">GSL</a> (GNU Scientific Library) as state type in odeint. We will realize this by implementing a wrapper around the gsl_vector that takes care of construction/destruction. Also, <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> is extended such that it works with <code class="computeroutput"><span class="identifier">gsl_vector</span></code>s as well, which also required the implementation of a new <code class="computeroutput"><span class="identifier">gsl_iterator</span></code>. 
</p> <div class="note"><table border="0" summary="Note"> <tr> <td rowspan="2" align="center" valign="top" width="25"><img alt="[Note]" src="../../../../../../../doc/src/images/note.png"></td> <th align="left">Note</th> </tr> <tr><td align="left" valign="top"><p> odeint already includes all the code presented here, see <a href="https://github.com/headmyshoulder/odeint-v2/blob/master/boost/numeric/odeint/external/gsl/gsl_wrapper.hpp" target="_top">gsl_wrapper.hpp</a>, so <code class="computeroutput"><span class="identifier">gsl_vector</span></code>s can be used straight out-of-box. The following description is just for educational purpose. </p></td></tr> </table></div> <p> The GSL is a C library, so <code class="computeroutput"><span class="identifier">gsl_vector</span></code> has neither constructor, nor destructor or any <code class="computeroutput"><span class="identifier">begin</span></code> or <code class="computeroutput"><span class="identifier">end</span></code> function, no iterators at all. So to make it work with odeint plenty of things have to be implemented. Note that all of the work shown here is already included in odeint, so using <code class="computeroutput"><span class="identifier">gsl_vector</span></code>s in odeint doesn't require any further adjustments. We present it here just as an educational example. We start with defining appropriate constructors and destructors. This is done by specializing the <code class="computeroutput"><span class="identifier">state_wrapper</span></code> for <code class="computeroutput"><span class="identifier">gsl_vector</span></code>. 
State wrappers are used by the steppers internally to create and manage temporary instances of state types: </p> <p> </p> <pre class="programlisting"><span class="keyword">template</span><span class="special">&lt;&gt;</span> <span class="keyword">struct</span> <span class="identifier">state_wrapper</span><span class="special">&lt;</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">typedef</span> <span class="keyword">double</span> <span class="identifier">value_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="identifier">state_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">state_wrapper</span><span class="special">&lt;</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">&gt;</span> <span class="identifier">state_wrapper_type</span><span class="special">;</span> <span class="identifier">state_type</span> <span class="identifier">m_v</span><span class="special">;</span> <span class="identifier">state_wrapper</span><span class="special">(</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">m_v</span> <span class="special">=</span> <span class="identifier">gsl_vector_alloc</span><span class="special">(</span> <span class="number">1</span> <span class="special">);</span> <span class="special">}</span> <span class="identifier">state_wrapper</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">state_wrapper_type</span> <span class="special">&amp;</span><span class="identifier">x</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">resize</span><span class="special">(</span> <span class="identifier">m_v</span> <span 
class="special">,</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">m_v</span> <span class="special">);</span> <span class="identifier">gsl_vector_memcpy</span><span class="special">(</span> <span class="identifier">m_v</span> <span class="special">,</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">m_v</span> <span class="special">);</span> <span class="special">}</span> <span class="special">~</span><span class="identifier">state_wrapper</span><span class="special">()</span> <span class="special">{</span> <span class="identifier">gsl_vector_free</span><span class="special">(</span> <span class="identifier">m_v</span> <span class="special">);</span> <span class="special">}</span> <span class="special">};</span> </pre> <p> </p> <p> This <code class="computeroutput"><span class="identifier">state_wrapper</span></code> specialization tells odeint how gsl_vectors are created, copied and destroyed. Next we need resizing, this is required because gsl_vectors are dynamically sized objects: </p> <pre class="programlisting"><span class="keyword">template</span><span class="special">&lt;&gt;</span> <span class="keyword">struct</span> <span class="identifier">is_resizeable</span><span class="special">&lt;</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">typedef</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">true_type</span> <span class="identifier">type</span><span class="special">;</span> <span class="keyword">const</span> <span class="keyword">static</span> <span class="keyword">bool</span> <span class="identifier">value</span> <span class="special">=</span> <span class="identifier">type</span><span class="special">::</span><span class="identifier">value</span><span class="special">;</span> <span 
class="special">};</span> <span class="keyword">template</span> <span class="special">&lt;&gt;</span> <span class="keyword">struct</span> <span class="identifier">same_size_impl</span><span class="special">&lt;</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">,</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">static</span> <span class="keyword">bool</span> <span class="identifier">same_size</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="identifier">x</span> <span class="special">,</span> <span class="keyword">const</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="identifier">y</span> <span class="special">)</span> <span class="special">{</span> <span class="keyword">return</span> <span class="identifier">x</span><span class="special">-&gt;</span><span class="identifier">size</span> <span class="special">==</span> <span class="identifier">y</span><span class="special">-&gt;</span><span class="identifier">size</span><span class="special">;</span> <span class="special">}</span> <span class="special">};</span> <span class="keyword">template</span> <span class="special">&lt;&gt;</span> <span class="keyword">struct</span> <span class="identifier">resize_impl</span><span class="special">&lt;</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">,</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">static</span> <span class="keyword">void</span> <span class="identifier">resize</span><span class="special">(</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span 
class="identifier">x</span> <span class="special">,</span> <span class="keyword">const</span> <span class="identifier">gsl_vector</span><span class="special">*</span> <span class="identifier">y</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">gsl_vector_free</span><span class="special">(</span> <span class="identifier">x</span> <span class="special">);</span> <span class="identifier">x</span> <span class="special">=</span> <span class="identifier">gsl_vector_alloc</span><span class="special">(</span> <span class="identifier">y</span><span class="special">-&gt;</span><span class="identifier">size</span> <span class="special">);</span> <span class="special">}</span> <span class="special">};</span> </pre> <p> </p> <p> Up to now, we defined creation/destruction and resizing, but gsl_vectors also don't support iterators, so we first implement a gsl iterator: </p> <p> </p> <pre class="programlisting"><span class="comment">/* * defines an iterator for gsl_vector */</span> <span class="keyword">class</span> <span class="identifier">gsl_vector_iterator</span> <span class="special">:</span> <span class="keyword">public</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">iterator_facade</span><span class="special">&lt;</span> <span class="identifier">gsl_vector_iterator</span> <span class="special">,</span> <span class="keyword">double</span> <span class="special">,</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">random_access_traversal_tag</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">public</span> <span class="special">:</span> <span class="identifier">gsl_vector_iterator</span><span class="special">(</span> <span class="keyword">void</span> <span class="special">):</span> <span class="identifier">m_p</span><span class="special">(</span><span class="number">0</span><span 
class="special">)</span> <span class="special">,</span> <span class="identifier">m_stride</span><span class="special">(</span> <span class="number">0</span> <span class="special">)</span> <span class="special">{</span> <span class="special">}</span> <span class="keyword">explicit</span> <span class="identifier">gsl_vector_iterator</span><span class="special">(</span> <span class="identifier">gsl_vector</span> <span class="special">*</span><span class="identifier">p</span> <span class="special">)</span> <span class="special">:</span> <span class="identifier">m_p</span><span class="special">(</span> <span class="identifier">p</span><span class="special">-&gt;</span><span class="identifier">data</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">m_stride</span><span class="special">(</span> <span class="identifier">p</span><span class="special">-&gt;</span><span class="identifier">stride</span> <span class="special">)</span> <span class="special">{</span> <span class="special">}</span> <span class="keyword">friend</span> <span class="identifier">gsl_vector_iterator</span> <span class="identifier">end_iterator</span><span class="special">(</span> <span class="identifier">gsl_vector</span> <span class="special">*</span> <span class="special">);</span> <span class="keyword">private</span> <span class="special">:</span> <span class="keyword">friend</span> <span class="keyword">class</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">iterator_core_access</span><span class="special">;</span> <span class="keyword">friend</span> <span class="keyword">class</span> <span class="identifier">const_gsl_vector_iterator</span><span class="special">;</span> <span class="keyword">void</span> <span class="identifier">increment</span><span class="special">(</span> <span class="keyword">void</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">m_p</span> 
<span class="special">+=</span> <span class="identifier">m_stride</span><span class="special">;</span> <span class="special">}</span> <span class="keyword">void</span> <span class="identifier">decrement</span><span class="special">(</span> <span class="keyword">void</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">m_p</span> <span class="special">-=</span> <span class="identifier">m_stride</span><span class="special">;</span> <span class="special">}</span> <span class="keyword">void</span> <span class="identifier">advance</span><span class="special">(</span> <span class="identifier">ptrdiff_t</span> <span class="identifier">n</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">m_p</span> <span class="special">+=</span> <span class="identifier">n</span><span class="special">*</span><span class="identifier">m_stride</span><span class="special">;</span> <span class="special">}</span> <span class="keyword">bool</span> <span class="identifier">equal</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">gsl_vector_iterator</span> <span class="special">&amp;</span><span class="identifier">other</span> <span class="special">)</span> <span class="keyword">const</span> <span class="special">{</span> <span class="keyword">return</span> <span class="keyword">this</span><span class="special">-&gt;</span><span class="identifier">m_p</span> <span class="special">==</span> <span class="identifier">other</span><span class="special">.</span><span class="identifier">m_p</span><span class="special">;</span> <span class="special">}</span> <span class="keyword">bool</span> <span class="identifier">equal</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">const_gsl_vector_iterator</span> <span class="special">&amp;</span><span class="identifier">other</span> <span class="special">)</span> <span 
class="keyword">const</span><span class="special">;</span> <span class="keyword">double</span><span class="special">&amp;</span> <span class="identifier">dereference</span><span class="special">(</span> <span class="keyword">void</span> <span class="special">)</span> <span class="keyword">const</span> <span class="special">{</span> <span class="keyword">return</span> <span class="special">*</span><span class="identifier">m_p</span><span class="special">;</span> <span class="special">}</span> <span class="keyword">double</span> <span class="special">*</span><span class="identifier">m_p</span><span class="special">;</span> <span class="identifier">size_t</span> <span class="identifier">m_stride</span><span class="special">;</span> <span class="special">};</span> </pre> <p> A similar class exists for the <code class="computeroutput"><span class="keyword">const</span></code> version of the iterator. Then we have a function returning the end iterator (similarly for <code class="computeroutput"><span class="keyword">const</span></code> again): </p> <pre class="programlisting"><span class="identifier">gsl_vector_iterator</span> <span class="identifier">end_iterator</span><span class="special">(</span> <span class="identifier">gsl_vector</span> <span class="special">*</span><span class="identifier">x</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">gsl_vector_iterator</span> <span class="identifier">iter</span><span class="special">(</span> <span class="identifier">x</span> <span class="special">);</span> <span class="identifier">iter</span><span class="special">.</span><span class="identifier">m_p</span> <span class="special">+=</span> <span class="identifier">iter</span><span class="special">.</span><span class="identifier">m_stride</span> <span class="special">*</span> <span class="identifier">x</span><span class="special">-&gt;</span><span class="identifier">size</span><span class="special">;</span> <span 
class="keyword">return</span> <span class="identifier">iter</span><span class="special">;</span> <span class="special">}</span> </pre> <p> </p> <p> Finally, the bindings for <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> are added: </p> <pre class="programlisting"><span class="comment">// template&lt;&gt;</span> <span class="keyword">inline</span> <span class="identifier">gsl_vector_iterator</span> <span class="identifier">range_begin</span><span class="special">(</span> <span class="identifier">gsl_vector</span> <span class="special">*</span><span class="identifier">x</span> <span class="special">)</span> <span class="special">{</span> <span class="keyword">return</span> <span class="identifier">gsl_vector_iterator</span><span class="special">(</span> <span class="identifier">x</span> <span class="special">);</span> <span class="special">}</span> <span class="comment">// template&lt;&gt;</span> <span class="keyword">inline</span> <span class="identifier">gsl_vector_iterator</span> <span class="identifier">range_end</span><span class="special">(</span> <span class="identifier">gsl_vector</span> <span class="special">*</span><span class="identifier">x</span> <span class="special">)</span> <span class="special">{</span> <span class="keyword">return</span> <span class="identifier">end_iterator</span><span class="special">(</span> <span class="identifier">x</span> <span class="special">);</span> <span class="special">}</span> </pre> <p> Again with similar definitions for the <code class="computeroutput"><span class="keyword">const</span></code> versions. This eventually makes odeint work with gsl vectors as state types. The full code for these bindings is found in <a href="https://github.com/headmyshoulder/odeint-v2/blob/master/boost/numeric/odeint/external/gsl/gsl_wrapper.hpp" target="_top">gsl_wrapper.hpp</a>. It might look rather complicated but keep in mind that gsl is a pre-compiled C library. 
</p> </div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.vector_space_algebra"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.vector_space_algebra" title="Vector Space Algebra">Vector Space Algebra</a> </h5></div></div></div> <p> As seen above, the standard way of performing algebraic operations on container-like state types in odeint is to iterate through the elements of the container and perform the operations element-wise on the underlying value type. This is realized by means of the <code class="computeroutput"><span class="identifier">range_algebra</span></code> that uses <a href="http://www.boost.org/doc/libs/release/libs/range/" target="_top">Boost.Range</a> for obtaining iterators of the state types. However, there are other ways to implement the algebraic operations on containers, one of which is defining the addition/multiplication operators for the containers directly and then using the <code class="computeroutput"><span class="identifier">vector_space_algebra</span></code>. If you use this algebra, the following operators have to be defined for the state_type: </p> <div class="informaltable"><table class="table"> <colgroup> <col> <col> <col> <col> </colgroup> <thead><tr> <th> <p> Name </p> </th> <th> <p> Expression </p> </th> <th> <p> Type </p> </th> <th> <p> Semantics </p> </th> </tr></thead> <tbody> <tr> <td> <p> Addition </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">x</span> <span class="special">+</span> <span class="identifier">y</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">state_type</span></code> </p> </td> <td> <p> Calculates the vector sum 'x+y'. 
</p> </td> </tr> <tr> <td> <p> Assign addition </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">x</span> <span class="special">+=</span> <span class="identifier">y</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">state_type</span></code> </p> </td> <td> <p> Performs x+y in place. </p> </td> </tr> <tr> <td> <p> Scalar multiplication </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">a</span> <span class="special">*</span> <span class="identifier">x</span> </code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">state_type</span></code> </p> </td> <td> <p> Performs multiplication of vector x with scalar a. </p> </td> </tr> <tr> <td> <p> Assign scalar multiplication </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">x</span> <span class="special">*=</span> <span class="identifier">a</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">state_type</span></code> </p> </td> <td> <p> Performs in-place multiplication of vector x with scalar a. </p> </td> </tr> </tbody> </table></div> <p> Defining these operators makes your state type work with any basic Runge-Kutta stepper. However, if you want to use step-size control, some more functionality is required. Specifically, operations like <span class="emphasis"><em>max<sub>i</sub>( |err<sub>i</sub>| / (alpha * |s<sub>i</sub>|) )</em></span> have to be performed. <span class="emphasis"><em>err</em></span> and <span class="emphasis"><em>s</em></span> are state_types, alpha is a scalar. As you can see, we need element-wise absolute value and division as well as a reduce operation to get the maximum value. 
So for controlled steppers the following things have to be implemented: </p> <div class="informaltable"><table class="table"> <colgroup> <col> <col> <col> <col> </colgroup> <thead><tr> <th> <p> Name </p> </th> <th> <p> Expression </p> </th> <th> <p> Type </p> </th> <th> <p> Semantics </p> </th> </tr></thead> <tbody> <tr> <td> <p> Division </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">x</span> <span class="special">/</span> <span class="identifier">y</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">state_type</span></code> </p> </td> <td> <p> Calculates the element-wise division 'x/y' </p> </td> </tr> <tr> <td> <p> Absolute value </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">abs</span><span class="special">(</span> <span class="identifier">x</span> <span class="special">)</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">state_type</span></code> </p> </td> <td> <p> Element wise absolute value </p> </td> </tr> <tr> <td> <p> Reduce </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">vector_space_reduce_impl</span><span class="special">&lt;</span> <span class="identifier">state_type</span> <span class="special">&gt;::</span><span class="identifier">reduce</span><span class="special">(</span> <span class="identifier">state</span> <span class="special">,</span> <span class="identifier">operation</span> <span class="special">,</span> <span class="identifier">init</span> <span class="special">)</span></code> </p> </td> <td> <p> <code class="computeroutput"><span class="identifier">value_type</span></code> </p> </td> <td> <p> Performs the <code class="computeroutput"><span class="identifier">operation</span></code> for subsequently each element of <code class="computeroutput"><span class="identifier">state</span></code> and returns the aggregate value. E.g. 
</p> <p> <code class="computeroutput"><span class="identifier">init</span> <span class="special">=</span> <span class="keyword">operator</span><span class="special">(</span> <span class="identifier">init</span> <span class="special">,</span> <span class="identifier">state</span><span class="special">[</span><span class="number">0</span><span class="special">]</span> <span class="special">);</span></code> </p> <p> <code class="computeroutput"><span class="identifier">init</span> <span class="special">=</span> <span class="keyword">operator</span><span class="special">(</span> <span class="identifier">init</span> <span class="special">,</span> <span class="identifier">state</span><span class="special">[</span><span class="number">1</span><span class="special">]</span> <span class="special">)</span></code> </p> <p> <code class="computeroutput"><span class="special">...</span></code> </p> </td> </tr> </tbody> </table></div> </div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.point_type"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.algebras_and_operations.point_type" title="Point type">Point type</a> </h5></div></div></div> <p> Here we show how to implement the required operators on a state type. As example we define a new class <code class="computeroutput"><span class="identifier">point3D</span></code> representing a three-dimensional vector with components x,y,z and define addition and scalar multiplication operators for it. We use <a href="http://www.boost.org/doc/libs/release/libs/utility/operators.htm" target="_top">Boost.Operators</a> to reduce the amount of code to be written. 
The class for the point type looks as follows: </p> <p> </p> <pre class="programlisting"><span class="keyword">class</span> <span class="identifier">point3D</span> <span class="special">:</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">additive1</span><span class="special">&lt;</span> <span class="identifier">point3D</span> <span class="special">,</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">additive2</span><span class="special">&lt;</span> <span class="identifier">point3D</span> <span class="special">,</span> <span class="keyword">double</span> <span class="special">,</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">multiplicative2</span><span class="special">&lt;</span> <span class="identifier">point3D</span> <span class="special">,</span> <span class="keyword">double</span> <span class="special">&gt;</span> <span class="special">&gt;</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">public</span><span class="special">:</span> <span class="keyword">double</span> <span class="identifier">x</span> <span class="special">,</span> <span class="identifier">y</span> <span class="special">,</span> <span class="identifier">z</span><span class="special">;</span> <span class="identifier">point3D</span><span class="special">()</span> <span class="special">:</span> <span class="identifier">x</span><span class="special">(</span> <span class="number">0.0</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">y</span><span class="special">(</span> <span class="number">0.0</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">z</span><span class="special">(</span> <span class="number">0.0</span> <span class="special">)</span> <span class="special">{</span> <span class="special">}</span> <span 
class="identifier">point3D</span><span class="special">(</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">val</span> <span class="special">)</span> <span class="special">:</span> <span class="identifier">x</span><span class="special">(</span> <span class="identifier">val</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">y</span><span class="special">(</span> <span class="identifier">val</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">z</span><span class="special">(</span> <span class="identifier">val</span> <span class="special">)</span> <span class="special">{</span> <span class="special">}</span> <span class="identifier">point3D</span><span class="special">(</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">_x</span> <span class="special">,</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">_y</span> <span class="special">,</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">_z</span> <span class="special">)</span> <span class="special">:</span> <span class="identifier">x</span><span class="special">(</span> <span class="identifier">_x</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">y</span><span class="special">(</span> <span class="identifier">_y</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">z</span><span class="special">(</span> <span class="identifier">_z</span> <span class="special">)</span> <span class="special">{</span> <span class="special">}</span> <span class="identifier">point3D</span><span class="special">&amp;</span> <span class="keyword">operator</span><span class="special">+=(</span> <span class="keyword">const</span> <span class="identifier">point3D</span> 
<span class="special">&amp;</span><span class="identifier">p</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">x</span> <span class="special">+=</span> <span class="identifier">p</span><span class="special">.</span><span class="identifier">x</span><span class="special">;</span> <span class="identifier">y</span> <span class="special">+=</span> <span class="identifier">p</span><span class="special">.</span><span class="identifier">y</span><span class="special">;</span> <span class="identifier">z</span> <span class="special">+=</span> <span class="identifier">p</span><span class="special">.</span><span class="identifier">z</span><span class="special">;</span> <span class="keyword">return</span> <span class="special">*</span><span class="keyword">this</span><span class="special">;</span> <span class="special">}</span> <span class="identifier">point3D</span><span class="special">&amp;</span> <span class="keyword">operator</span><span class="special">*=(</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">a</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">x</span> <span class="special">*=</span> <span class="identifier">a</span><span class="special">;</span> <span class="identifier">y</span> <span class="special">*=</span> <span class="identifier">a</span><span class="special">;</span> <span class="identifier">z</span> <span class="special">*=</span> <span class="identifier">a</span><span class="special">;</span> <span class="keyword">return</span> <span class="special">*</span><span class="keyword">this</span><span class="special">;</span> <span class="special">}</span> <span class="special">};</span> </pre> <p> </p> <p> By deriving from <a href="http://www.boost.org/doc/libs/release/libs/utility/operators.htm" target="_top">Boost.Operators</a> classes we don't have to define outer class operators like <code 
class="computeroutput"><span class="keyword">operator</span><span class="special">+(</span> <span class="identifier">point3D</span> <span class="special">,</span> <span class="identifier">point3D</span> <span class="special">)</span></code> because that is taken care of by the operators library. Note that for simple Runge-Kutta schemes (like <code class="computeroutput"><span class="identifier">runge_kutta4</span></code>) only the <code class="computeroutput"><span class="special">+</span></code> and <code class="computeroutput"><span class="special">*</span></code> operators are required. If, however, a controlled stepper is used one also needs to specify the division operator <code class="computeroutput"><span class="special">/</span></code> because calculation of the error term involves an element wise division of the state types. Additionally, controlled steppers require an <code class="computeroutput"><span class="identifier">abs</span></code> function calculating the element-wise absolute value for the state type: </p> <p> </p> <pre class="programlisting"><span class="comment">// only required for steppers with error control</span> <span class="identifier">point3D</span> <span class="keyword">operator</span><span class="special">/(</span> <span class="keyword">const</span> <span class="identifier">point3D</span> <span class="special">&amp;</span><span class="identifier">p1</span> <span class="special">,</span> <span class="keyword">const</span> <span class="identifier">point3D</span> <span class="special">&amp;</span><span class="identifier">p2</span> <span class="special">)</span> <span class="special">{</span> <span class="keyword">return</span> <span class="identifier">point3D</span><span class="special">(</span> <span class="identifier">p1</span><span class="special">.</span><span class="identifier">x</span><span class="special">/</span><span class="identifier">p2</span><span class="special">.</span><span class="identifier">x</span> <span 
class="special">,</span> <span class="identifier">p1</span><span class="special">.</span><span class="identifier">y</span><span class="special">/</span><span class="identifier">p2</span><span class="special">.</span><span class="identifier">y</span> <span class="special">,</span> <span class="identifier">p1</span><span class="special">.</span><span class="identifier">z</span><span class="special">/</span><span class="identifier">p2</span><span class="special">.</span><span class="identifier">z</span> <span class="special">);</span> <span class="special">}</span> <span class="identifier">point3D</span> <span class="identifier">abs</span><span class="special">(</span> <span class="keyword">const</span> <span class="identifier">point3D</span> <span class="special">&amp;</span><span class="identifier">p</span> <span class="special">)</span> <span class="special">{</span> <span class="keyword">return</span> <span class="identifier">point3D</span><span class="special">(</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">abs</span><span class="special">(</span><span class="identifier">p</span><span class="special">.</span><span class="identifier">x</span><span class="special">)</span> <span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">abs</span><span class="special">(</span><span class="identifier">p</span><span class="special">.</span><span class="identifier">y</span><span class="special">)</span> <span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">abs</span><span class="special">(</span><span class="identifier">p</span><span class="special">.</span><span class="identifier">z</span><span class="special">)</span> <span class="special">);</span> <span class="special">}</span> </pre> <p> </p> <p> Finally, we have to provide a specialization to calculate the infintity norm of a state: </p> 
<p> </p> <pre class="programlisting"><span class="comment">// also only for steppers with error control</span> <span class="keyword">namespace</span> <span class="identifier">boost</span> <span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">numeric</span> <span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">odeint</span> <span class="special">{</span> <span class="keyword">template</span><span class="special">&lt;&gt;</span> <span class="keyword">struct</span> <span class="identifier">vector_space_norm_inf</span><span class="special">&lt;</span> <span class="identifier">point3D</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="keyword">typedef</span> <span class="keyword">double</span> <span class="identifier">result_type</span><span class="special">;</span> <span class="keyword">double</span> <span class="keyword">operator</span><span class="special">()(</span> <span class="keyword">const</span> <span class="identifier">point3D</span> <span class="special">&amp;</span><span class="identifier">p</span> <span class="special">)</span> <span class="keyword">const</span> <span class="special">{</span> <span class="keyword">using</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">max</span><span class="special">;</span> <span class="keyword">using</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">abs</span><span class="special">;</span> <span class="keyword">return</span> <span class="identifier">max</span><span class="special">(</span> <span class="identifier">max</span><span class="special">(</span> <span class="identifier">abs</span><span class="special">(</span> <span class="identifier">p</span><span class="special">.</span><span class="identifier">x</span> <span class="special">)</span> <span class="special">,</span> <span 
class="identifier">abs</span><span class="special">(</span> <span class="identifier">p</span><span class="special">.</span><span class="identifier">y</span> <span class="special">)</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">abs</span><span class="special">(</span> <span class="identifier">p</span><span class="special">.</span><span class="identifier">z</span> <span class="special">)</span> <span class="special">);</span> <span class="special">}</span> <span class="special">};</span> <span class="special">}</span> <span class="special">}</span> <span class="special">}</span> </pre> <p> </p> <p> Again, note that the two last steps were only required if you want to use controlled steppers. For simple steppers definition of the simple <code class="computeroutput"><span class="special">+=</span></code> and <code class="computeroutput"><span class="special">*=</span></code> operators are sufficient. Having defined such a point type, we can easily perform the integration on a Lorenz system by explicitely configuring the <code class="computeroutput"><span class="identifier">vector_space_algebra</span></code> in the stepper's template argument list: </p> <p> </p> <pre class="programlisting"><span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">sigma</span> <span class="special">=</span> <span class="number">10.0</span><span class="special">;</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">R</span> <span class="special">=</span> <span class="number">28.0</span><span class="special">;</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">b</span> <span class="special">=</span> <span class="number">8.0</span> <span class="special">/</span> <span class="number">3.0</span><span class="special">;</span> <span class="keyword">void</span> <span class="identifier">lorenz</span><span 
class="special">(</span> <span class="keyword">const</span> <span class="identifier">point3D</span> <span class="special">&amp;</span><span class="identifier">x</span> <span class="special">,</span> <span class="identifier">point3D</span> <span class="special">&amp;</span><span class="identifier">dxdt</span> <span class="special">,</span> <span class="keyword">const</span> <span class="keyword">double</span> <span class="identifier">t</span> <span class="special">)</span> <span class="special">{</span> <span class="identifier">dxdt</span><span class="special">.</span><span class="identifier">x</span> <span class="special">=</span> <span class="identifier">sigma</span> <span class="special">*</span> <span class="special">(</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">y</span> <span class="special">-</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">x</span> <span class="special">);</span> <span class="identifier">dxdt</span><span class="special">.</span><span class="identifier">y</span> <span class="special">=</span> <span class="identifier">R</span> <span class="special">*</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">x</span> <span class="special">-</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">y</span> <span class="special">-</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">x</span> <span class="special">*</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">z</span><span class="special">;</span> <span class="identifier">dxdt</span><span class="special">.</span><span class="identifier">z</span> <span class="special">=</span> <span class="special">-</span><span class="identifier">b</span> <span class="special">*</span> <span class="identifier">x</span><span class="special">.</span><span 
class="identifier">z</span> <span class="special">+</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">x</span> <span class="special">*</span> <span class="identifier">x</span><span class="special">.</span><span class="identifier">y</span><span class="special">;</span> <span class="special">}</span> <span class="keyword">using</span> <span class="keyword">namespace</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">numeric</span><span class="special">::</span><span class="identifier">odeint</span><span class="special">;</span> <span class="keyword">int</span> <span class="identifier">main</span><span class="special">()</span> <span class="special">{</span> <span class="identifier">point3D</span> <span class="identifier">x</span><span class="special">(</span> <span class="number">10.0</span> <span class="special">,</span> <span class="number">5.0</span> <span class="special">,</span> <span class="number">5.0</span> <span class="special">);</span> <span class="comment">// point type defines it's own operators -&gt; use vector_space_algebra !</span> <span class="keyword">typedef</span> <span class="identifier">runge_kutta_dopri5</span><span class="special">&lt;</span> <span class="identifier">point3D</span> <span class="special">,</span> <span class="keyword">double</span> <span class="special">,</span> <span class="identifier">point3D</span> <span class="special">,</span> <span class="keyword">double</span> <span class="special">,</span> <span class="identifier">vector_space_algebra</span> <span class="special">&gt;</span> <span class="identifier">stepper</span><span class="special">;</span> <span class="keyword">int</span> <span class="identifier">steps</span> <span class="special">=</span> <span class="identifier">integrate_adaptive</span><span class="special">(</span> <span class="identifier">make_controlled</span><span class="special">&lt;</span><span 
class="identifier">stepper</span><span class="special">&gt;(</span> <span class="number">1E-10</span> <span class="special">,</span> <span class="number">1E-10</span> <span class="special">)</span> <span class="special">,</span> <span class="identifier">lorenz</span> <span class="special">,</span> <span class="identifier">x</span> <span class="special">,</span> <span class="number">0.0</span> <span class="special">,</span> <span class="number">10.0</span> <span class="special">,</span> <span class="number">0.1</span> <span class="special">);</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">cout</span> <span class="special">&lt;&lt;</span> <span class="identifier">x</span> <span class="special">&lt;&lt;</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">endl</span><span class="special">;</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">cout</span> <span class="special">&lt;&lt;</span> <span class="string">"steps: "</span> <span class="special">&lt;&lt;</span> <span class="identifier">steps</span> <span class="special">&lt;&lt;</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">endl</span><span class="special">;</span> <span class="special">}</span> </pre> <p> </p> <p> The whole example can be found in <a href="https://github.com/headmyshoulder/odeint-v2/blob/master/examples/lorenz_point.cpp" target="_top">lorenz_point.cpp</a> </p> <div class="note"><table border="0" summary="Note"> <tr> <td rowspan="2" align="center" valign="top" width="25"><img alt="[Note]" src="../../../../../../../doc/src/images/note.png"></td> <th align="left">Note</th> </tr> <tr><td align="left" valign="top"><p> For the most <code class="computeroutput"><span class="identifier">state_types</span></code>, odeint is able to automatically determine the correct algebra and operations. 
But if you want to use your own <code class="computeroutput"><span class="identifier">state_type</span></code>, as in this example with <code class="computeroutput"><span class="identifier">point3D</span></code>, you have to manually configure the right algebra/operations, unless your <code class="computeroutput"><span class="identifier">state_type</span></code> works with the default choice of <code class="computeroutput"><span class="identifier">range_algebra</span></code> and <code class="computeroutput"><span class="identifier">default_operations</span></code>. </p></td></tr> </table></div> </div> </div> <p> gsl_vector, gsl_matrix, ublas::matrix, blitz::matrix, thrust </p> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.adapt_your_own_operations"></a><a class="link" href="state_types__algebras_and_operations.html#boost_numeric_odeint.odeint_in_detail.state_types__algebras_and_operations.adapt_your_own_operations" title="Adapt your own operations">Adapt your own operations</a> </h4></div></div></div> <p> to be continued </p> <div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; "> <li class="listitem"> thrust </li> <li class="listitem"> gsl_complex </li> <li class="listitem"> min, max, pow </li> </ul></div> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright © 2009-2015 Karsten Ahnert and Mario Mulansky<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="iterators_and_ranges.html"><img src="../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../odeint_in_detail.html"><img src="../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="using_boost__ref.html"><img src="../../../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
stan-dev/math
lib/boost_1.75.0/libs/numeric/odeint/doc/html/boost_numeric_odeint/odeint_in_detail/state_types__algebras_and_operations.html
HTML
bsd-3-clause
89,819
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

#import <Foundation/Foundation.h>

#import "RCTBridgeMethod.h"
#import "RCTNullability.h"

@class RCTBridge;

// One parsed argument of an exported native-module method.
// NOTE(review): semantics inferred from the property names only — confirm
// against the signature-parsing code in the implementation file.
@interface RCTMethodArgument : NSObject

// Objective-C type string of the argument as it appears in the signature.
@property (nonatomic, copy, readonly) NSString *type;

// Declared nullability of the argument (see RCTNullability).
@property (nonatomic, readonly) RCTNullability nullability;

// Whether the argument was marked as unused in the signature.
@property (nonatomic, readonly) BOOL unused;

@end

// An RCTBridgeMethod implementation wrapping a single exported Objective-C
// method of a native module so that it can be invoked through the bridge.
@interface RCTModuleMethod : NSObject <RCTBridgeMethod>

// Class of the native module that declares the wrapped method.
@property (nonatomic, readonly) Class moduleClass;

// Selector of the wrapped Objective-C method.
@property (nonatomic, readonly) SEL selector;

// Designated initializer.
// objCMethodName: the full Objective-C method signature string to wrap.
// JSMethodName:   the name under which the method is exposed to JavaScript.
// moduleClass:    the native module class that implements the method.
- (instancetype)initWithMethodSignature:(NSString *)objCMethodName
                           JSMethodName:(NSString *)JSMethodName
                            moduleClass:(Class)moduleClass NS_DESIGNATED_INITIALIZER;

// Invokes the wrapped method on the given module instance with the given
// arguments, in the context of the given bridge.
- (void)invokeWithBridge:(RCTBridge *)bridge
                  module:(id)module
               arguments:(NSArray *)arguments;

@end
udnisap/react-native
React/Base/RCTModuleMethod.h
C
bsd-3-clause
1,133
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Ajax.org Code Editor (ACE).
 *
 * The Initial Developer of the Original Code is
 * Ajax.org B.V.
 * Portions created by the Initial Developer are Copyright (C) 2010
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *      Fabian Jakobs <fabian AT ajax DOT org>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

// Ace editor theme: the dark variant of the "Solarized" color scheme.
// Registered as an AMD-style module via Ace's define(); importing the module
// injects the theme's CSS into the document (see bottom of the factory).
define('ace/theme/solarized_dark', ['require', 'exports', 'module', 'ace/lib/dom'], function(require, exports, module) {

// Marks the theme as dark so the editor can pick matching defaults.
exports.isDark = true;
// Root CSS class that scopes every rule below to editors using this theme.
exports.cssClass = "ace-solarized-dark";
// Theme style sheet. Each trailing "\" continues the string literal onto the
// next line; the string content itself is unchanged from the original.
exports.cssText = "\
.ace-solarized-dark .ace_editor {\
border: 2px solid rgb(159, 159, 159);\
}\
\
.ace-solarized-dark .ace_editor.ace_focus {\
border: 2px solid #327fbd;\
}\
\
.ace-solarized-dark .ace_gutter {\
background: #01313f;\
color: #d0edf7;\
}\
\
.ace-solarized-dark .ace_print_margin {\
width: 1px;\
background: #e8e8e8;\
}\
\
.ace-solarized-dark .ace_scroller {\
background-color: #002B36;\
}\
\
.ace-solarized-dark .ace_text-layer {\
color: #93A1A1;\
}\
\
.ace-solarized-dark .ace_cursor {\
border-left: 2px solid #D30102;\
}\
\
.ace-solarized-dark .ace_cursor.ace_overwrite {\
border-left: 0px;\
border-bottom: 1px solid #D30102;\
}\
\
.ace-solarized-dark .ace_marker-layer .ace_selection {\
background: #073642;\
}\
\
.ace-solarized-dark.multiselect .ace_selection.start {\
box-shadow: 0 0 3px 0px #002B36;\
border-radius: 2px;\
}\
\
.ace-solarized-dark .ace_marker-layer .ace_step {\
background: rgb(102, 82, 0);\
}\
\
.ace-solarized-dark .ace_marker-layer .ace_bracket {\
margin: -1px 0 0 -1px;\
border: 1px solid rgba(147, 161, 161, 0.50);\
}\
\
.ace-solarized-dark .ace_marker-layer .ace_active_line {\
background: #073642;\
}\
\
.ace-solarized-dark .ace_gutter_active_line {\
background-color: #0d3440;\
}\
\
.ace-solarized-dark .ace_marker-layer .ace_selected_word {\
border: 1px solid #073642;\
}\
\
.ace-solarized-dark .ace_invisible {\
color: rgba(147, 161, 161, 0.50);\
}\
\
.ace-solarized-dark .ace_keyword, .ace-solarized-dark .ace_meta {\
color:#859900;\
}\
\
.ace-solarized-dark .ace_constant.ace_language {\
color:#B58900;\
}\
\
.ace-solarized-dark .ace_constant.ace_numeric {\
color:#D33682;\
}\
\
.ace-solarized-dark .ace_constant.ace_other {\
color:#CB4B16;\
}\
\
.ace-solarized-dark .ace_fold {\
background-color: #268BD2;\
border-color: #93A1A1;\
}\
\
.ace-solarized-dark .ace_support.ace_function {\
color:#268BD2;\
}\
\
.ace-solarized-dark .ace_storage {\
color:#93A1A1;\
}\
\
.ace-solarized-dark .ace_variable {\
color:#268BD2;\
}\
\
.ace-solarized-dark .ace_string {\
color:#2AA198;\
}\
\
.ace-solarized-dark .ace_string.ace_regexp {\
color:#D30102;\
}\
\
.ace-solarized-dark .ace_comment {\
font-style:italic;\
color:#657B83;\
}\
\
.ace-solarized-dark .ace_variable.ace_language {\
color:#268BD2;\
}\
\
.ace-solarized-dark .ace_entity.ace_other.ace_attribute-name {\
color:#93A1A1;\
}\
\
.ace-solarized-dark .ace_entity.ace_name.ace_function {\
color:#268BD2;\
}\
\
.ace-solarized-dark .ace_markup.ace_underline {\
text-decoration:underline;\
}";

// Inject the style sheet into the page, keyed by the theme's CSS class.
var dom = require("../lib/dom");
dom.importCssString(exports.cssText, exports.cssClass);

});
Osmose/snippets-service
vendor-local/lib/python/django_ace/static/django_ace/ace/theme-solarized_dark.js
JavaScript
bsd-3-clause
4,589
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "cc/animation/keyframe_effect.h" #include <algorithm> #include <memory> #include <string> #include <utility> #include "base/containers/cxx20_erase.h" #include "base/time/time.h" #include "cc/animation/animation.h" #include "cc/animation/animation_host.h" #include "cc/animation/animation_timeline.h" #include "cc/animation/scroll_offset_animation_curve.h" #include "cc/trees/property_animation_state.h" #include "ui/gfx/animation/keyframe/animation_curve.h" #include "ui/gfx/animation/keyframe/target_property.h" #include "ui/gfx/geometry/transform_operations.h" namespace cc { namespace { bool NeedsFinishedEvent(KeyframeModel* keyframe_model) { // The controlling instance (i.e., impl instance), sends the finish event and // does not need to receive it. if (keyframe_model->is_controlling_instance()) return false; return !keyframe_model->received_finished_event(); } // Returns indices for keyframe_models that have matching group id. 
std::vector<size_t> FindAnimationsWithSameGroupId( const std::vector<std::unique_ptr<gfx::KeyframeModel>>& keyframe_models, int group_id) { std::vector<size_t> group; for (size_t i = 0; i < keyframe_models.size(); ++i) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_models[i].get()); if (cc_keyframe_model->group() != group_id) continue; group.push_back(i); } return group; } } // namespace KeyframeEffect::KeyframeEffect(Animation* animation) : animation_(animation), element_animations_(), needs_to_start_keyframe_models_(false), scroll_offset_animation_was_interrupted_(false), is_ticking_(false), needs_push_properties_(false) {} KeyframeEffect::~KeyframeEffect() { DCHECK(!has_bound_element_animations()); } void KeyframeEffect::SetNeedsPushProperties() { needs_push_properties_ = true; // TODO(smcgruer): We only need the below calls when needs_push_properties_ // goes from false to true - see http://crbug.com/764405 DCHECK(element_animations()); element_animations_->SetNeedsPushProperties(); animation_->SetNeedsPushProperties(); } void KeyframeEffect::BindElementAnimations( ElementAnimations* element_animations) { DCHECK(element_animations); DCHECK(!element_animations_); element_animations_ = element_animations; DCHECK(element_id_); DCHECK(element_id_ == element_animations->element_id()); if (has_any_keyframe_model()) KeyframeModelAdded(); SetNeedsPushProperties(); } void KeyframeEffect::UnbindElementAnimations() { SetNeedsPushProperties(); element_animations_ = nullptr; } void KeyframeEffect::AttachElement(ElementId element_id) { DCHECK(!element_id_); DCHECK(element_id); element_id_ = element_id; } void KeyframeEffect::DetachElement() { DCHECK(element_id_); element_id_ = ElementId(); } void KeyframeEffect::Tick(base::TimeTicks monotonic_time) { DCHECK(has_bound_element_animations()); if (!element_animations_->has_element_in_any_list()) return; if (needs_to_start_keyframe_models_) StartKeyframeModels(monotonic_time); for (auto& keyframe_model : 
keyframe_models()) { TickKeyframeModel(monotonic_time, keyframe_model.get()); } last_tick_time_ = monotonic_time; element_animations_->UpdateClientAnimationState(); } void KeyframeEffect::RemoveFromTicking() { is_ticking_ = false; // Resetting last_tick_time_ here ensures that calling ::UpdateState // before ::Animate doesn't start a keyframe model. last_tick_time_ = absl::nullopt; animation_->RemoveFromTicking(); } void KeyframeEffect::UpdateState(bool start_ready_keyframe_models, AnimationEvents* events) { DCHECK(has_bound_element_animations()); // Animate hasn't been called, this happens if an element has been added // between the Commit and Draw phases. if (last_tick_time_ == absl::nullopt) start_ready_keyframe_models = false; if (start_ready_keyframe_models) PromoteStartedKeyframeModels(events); auto last_tick_time = last_tick_time_.value_or(base::TimeTicks()); MarkFinishedKeyframeModels(last_tick_time); MarkKeyframeModelsForDeletion(last_tick_time, events); PurgeKeyframeModelsMarkedForDeletion(/* impl_only */ true); if (start_ready_keyframe_models) { if (needs_to_start_keyframe_models_) { StartKeyframeModels(last_tick_time); PromoteStartedKeyframeModels(events); } } if (!element_animations()->has_element_in_any_list()) RemoveFromTicking(); } void KeyframeEffect::UpdateTickingState() { if (animation_->has_animation_host()) { bool was_ticking = is_ticking_; is_ticking_ = HasNonDeletedKeyframeModel() && element_animations_->has_element_in_any_list(); if (is_ticking_ && !was_ticking) { animation_->AddToTicking(); } else if (!is_ticking_ && was_ticking) { RemoveFromTicking(); } } } void KeyframeEffect::Pause(base::TimeDelta pause_offset, PauseCondition pause_condition) { bool did_pause = false; for (auto& keyframe_model : keyframe_models()) { // TODO(crbug.com/1076012): KeyframeEffect is paused with local time for // scroll-linked animations. 
To make sure the start event of a keyframe // model is sent to blink, we should not set its run state to PAUSED until // such event is sent. This should be revisited once KeyframeEffect is able // to tick scroll-linked keyframe models directly. if (pause_condition == PauseCondition::kAfterStart && (keyframe_model->run_state() == gfx::KeyframeModel::WAITING_FOR_TARGET_AVAILABILITY || keyframe_model->run_state() == gfx::KeyframeModel::STARTING)) continue; keyframe_model->Pause(pause_offset); did_pause = true; } if (!did_pause || !has_bound_element_animations()) return; animation_->SetNeedsCommit(); SetNeedsPushProperties(); } void KeyframeEffect::AddKeyframeModel( std::unique_ptr<gfx::KeyframeModel> keyframe_model) { KeyframeModel* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); DCHECK(!cc_keyframe_model->is_impl_only() || keyframe_model->TargetProperty() == TargetProperty::SCROLL_OFFSET); // This is to make sure that keyframe models in the same group, i.e., start // together, don't animate the same property. DCHECK(std::none_of(keyframe_models().begin(), keyframe_models().end(), [&](const auto& existing_keyframe_model) { auto* cc_existing_keyframe_model = KeyframeModel::ToCcKeyframeModel( existing_keyframe_model.get()); return keyframe_model->TargetProperty() == existing_keyframe_model->TargetProperty() && cc_keyframe_model->group() == cc_existing_keyframe_model->group(); })); if (keyframe_model->TargetProperty() == TargetProperty::SCROLL_OFFSET) { // We should never have more than one scroll offset animation queued on the // same scrolling element as this would result in multiple automated // scrolls. 
DCHECK(std::none_of( keyframe_models().begin(), keyframe_models().end(), [&](const auto& existing_keyframe_model) { auto* cc_existing_keyframe_model = KeyframeModel::ToCcKeyframeModel(existing_keyframe_model.get()); return cc_existing_keyframe_model->TargetProperty() == TargetProperty::SCROLL_OFFSET && !cc_existing_keyframe_model->is_finished() && (!cc_existing_keyframe_model->is_controlling_instance() || cc_existing_keyframe_model->affects_pending_elements()); })); } gfx::KeyframeEffect::AddKeyframeModel(std::move(keyframe_model)); if (has_bound_element_animations()) { KeyframeModelAdded(); SetNeedsPushProperties(); } } void KeyframeEffect::PauseKeyframeModel(int keyframe_model_id, base::TimeDelta time_offset) { for (auto& keyframe_model : keyframe_models()) { if (keyframe_model->id() == keyframe_model_id) { keyframe_model->Pause(time_offset); } } if (has_bound_element_animations()) { animation_->SetNeedsCommit(); SetNeedsPushProperties(); } } void KeyframeEffect::AbortKeyframeModel(int keyframe_model_id) { if (gfx::KeyframeModel* keyframe_model = GetKeyframeModelById(keyframe_model_id)) { if (!keyframe_model->is_finished()) { keyframe_model->SetRunState(gfx::KeyframeModel::ABORTED, last_tick_time_.value_or(base::TimeTicks())); if (has_bound_element_animations()) element_animations_->UpdateClientAnimationState(); } } if (has_bound_element_animations()) { animation_->SetNeedsCommit(); SetNeedsPushProperties(); } } void KeyframeEffect::AbortKeyframeModelsWithProperty( TargetProperty::Type target_property, bool needs_completion) { if (needs_completion) DCHECK(target_property == TargetProperty::SCROLL_OFFSET); bool aborted_keyframe_model = false; for (auto& keyframe_model : keyframe_models()) { if (keyframe_model->TargetProperty() == target_property && !keyframe_model->is_finished()) { // Currently only impl-only scroll offset KeyframeModels can be completed // on the main thread. 
if (needs_completion && KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->is_impl_only()) { keyframe_model->SetRunState( gfx::KeyframeModel::ABORTED_BUT_NEEDS_COMPLETION, last_tick_time_.value_or(base::TimeTicks())); } else { keyframe_model->SetRunState( gfx::KeyframeModel::ABORTED, last_tick_time_.value_or(base::TimeTicks())); } aborted_keyframe_model = true; } } if (has_bound_element_animations()) { if (aborted_keyframe_model) element_animations_->UpdateClientAnimationState(); animation_->SetNeedsCommit(); SetNeedsPushProperties(); } } void KeyframeEffect::ActivateKeyframeModels() { DCHECK(has_bound_element_animations()); bool keyframe_model_activated = false; for (auto& keyframe_model : keyframe_models()) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); if (cc_keyframe_model->affects_active_elements() != cc_keyframe_model->affects_pending_elements()) { keyframe_model_activated = true; } cc_keyframe_model->set_affects_active_elements( cc_keyframe_model->affects_pending_elements()); } if (keyframe_model_activated) element_animations_->UpdateClientAnimationState(); scroll_offset_animation_was_interrupted_ = false; } void KeyframeEffect::KeyframeModelAdded() { DCHECK(has_bound_element_animations()); animation_->SetNeedsCommit(); needs_to_start_keyframe_models_ = true; UpdateTickingState(); for (auto& keyframe_model : keyframe_models()) { element_animations_->AttachToCurve(keyframe_model->curve()); } element_animations_->UpdateClientAnimationState(); } bool KeyframeEffect::DispatchAnimationEventToKeyframeModel( const AnimationEvent& event) { DCHECK(!event.is_impl_only); KeyframeModel* keyframe_model = KeyframeModel::ToCcKeyframeModel( GetKeyframeModelById(event.uid.model_id)); bool dispatched = false; switch (event.type) { case AnimationEvent::STARTED: if (keyframe_model && keyframe_model->needs_synchronized_start_time()) { keyframe_model->set_needs_synchronized_start_time(false); if (!keyframe_model->has_set_start_time()) 
keyframe_model->set_start_time(event.monotonic_time); dispatched = true; } break; case AnimationEvent::FINISHED: if (keyframe_model) { keyframe_model->set_received_finished_event(true); dispatched = true; } else { // This is for the case when a keyframe_model is already removed on main // thread, but the impl version of it sent a finished event and is now // waiting for deletion. We would need to delete that keyframe_model // during push properties. SetNeedsPushProperties(); } break; case AnimationEvent::ABORTED: if (keyframe_model) { keyframe_model->SetRunState(gfx::KeyframeModel::ABORTED, event.monotonic_time); keyframe_model->set_received_finished_event(true); dispatched = true; ElementAnimations* element_animations = animation_->animation_host() ->GetElementAnimationsForElementId(element_id()) .get(); if (element_animations) element_animations->UpdateClientAnimationState(); } break; case AnimationEvent::TAKEOVER: // TODO(crbug.com/1018213): Routing TAKEOVER events is broken. // We need to purge KeyframeModels marked for deletion on CT. SetNeedsPushProperties(); dispatched = true; break; case AnimationEvent::TIME_UPDATED: // TIME_UPDATED events are used to synchronize effect time between cc and // main thread worklet animations. Keyframe models are not involved in // this process. 
NOTREACHED(); break; } return dispatched; } bool KeyframeEffect::HasTickingKeyframeModel() const { for (const auto& keyframe_model : keyframe_models()) { if (!keyframe_model->is_finished()) return true; } return false; } bool KeyframeEffect::RequiresInvalidation() const { for (const auto& it : keyframe_models()) { if (it->TargetProperty() == TargetProperty::NATIVE_PROPERTY || it->TargetProperty() == TargetProperty::CSS_CUSTOM_PROPERTY) { return true; } } return false; } bool KeyframeEffect::AffectsNativeProperty() const { for (const auto& it : keyframe_models()) { // TODO(crbug.com/1257778): include the SCROLL_OFFSET here so that we won't // create a compositor animation frame sequence tracker when there is a // composited scroll. if (it->TargetProperty() != TargetProperty::CSS_CUSTOM_PROPERTY && it->TargetProperty() != TargetProperty::NATIVE_PROPERTY) return true; } return false; } bool KeyframeEffect::HasNonDeletedKeyframeModel() const { for (const auto& keyframe_model : keyframe_models()) { if (keyframe_model->run_state() != gfx::KeyframeModel::WAITING_FOR_DELETION) return true; } return false; } bool KeyframeEffect::AnimationsPreserveAxisAlignment() const { for (const auto& keyframe_model : keyframe_models()) { if (keyframe_model->is_finished()) continue; if (!keyframe_model->curve()->PreservesAxisAlignment()) return false; } return true; } float KeyframeEffect::MaximumScale(ElementListType list_type) const { float maximum_scale = kInvalidScale; for (const auto& keyframe_model : keyframe_models()) { if (keyframe_model->is_finished()) continue; auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); if ((list_type == ElementListType::ACTIVE && !cc_keyframe_model->affects_active_elements()) || (list_type == ElementListType::PENDING && !cc_keyframe_model->affects_pending_elements())) continue; float curve_maximum_scale = kInvalidScale; if (keyframe_model->curve()->MaximumScale(&curve_maximum_scale)) maximum_scale = std::max(maximum_scale, 
curve_maximum_scale); } return maximum_scale; } bool KeyframeEffect::IsPotentiallyAnimatingProperty( TargetProperty::Type target_property, ElementListType list_type) const { for (const auto& keyframe_model : keyframe_models()) { if (!keyframe_model->is_finished() && keyframe_model->TargetProperty() == target_property) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); if ((list_type == ElementListType::ACTIVE && cc_keyframe_model->affects_active_elements()) || (list_type == ElementListType::PENDING && cc_keyframe_model->affects_pending_elements())) return true; } } return false; } bool KeyframeEffect::IsCurrentlyAnimatingProperty( TargetProperty::Type target_property, ElementListType list_type) const { for (const auto& keyframe_model : keyframe_models()) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); if (!keyframe_model->is_finished() && cc_keyframe_model->InEffect( last_tick_time_.value_or(base::TimeTicks())) && keyframe_model->TargetProperty() == target_property) { if ((list_type == ElementListType::ACTIVE && cc_keyframe_model->affects_active_elements()) || (list_type == ElementListType::PENDING && cc_keyframe_model->affects_pending_elements())) return true; } } return false; } void KeyframeEffect::GetPropertyAnimationState( PropertyAnimationState* pending_state, PropertyAnimationState* active_state) const { pending_state->Clear(); active_state->Clear(); for (const auto& keyframe_model : keyframe_models()) { if (!keyframe_model->is_finished()) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); bool in_effect = cc_keyframe_model->InEffect( last_tick_time_.value_or(base::TimeTicks())); bool active = cc_keyframe_model->affects_active_elements(); bool pending = cc_keyframe_model->affects_pending_elements(); int property = keyframe_model->TargetProperty(); if (pending) pending_state->potentially_animating[property] = true; if (pending && in_effect) 
pending_state->currently_running[property] = true; if (active) active_state->potentially_animating[property] = true; if (active && in_effect) active_state->currently_running[property] = true; } } } void KeyframeEffect::MarkAbortedKeyframeModelsForDeletion( KeyframeEffect* keyframe_effect_impl) { bool keyframe_model_aborted = false; auto& keyframe_models_impl = keyframe_effect_impl->keyframe_models(); for (const auto& keyframe_model_impl : keyframe_models_impl) { // If the keyframe_model has been aborted on the main thread, mark it for // deletion. if (gfx::KeyframeModel* keyframe_model = GetKeyframeModelById(keyframe_model_impl->id())) { if (keyframe_model->run_state() == gfx::KeyframeModel::ABORTED) { keyframe_model_impl->SetRunState( gfx::KeyframeModel::WAITING_FOR_DELETION, keyframe_effect_impl->last_tick_time_.value_or(base::TimeTicks())); keyframe_model->SetRunState( gfx::KeyframeModel::WAITING_FOR_DELETION, last_tick_time_.value_or(base::TimeTicks())); keyframe_model_aborted = true; } } } if (has_bound_element_animations() && keyframe_model_aborted) element_animations_->SetNeedsPushProperties(); } void KeyframeEffect::PurgeKeyframeModelsMarkedForDeletion(bool impl_only) { base::EraseIf(keyframe_models(), [impl_only](const auto& keyframe_model) { return keyframe_model->run_state() == gfx::KeyframeModel::WAITING_FOR_DELETION && (!impl_only || KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->is_impl_only()); }); } void KeyframeEffect::PurgeDeletedKeyframeModels() { base::EraseIf(keyframe_models(), [](const auto& keyframe_model) { return keyframe_model->run_state() == gfx::KeyframeModel::WAITING_FOR_DELETION && !KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->affects_pending_elements(); }); } void KeyframeEffect::PushNewKeyframeModelsToImplThread( KeyframeEffect* keyframe_effect_impl) const { // Any new KeyframeModels owned by the main thread's Animation are // cloned and added to the impl thread's Animation. 
for (const auto& keyframe_model : keyframe_models()) { // If the keyframe_model is finished, do not copy it over to impl since the // impl instance, if there was one, was just removed in // |RemoveKeyframeModelsCompletedOnMainThread|. if (keyframe_model->is_finished()) continue; // If the keyframe_model is already running on the impl thread, there is no // need to copy it over. if (keyframe_effect_impl->GetKeyframeModelById(keyframe_model->id())) continue; if (keyframe_model->TargetProperty() == TargetProperty::SCROLL_OFFSET && !ScrollOffsetAnimationCurve::ToScrollOffsetAnimationCurve( keyframe_model->curve()) ->HasSetInitialValue()) { gfx::PointF current_scroll_offset; if (keyframe_effect_impl->HasElementInActiveList()) { current_scroll_offset = keyframe_effect_impl->ScrollOffsetForAnimation(); } else { // The owning layer isn't yet in the active tree, so the main thread // scroll offset will be up to date. current_scroll_offset = ScrollOffsetForAnimation(); } ScrollOffsetAnimationCurve* curve = ScrollOffsetAnimationCurve::ToScrollOffsetAnimationCurve( keyframe_model->curve()); curve->SetInitialValue(current_scroll_offset); } // The new keyframe_model should be set to run as soon as possible. 
gfx::KeyframeModel::RunState initial_run_state = gfx::KeyframeModel::WAITING_FOR_TARGET_AVAILABILITY; std::unique_ptr<KeyframeModel> to_add( KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->CreateImplInstance(initial_run_state)); DCHECK(!to_add->needs_synchronized_start_time()); to_add->set_affects_active_elements(false); keyframe_effect_impl->AddKeyframeModel(std::move(to_add)); } } namespace { bool IsCompleted(gfx::KeyframeModel* keyframe_model, const KeyframeEffect* main_thread_keyframe_effect) { if (KeyframeModel::ToCcKeyframeModel(keyframe_model)->is_impl_only()) { return (keyframe_model->run_state() == gfx::KeyframeModel::WAITING_FOR_DELETION); } else { gfx::KeyframeModel* main_thread_keyframe_model = main_thread_keyframe_effect->GetKeyframeModelById(keyframe_model->id()); return !main_thread_keyframe_model || main_thread_keyframe_model->is_finished(); } } } // namespace void KeyframeEffect::RemoveKeyframeModelsCompletedOnMainThread( KeyframeEffect* keyframe_effect_impl) const { bool keyframe_model_completed = false; // Animations removed on the main thread should no longer affect pending // elements, and should stop affecting active elements after the next call // to ActivateKeyframeEffects. If already WAITING_FOR_DELETION, they can be // removed immediately. for (auto& keyframe_model : keyframe_effect_impl->keyframe_models()) { if (IsCompleted(keyframe_model.get(), this)) { KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->set_affects_pending_elements(false); keyframe_model_completed = true; } } keyframe_effect_impl->PurgeDeletedKeyframeModels(); if (has_bound_element_animations() && keyframe_model_completed) element_animations_->SetNeedsPushProperties(); } void KeyframeEffect::PushPropertiesTo(KeyframeEffect* keyframe_effect_impl) { if (!needs_push_properties_) return; needs_push_properties_ = false; // Synchronize the keyframe_model target between main and impl side. 
if (element_id_ != keyframe_effect_impl->element_id_) { // We have to detach/attach via the Animation as it may need to inform // the host as well. if (keyframe_effect_impl->has_attached_element()) keyframe_effect_impl->animation_->DetachElement(); if (element_id_) { if (element_id_.GetStableId() == ElementId::kReservedElementId) keyframe_effect_impl->animation_->AttachNoElement(); else keyframe_effect_impl->animation_->AttachElement(element_id_); } } keyframe_effect_impl->scroll_offset_animation_was_interrupted_ = scroll_offset_animation_was_interrupted_; scroll_offset_animation_was_interrupted_ = false; // If neither main nor impl have any KeyframeModels, there is nothing further // to synchronize. if (!has_any_keyframe_model() && !keyframe_effect_impl->has_any_keyframe_model()) return; // Synchronize the main-thread and impl-side keyframe model lists, removing // aborted KeyframeModels and pushing any new animations. MarkAbortedKeyframeModelsForDeletion(keyframe_effect_impl); PurgeKeyframeModelsMarkedForDeletion(/* impl_only */ false); RemoveKeyframeModelsCompletedOnMainThread(keyframe_effect_impl); PushNewKeyframeModelsToImplThread(keyframe_effect_impl); // Now that the keyframe model lists are synchronized, push the properties for // the individual KeyframeModels. 
for (const auto& keyframe_model : keyframe_models()) { KeyframeModel* current_impl = KeyframeModel::ToCcKeyframeModel( keyframe_effect_impl->GetKeyframeModelById(keyframe_model->id())); if (current_impl) KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->PushPropertiesTo(current_impl); } keyframe_effect_impl->UpdateTickingState(); } std::string KeyframeEffect::KeyframeModelsToString() const { std::string str; for (size_t i = 0; i < keyframe_models().size(); i++) { if (i > 0) str.append(", "); str.append(KeyframeModel::ToCcKeyframeModel(keyframe_models()[i].get()) ->ToString()); } return str; } base::TimeDelta KeyframeEffect::MinimumTickInterval() const { base::TimeDelta min_interval = base::TimeDelta::Max(); for (const auto& model : keyframe_models()) { base::TimeDelta interval = model->curve()->TickInterval(); if (interval.is_zero()) return interval; if (interval < min_interval) min_interval = interval; } return min_interval; } void KeyframeEffect::RemoveKeyframeModelRange( typename KeyframeModels::iterator to_remove_begin, typename KeyframeModels::iterator to_remove_end) { bool keyframe_model_removed = false; for (auto it = to_remove_begin; it != to_remove_end; ++it) { if ((*it)->TargetProperty() == TargetProperty::SCROLL_OFFSET) { if (has_bound_element_animations()) scroll_offset_animation_was_interrupted_ = true; } else if (!(*it)->is_finished()) { keyframe_model_removed = true; } } gfx::KeyframeEffect::RemoveKeyframeModelRange(to_remove_begin, to_remove_end); if (has_bound_element_animations()) { UpdateTickingState(); if (keyframe_model_removed) element_animations_->UpdateClientAnimationState(); animation_->SetNeedsCommit(); SetNeedsPushProperties(); } } void KeyframeEffect::StartKeyframeModels(base::TimeTicks monotonic_time) { DCHECK(needs_to_start_keyframe_models_); needs_to_start_keyframe_models_ = false; // First collect running properties affecting each type of element. 
gfx::TargetProperties blocked_properties_for_active_elements; gfx::TargetProperties blocked_properties_for_pending_elements; std::vector<size_t> keyframe_models_waiting_for_target; keyframe_models_waiting_for_target.reserve(keyframe_models().size()); for (size_t i = 0; i < keyframe_models().size(); ++i) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_models()[i].get()); if (cc_keyframe_model->run_state() == gfx::KeyframeModel::STARTING || cc_keyframe_model->run_state() == gfx::KeyframeModel::RUNNING) { int property = cc_keyframe_model->TargetProperty(); if (cc_keyframe_model->affects_active_elements()) { blocked_properties_for_active_elements[property] = true; } if (cc_keyframe_model->affects_pending_elements()) { blocked_properties_for_pending_elements[property] = true; } } else if (cc_keyframe_model->run_state() == gfx::KeyframeModel::WAITING_FOR_TARGET_AVAILABILITY) { keyframe_models_waiting_for_target.push_back(i); } } for (size_t i = 0; i < keyframe_models_waiting_for_target.size(); ++i) { // Collect all properties for KeyframeModels with the same group id (they // should all also be in the list of KeyframeModels). size_t keyframe_model_index = keyframe_models_waiting_for_target[i]; KeyframeModel* keyframe_model_waiting_for_target = KeyframeModel::ToCcKeyframeModel( keyframe_models()[keyframe_model_index].get()); // Check for the run state again even though the keyframe_model was waiting // for target because it might have changed the run state while handling // previous keyframe_model in this loop (if they belong to same group). 
if (keyframe_model_waiting_for_target->run_state() == gfx::KeyframeModel::WAITING_FOR_TARGET_AVAILABILITY) { gfx::TargetProperties enqueued_properties; bool affects_active_elements = keyframe_model_waiting_for_target->affects_active_elements(); bool affects_pending_elements = keyframe_model_waiting_for_target->affects_pending_elements(); enqueued_properties[keyframe_model_waiting_for_target->TargetProperty()] = true; for (size_t j = keyframe_model_index + 1; j < keyframe_models().size(); ++j) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_models()[j].get()); if (keyframe_model_waiting_for_target->group() == cc_keyframe_model->group()) { enqueued_properties[cc_keyframe_model->TargetProperty()] = true; affects_active_elements |= cc_keyframe_model->affects_active_elements(); affects_pending_elements |= cc_keyframe_model->affects_pending_elements(); } } // Check to see if intersection of the list of properties affected by // the group and the list of currently blocked properties is null, taking // into account the type(s) of elements affected by the group. In any // case, the group's target properties need to be added to the lists of // blocked properties. bool null_intersection = true; for (int property = TargetProperty::FIRST_TARGET_PROPERTY; property <= TargetProperty::LAST_TARGET_PROPERTY; ++property) { if (enqueued_properties[property]) { if (affects_active_elements) { if (blocked_properties_for_active_elements[property]) null_intersection = false; else blocked_properties_for_active_elements[property] = true; } if (affects_pending_elements) { if (blocked_properties_for_pending_elements[property]) null_intersection = false; else blocked_properties_for_pending_elements[property] = true; } } } // If the intersection is null, then we are free to start the // KeyframeModels in the group. 
if (null_intersection) { keyframe_model_waiting_for_target->SetRunState( gfx::KeyframeModel::STARTING, monotonic_time); for (size_t j = keyframe_model_index + 1; j < keyframe_models().size(); ++j) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_models()[j].get()); if (keyframe_model_waiting_for_target->group() == cc_keyframe_model->group()) { cc_keyframe_model->SetRunState(gfx::KeyframeModel::STARTING, monotonic_time); } } } else { needs_to_start_keyframe_models_ = true; } } } } void KeyframeEffect::PromoteStartedKeyframeModels(AnimationEvents* events) { for (auto& keyframe_model : keyframe_models()) { if (keyframe_model->run_state() == gfx::KeyframeModel::STARTING && KeyframeModel::ToCcKeyframeModel(keyframe_model.get()) ->affects_active_elements()) { auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); cc_keyframe_model->SetRunState( gfx::KeyframeModel::RUNNING, last_tick_time_.value_or(base::TimeTicks())); if (!cc_keyframe_model->has_set_start_time() && !cc_keyframe_model->needs_synchronized_start_time()) cc_keyframe_model->set_start_time( last_tick_time_.value_or(base::TimeTicks())); base::TimeTicks start_time; if (cc_keyframe_model->has_set_start_time()) start_time = cc_keyframe_model->start_time(); else start_time = last_tick_time_.value_or(base::TimeTicks()); GenerateEvent(events, *cc_keyframe_model, AnimationEvent::STARTED, start_time); } } } void KeyframeEffect::MarkKeyframeModelsForDeletion( base::TimeTicks monotonic_time, AnimationEvents* events) { bool marked_keyframe_model_for_deletion = false; auto MarkForDeletion = [&](KeyframeModel* keyframe_model) { keyframe_model->SetRunState(gfx::KeyframeModel::WAITING_FOR_DELETION, monotonic_time); marked_keyframe_model_for_deletion = true; }; // Non-aborted KeyframeModels are marked for deletion after a corresponding // AnimationEvent::FINISHED event is sent or received. 
This means that if // we don't have an events vector, we must ensure that non-aborted // KeyframeModels have received a finished event before marking them for // deletion. for (auto& keyframe_model : keyframe_models()) { KeyframeModel* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); if (cc_keyframe_model->run_state() == gfx::KeyframeModel::ABORTED) { GenerateEvent(events, *cc_keyframe_model, AnimationEvent::ABORTED, monotonic_time); // If this is the controlling instance or it has already received finish // event, keyframe model can be marked for deletion. if (!NeedsFinishedEvent(cc_keyframe_model)) MarkForDeletion(cc_keyframe_model); continue; } // If this is an aborted controlling instance that need completion on the // main thread, generate takeover event. if (cc_keyframe_model->is_controlling_instance() && cc_keyframe_model->run_state() == gfx::KeyframeModel::ABORTED_BUT_NEEDS_COMPLETION) { GenerateTakeoverEventForScrollAnimation(events, *cc_keyframe_model, monotonic_time); // Remove the keyframe model from the impl thread. MarkForDeletion(cc_keyframe_model); continue; } if (cc_keyframe_model->run_state() != gfx::KeyframeModel::FINISHED) continue; // Since deleting an animation on the main thread leads to its deletion // on the impl thread, we only mark a FINISHED main thread animation for // deletion once it has received a FINISHED event from the impl thread. if (NeedsFinishedEvent(cc_keyframe_model)) continue; // If a keyframe model is finished, and not already marked for deletion, // find out if all other keyframe models in the same group are also // finished. 
std::vector<size_t> keyframe_models_in_same_group = FindAnimationsWithSameGroupId(keyframe_models(), cc_keyframe_model->group()); bool a_keyframe_model_in_same_group_is_not_finished = std::any_of( keyframe_models_in_same_group.cbegin(), keyframe_models_in_same_group.cend(), [&](size_t index) { auto* keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_models()[index].get()); return !keyframe_model->is_finished() || (keyframe_model->run_state() == gfx::KeyframeModel::FINISHED && NeedsFinishedEvent(keyframe_model)); }); if (a_keyframe_model_in_same_group_is_not_finished) continue; // Now remove all the keyframe models which belong to the same group and are // not yet aborted. These will be set to WAITING_FOR_DELETION which also // ensures we don't try to delete them again. for (size_t j = 0; j < keyframe_models_in_same_group.size(); ++j) { KeyframeModel* same_group_keyframe_model = KeyframeModel::ToCcKeyframeModel( keyframe_models()[keyframe_models_in_same_group[j]].get()); // Skip any keyframe model in this group which is already processed. if (same_group_keyframe_model->run_state() == gfx::KeyframeModel::WAITING_FOR_DELETION || same_group_keyframe_model->run_state() == gfx::KeyframeModel::ABORTED) continue; GenerateEvent(events, *same_group_keyframe_model, AnimationEvent::FINISHED, monotonic_time); MarkForDeletion(same_group_keyframe_model); } } // We need to purge KeyframeModels marked for deletion, which happens in // PushPropertiesTo(). 
if (marked_keyframe_model_for_deletion) SetNeedsPushProperties(); } void KeyframeEffect::MarkFinishedKeyframeModels( base::TimeTicks monotonic_time) { DCHECK(has_bound_element_animations()); bool keyframe_model_finished = false; for (auto& keyframe_model : keyframe_models()) { if (!keyframe_model->is_finished() && keyframe_model->IsFinishedAt(monotonic_time)) { keyframe_model->SetRunState(gfx::KeyframeModel::FINISHED, monotonic_time); keyframe_model_finished = true; SetNeedsPushProperties(); } auto* cc_keyframe_model = KeyframeModel::ToCcKeyframeModel(keyframe_model.get()); if (!cc_keyframe_model->affects_active_elements() && !cc_keyframe_model->affects_pending_elements()) { switch (keyframe_model->run_state()) { case gfx::KeyframeModel::WAITING_FOR_TARGET_AVAILABILITY: case gfx::KeyframeModel::STARTING: case gfx::KeyframeModel::RUNNING: case gfx::KeyframeModel::PAUSED: keyframe_model->SetRunState(gfx::KeyframeModel::FINISHED, monotonic_time); keyframe_model_finished = true; break; default: break; } } } if (keyframe_model_finished) element_animations_->UpdateClientAnimationState(); } bool KeyframeEffect::HasElementInActiveList() const { DCHECK(has_bound_element_animations()); return element_animations_->has_element_in_active_list(); } gfx::PointF KeyframeEffect::ScrollOffsetForAnimation() const { DCHECK(has_bound_element_animations()); return element_animations_->ScrollOffsetForAnimation(); } void KeyframeEffect::GenerateEvent(AnimationEvents* events, const KeyframeModel& keyframe_model, AnimationEvent::Type type, base::TimeTicks monotonic_time) { if (!events) return; AnimationEvent event(type, {animation_->animation_timeline()->id(), animation_->id(), keyframe_model.id()}, keyframe_model.group(), keyframe_model.TargetProperty(), monotonic_time); event.is_impl_only = KeyframeModel::ToCcKeyframeModel(&keyframe_model)->is_impl_only(); if (!event.is_impl_only) { events->events_.push_back(event); return; } // For impl only animations notify delegate directly, do not 
record the event. animation_->DispatchAndDelegateAnimationEvent(event); } void KeyframeEffect::GenerateTakeoverEventForScrollAnimation( AnimationEvents* events, const KeyframeModel& keyframe_model, base::TimeTicks monotonic_time) { DCHECK_EQ(keyframe_model.TargetProperty(), TargetProperty::SCROLL_OFFSET); if (!events) return; AnimationEvent takeover_event( AnimationEvent::TAKEOVER, {animation_->animation_timeline()->id(), animation_->id(), keyframe_model.id()}, keyframe_model.group(), keyframe_model.TargetProperty(), monotonic_time); takeover_event.animation_start_time = keyframe_model.start_time(); const ScrollOffsetAnimationCurve* scroll_offset_animation_curve = ScrollOffsetAnimationCurve::ToScrollOffsetAnimationCurve( keyframe_model.curve()); takeover_event.curve = scroll_offset_animation_curve->Clone(); // Notify main thread. events->events_.push_back(takeover_event); AnimationEvent finished_event( AnimationEvent::FINISHED, {animation_->animation_timeline()->id(), animation_->id(), keyframe_model.id()}, keyframe_model.group(), keyframe_model.TargetProperty(), monotonic_time); // Notify the compositor that the animation is finished. finished_event.is_impl_only = true; animation_->DispatchAndDelegateAnimationEvent(finished_event); } } // namespace cc
chromium/chromium
cc/animation/keyframe_effect.cc
C++
bsd-3-clause
41,191
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "components/nacl/zygote/nacl_fork_delegate_linux.h" #include <signal.h> #include <stddef.h> #include <stdlib.h> #include <sys/resource.h> #include <sys/socket.h> #include <memory> #include <set> #include "base/command_line.h" #include "base/cpu.h" #include "base/files/file_path.h" #include "base/files/scoped_file.h" #include "base/logging.h" #include "base/path_service.h" #include "base/pickle.h" #include "base/posix/eintr_wrapper.h" #include "base/posix/global_descriptors.h" #include "base/posix/unix_domain_socket.h" #include "base/process/kill.h" #include "base/process/launch.h" #include "base/strings/string_split.h" #include "build/build_config.h" #include "components/nacl/common/nacl_paths.h" #include "components/nacl/common/nacl_switches.h" #include "components/nacl/loader/nacl_helper_linux.h" #include "content/public/common/content_descriptors.h" #include "content/public/common/content_switches.h" #include "sandbox/linux/services/namespace_sandbox.h" #include "sandbox/linux/suid/client/setuid_sandbox_client.h" #include "sandbox/linux/suid/client/setuid_sandbox_host.h" #include "sandbox/linux/suid/common/sandbox.h" #include "sandbox/policy/switches.h" #include "third_party/cros_system_api/switches/chrome_switches.h" namespace { // Note these need to match up with their counterparts in nacl_helper_linux.c // and nacl_helper_bootstrap_linux.c. const char kNaClHelperReservedAtZero[] = "--reserved_at_zero=0xXXXXXXXXXXXXXXXX"; const char kNaClHelperRDebug[] = "--r_debug=0xXXXXXXXXXXXXXXXX"; // This is an environment variable which controls which (if any) other // environment variables are passed through to NaCl processes. e.g., // NACL_ENV_PASSTHROUGH="PATH,CWD" would pass both $PATH and $CWD to the child // process. 
const char kNaClEnvPassthrough[] = "NACL_ENV_PASSTHROUGH"; char kNaClEnvPassthroughDelimiter = ','; // The following environment variables are always passed through if they exist // in the parent process. const char kNaClExeStderr[] = "NACL_EXE_STDERR"; const char kNaClExeStdout[] = "NACL_EXE_STDOUT"; const char kNaClVerbosity[] = "NACLVERBOSITY"; #if defined(ARCH_CPU_X86) bool NonZeroSegmentBaseIsSlow() { base::CPU cpuid; // Using a non-zero segment base is known to be very slow on Intel // Atom CPUs. See "Segmentation-based Memory Protection Mechanism // on Intel Atom Microarchitecture: Coding Optimizations" (Leonardo // Potenza, Intel). // // The following list of CPU model numbers is taken from: // "Intel 64 and IA-32 Architectures Software Developer's Manual" // (http://download.intel.com/products/processor/manual/325462.pdf), // "Table 35-1. CPUID Signature Values of DisplayFamily_DisplayModel" // (Volume 3C, 35-1), which contains: // "06_36H - Intel Atom S Processor Family // 06_1CH, 06_26H, 06_27H, 06_35, 06_36 - Intel Atom Processor Family" if (cpuid.family() == 6) { switch (cpuid.model()) { case 0x1c: case 0x26: case 0x27: case 0x35: case 0x36: return true; } } return false; } #endif // Send an IPC request on |ipc_channel|. The request is contained in // |request_pickle| and can have file descriptors attached in |attached_fds|. // |reply_data_buffer| must be allocated by the caller and will contain the // reply. The size of the reply will be written to |reply_size|. // This code assumes that only one thread can write to |ipc_channel| to make // requests. 
bool SendIPCRequestAndReadReply(int ipc_channel, const std::vector<int>& attached_fds, const base::Pickle& request_pickle, char* reply_data_buffer, size_t reply_data_buffer_size, ssize_t* reply_size) { DCHECK_LE(static_cast<size_t>(kNaClMaxIPCMessageLength), reply_data_buffer_size); DCHECK(reply_size); if (!base::UnixDomainSocket::SendMsg(ipc_channel, request_pickle.data(), request_pickle.size(), attached_fds)) { LOG(ERROR) << "SendIPCRequestAndReadReply: SendMsg failed"; return false; } // Then read the remote reply. std::vector<base::ScopedFD> received_fds; const ssize_t msg_len = base::UnixDomainSocket::RecvMsg(ipc_channel, reply_data_buffer, reply_data_buffer_size, &received_fds); if (msg_len <= 0) { LOG(ERROR) << "SendIPCRequestAndReadReply: RecvMsg failed"; return false; } *reply_size = msg_len; return true; } } // namespace. namespace nacl { void AddNaClZygoteForkDelegates( std::vector<std::unique_ptr<content::ZygoteForkDelegate>>* delegates) { // We don't need the delegates for the unsandboxed zygote since NaCl always // starts from the sandboxed zygote. if (base::CommandLine::ForCurrentProcess()->HasSwitch( sandbox::policy::switches::kNoZygoteSandbox)) { return; } delegates->push_back(std::make_unique<NaClForkDelegate>()); } NaClForkDelegate::NaClForkDelegate() : status_(kNaClHelperUnused), fd_(-1) {} void NaClForkDelegate::Init(const int sandboxdesc, const bool enable_layer1_sandbox) { VLOG(1) << "NaClForkDelegate::Init()"; // TODO(rickyz): Make IsSuidSandboxChild a static function. 
std::unique_ptr<sandbox::SetuidSandboxClient> setuid_sandbox_client( sandbox::SetuidSandboxClient::Create()); const bool using_setuid_sandbox = setuid_sandbox_client->IsSuidSandboxChild(); const bool using_namespace_sandbox = sandbox::NamespaceSandbox::InNewUserNamespace(); CHECK(!(using_setuid_sandbox && using_namespace_sandbox)); if (enable_layer1_sandbox) { CHECK(using_setuid_sandbox || using_namespace_sandbox); } std::unique_ptr<sandbox::SetuidSandboxHost> setuid_sandbox_host( sandbox::SetuidSandboxHost::Create()); // For communications between the NaCl loader process and // the browser process. int nacl_sandbox_descriptor = base::GlobalDescriptors::kBaseDescriptor + kSandboxIPCChannel; // Confirm a hard-wired assumption. DCHECK_EQ(sandboxdesc, nacl_sandbox_descriptor); int fds[2]; PCHECK(0 == socketpair(PF_UNIX, SOCK_SEQPACKET, 0, fds)); bool use_nacl_bootstrap = true; #if defined(ARCH_CPU_X86_64) // Using nacl_helper_bootstrap is not necessary on x86-64 because // NaCl's x86-64 sandbox is not zero-address-based. Starting // nacl_helper through nacl_helper_bootstrap works on x86-64, but it // leaves nacl_helper_bootstrap mapped at a fixed address at the // bottom of the address space, which is undesirable because it // effectively defeats ASLR. use_nacl_bootstrap = false; #elif defined(ARCH_CPU_X86) // Performance vs. security trade-off: We prefer using a // non-zero-address-based sandbox on x86-32 because it provides some // ASLR and so is more secure. However, on Atom CPUs, using a // non-zero segment base is very slow, so we use a zero-based // sandbox on those. 
use_nacl_bootstrap = NonZeroSegmentBaseIsSlow(); #endif status_ = kNaClHelperUnused; base::FilePath helper_exe; base::FilePath helper_bootstrap_exe; if (!base::PathService::Get(nacl::FILE_NACL_HELPER, &helper_exe)) { status_ = kNaClHelperMissing; } else if (use_nacl_bootstrap && !base::PathService::Get(nacl::FILE_NACL_HELPER_BOOTSTRAP, &helper_bootstrap_exe)) { status_ = kNaClHelperBootstrapMissing; } else { base::CommandLine::StringVector argv_to_launch; { base::CommandLine cmd_line(base::CommandLine::NO_PROGRAM); if (use_nacl_bootstrap) cmd_line.SetProgram(helper_bootstrap_exe); else cmd_line.SetProgram(helper_exe); // Append any switches that need to be forwarded to the NaCl helper. static constexpr const char* kForwardSwitches[] = { sandbox::policy::switches::kAllowSandboxDebugging, sandbox::policy::switches::kDisableSeccompFilterSandbox, sandbox::policy::switches::kNoSandbox, switches::kEnableNaClDebug, switches::kVerboseLoggingInNacl, chromeos::switches::kFeatureFlags, }; const base::CommandLine& current_cmd_line = *base::CommandLine::ForCurrentProcess(); cmd_line.CopySwitchesFrom(current_cmd_line, kForwardSwitches, std::size(kForwardSwitches)); // The command line needs to be tightly controlled to use // |helper_bootstrap_exe|. So from now on, argv_to_launch should be // modified directly. argv_to_launch = cmd_line.argv(); } if (use_nacl_bootstrap) { // Arguments to the bootstrap helper which need to be at the start // of the command line, right after the helper's path. 
base::CommandLine::StringVector bootstrap_prepend; bootstrap_prepend.push_back(helper_exe.value()); bootstrap_prepend.push_back(kNaClHelperReservedAtZero); bootstrap_prepend.push_back(kNaClHelperRDebug); argv_to_launch.insert(argv_to_launch.begin() + 1, bootstrap_prepend.begin(), bootstrap_prepend.end()); } base::LaunchOptions options; options.fds_to_remap.push_back( std::make_pair(fds[1], kNaClZygoteDescriptor)); options.fds_to_remap.push_back( std::make_pair(sandboxdesc, nacl_sandbox_descriptor)); base::ScopedFD dummy_fd; if (using_setuid_sandbox) { // NaCl needs to keep tight control of the cmd_line, so prepend the // setuid sandbox wrapper manually. base::FilePath sandbox_path = setuid_sandbox_host->GetSandboxBinaryPath(); argv_to_launch.insert(argv_to_launch.begin(), sandbox_path.value()); setuid_sandbox_host->SetupLaunchOptions(&options, &dummy_fd); setuid_sandbox_host->SetupLaunchEnvironment(); } // The NaCl processes spawned may need to exceed the ambient soft limit // on RLIMIT_AS to allocate the untrusted address space and its guard // regions. The nacl_helper itself cannot just raise its own limit, // because the existing limit may prevent the initial exec of // nacl_helper_bootstrap from succeeding, with its large address space // reservation. std::vector<int> max_these_limits; max_these_limits.push_back(RLIMIT_AS); options.maximize_rlimits = &max_these_limits; // Clear the environment for the NaCl Helper process. options.clear_environment = true; AddPassthroughEnvToOptions(&options); #ifdef COMPONENT_BUILD // In component build, nacl_helper loads libgnutls.so. // Newer versions of libgnutls do implicit initialization when loaded that // leaves an additional /dev/urandom file descriptor open. Passing the // following env var asks libgnutls not to do that implicit initialization. // (crbug.com/973024) options.environment["GNUTLS_NO_EXPLICIT_INIT"] = "1"; #endif base::Process process = using_namespace_sandbox ? 
sandbox::NamespaceSandbox::LaunchProcess(argv_to_launch, options) : base::LaunchProcess(argv_to_launch, options); if (!process.IsValid()) status_ = kNaClHelperLaunchFailed; // parent and error cases are handled below if (using_setuid_sandbox) { // Sanity check that dummy_fd was kept alive for LaunchProcess. DCHECK(dummy_fd.is_valid()); } } if (IGNORE_EINTR(close(fds[1])) != 0) LOG(ERROR) << "close(fds[1]) failed"; if (status_ == kNaClHelperUnused) { const ssize_t kExpectedLength = strlen(kNaClHelperStartupAck); char buf[kExpectedLength]; // Wait for ack from nacl_helper, indicating it is ready to help const ssize_t nread = HANDLE_EINTR(read(fds[0], buf, sizeof(buf))); if (nread == kExpectedLength && memcmp(buf, kNaClHelperStartupAck, nread) == 0) { // all is well status_ = kNaClHelperSuccess; fd_ = fds[0]; return; } status_ = kNaClHelperAckFailed; LOG(ERROR) << "Bad NaCl helper startup ack (" << nread << " bytes)"; } // TODO(bradchen): Make this LOG(ERROR) when the NaCl helper // becomes the default. 
fd_ = -1; if (IGNORE_EINTR(close(fds[0])) != 0) LOG(ERROR) << "close(fds[0]) failed"; } void NaClForkDelegate::InitialUMA(std::string* uma_name, int* uma_sample, int* uma_boundary_value) { *uma_name = "NaCl.Client.Helper.InitState"; *uma_sample = status_; *uma_boundary_value = kNaClHelperStatusBoundary; } NaClForkDelegate::~NaClForkDelegate() { // side effect of close: delegate process will terminate if (status_ == kNaClHelperSuccess) { if (IGNORE_EINTR(close(fd_)) != 0) LOG(ERROR) << "close(fd_) failed"; } } bool NaClForkDelegate::CanHelp(const std::string& process_type, std::string* uma_name, int* uma_sample, int* uma_boundary_value) { if (process_type != switches::kNaClLoaderProcess) return false; *uma_name = "NaCl.Client.Helper.StateOnFork"; *uma_sample = status_; *uma_boundary_value = kNaClHelperStatusBoundary; return true; } pid_t NaClForkDelegate::Fork(const std::string& process_type, const std::vector<int>& fds, const std::string& channel_id) { VLOG(1) << "NaClForkDelegate::Fork"; DCHECK(fds.size() == kNumPassedFDs); if (status_ != kNaClHelperSuccess) { LOG(ERROR) << "Cannot launch NaCl process: nacl_helper failed to start"; return -1; } // First, send a remote fork request. base::Pickle write_pickle; write_pickle.WriteInt(nacl::kNaClForkRequest); write_pickle.WriteString(channel_id); char reply_buf[kNaClMaxIPCMessageLength]; ssize_t reply_size = 0; bool got_reply = SendIPCRequestAndReadReply(fd_, fds, write_pickle, reply_buf, sizeof(reply_buf), &reply_size); if (!got_reply) { LOG(ERROR) << "Could not perform remote fork."; return -1; } // Now see if the other end managed to fork. 
base::Pickle reply_pickle(reply_buf, reply_size); base::PickleIterator iter(reply_pickle); pid_t nacl_child; if (!iter.ReadInt(&nacl_child)) { LOG(ERROR) << "NaClForkDelegate::Fork: pickle failed"; return -1; } VLOG(1) << "nacl_child is " << nacl_child; return nacl_child; } bool NaClForkDelegate::GetTerminationStatus(pid_t pid, bool known_dead, base::TerminationStatus* status, int* exit_code) { VLOG(1) << "NaClForkDelegate::GetTerminationStatus"; DCHECK(status); DCHECK(exit_code); base::Pickle write_pickle; write_pickle.WriteInt(nacl::kNaClGetTerminationStatusRequest); write_pickle.WriteInt(pid); write_pickle.WriteBool(known_dead); const std::vector<int> empty_fds; char reply_buf[kNaClMaxIPCMessageLength]; ssize_t reply_size = 0; bool got_reply = SendIPCRequestAndReadReply(fd_, empty_fds, write_pickle, reply_buf, sizeof(reply_buf), &reply_size); if (!got_reply) { LOG(ERROR) << "Could not perform remote GetTerminationStatus."; return false; } base::Pickle reply_pickle(reply_buf, reply_size); base::PickleIterator iter(reply_pickle); int termination_status; if (!iter.ReadInt(&termination_status) || termination_status < 0 || termination_status >= base::TERMINATION_STATUS_MAX_ENUM) { LOG(ERROR) << "GetTerminationStatus: pickle failed"; return false; } int remote_exit_code; if (!iter.ReadInt(&remote_exit_code)) { LOG(ERROR) << "GetTerminationStatus: pickle failed"; return false; } *status = static_cast<base::TerminationStatus>(termination_status); *exit_code = remote_exit_code; return true; } // static void NaClForkDelegate::AddPassthroughEnvToOptions( base::LaunchOptions* options) { std::unique_ptr<base::Environment> env(base::Environment::Create()); std::string pass_through_string; std::vector<std::string> pass_through_vars; if (env->GetVar(kNaClEnvPassthrough, &pass_through_string)) { pass_through_vars = base::SplitString( pass_through_string, std::string(1, kNaClEnvPassthroughDelimiter), base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL); } 
pass_through_vars.push_back(kNaClExeStderr); pass_through_vars.push_back(kNaClExeStdout); pass_through_vars.push_back(kNaClVerbosity); pass_through_vars.push_back(sandbox::kSandboxEnvironmentApiRequest); for (size_t i = 0; i < pass_through_vars.size(); ++i) { std::string temp; if (env->GetVar(pass_through_vars[i], &temp)) options->environment[pass_through_vars[i]] = temp; } } } // namespace nacl
chromium/chromium
components/nacl/zygote/nacl_fork_delegate_linux.cc
C++
bsd-3-clause
16,987
// Copyright 2022 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CC_PAINT_SKOTTIE_TEXT_PROPERTY_VALUE_H_ #define CC_PAINT_SKOTTIE_TEXT_PROPERTY_VALUE_H_ #include <string> #include "base/containers/flat_map.h" #include "base/memory/ref_counted_memory.h" #include "base/memory/scoped_refptr.h" #include "cc/paint/paint_export.h" #include "cc/paint/skottie_resource_metadata.h" namespace cc { // Contains a subset of the fields in skottie::TextPropertyValue that the caller // may want to override when rendering the animation. The primary field of // course is the text itself, but other fields may be added to this class as // desired. All skottie::TextPropertyValue fields not present in this class will // ultimately assume the same values as those baked into the Lottie file when // rendered. // // This class is intentionally cheap to copy. class CC_PAINT_EXPORT SkottieTextPropertyValue { public: explicit SkottieTextPropertyValue(std::string text); SkottieTextPropertyValue(const SkottieTextPropertyValue& other); SkottieTextPropertyValue& operator=(const SkottieTextPropertyValue& other); ~SkottieTextPropertyValue(); bool operator==(const SkottieTextPropertyValue& other) const; bool operator!=(const SkottieTextPropertyValue& other) const; void SetText(std::string text); const std::string& text() const { return text_->data(); } private: // Make the text ref-counted to eliminate as many deep copies as possible when // this class is passed through the rendering pipeline. Note the text's string // content is never mutated once it's set, eliminating the chance of any race // conditions. scoped_refptr<base::RefCountedString> text_; // For fast comparison operator. size_t text_hash_ = 0; }; // Node name in the Lottie file (hashed) to corresponding // SkottieTextPropertyValue. 
using SkottieTextPropertyValueMap = base::flat_map<SkottieResourceIdHash, SkottieTextPropertyValue>; } // namespace cc #endif // CC_PAINT_SKOTTIE_TEXT_PROPERTY_VALUE_H_
chromium/chromium
cc/paint/skottie_text_property_value.h
C
bsd-3-clause
2,115
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_SESSIONS_SESSION_SERVICE_H_ #define CHROME_BROWSER_SESSIONS_SESSION_SERVICE_H_ #include <map> #include <string> #include "base/callback_list.h" #include "base/gtest_prod_util.h" #include "base/memory/weak_ptr.h" #include "chrome/browser/sessions/session_service_base.h" #include "chrome/browser/ui/browser.h" #include "components/sessions/core/command_storage_manager_delegate.h" #include "components/tab_groups/tab_group_id.h" #include "components/tab_groups/tab_group_visual_data.h" #include "third_party/abseil-cpp/absl/types/optional.h" class Profile; namespace content { class WebContents; } // namespace content namespace sessions { struct SessionWindow; } // namespace sessions struct StartupTab; using StartupTabs = std::vector<StartupTab>; // SessionService ------------------------------------------------------------ // SessionService is responsible for maintaining the state of open windows // and tabs so that they can be restored at a later date. The state of the // currently open browsers is referred to as the current session. // // SessionService supports restoring from the last session. The last session // typically corresponds to the last run of the browser, but not always. For // example, if the user has a tabbed browser and app window running, closes the // tabbed browser, then creates a new tabbed browser the current session is made // the last session and the current session reset. This is done to provide the // illusion that app windows run in separate processes. Similar behavior occurs // with incognito windows. // // SessionService itself uses functions from session_service_commands to store // commands which can rebuild the open state of the browser (as |SessionWindow|, // |SessionTab| and |SerializedNavigationEntry|). 
The commands are periodically // flushed to |CommandStorageBackend| and written to a file. Every so often // |SessionService| rebuilds the contents of the file from the open state of the // browser. // TODO(stahon@microsoft.com) When AppSessionService is implemented, we should // make a pass in SessionService to remove app related code. class SessionService : public SessionServiceBase { friend class SessionServiceTestHelper; public: // Creates a SessionService for the specified profile. explicit SessionService(Profile* profile); SessionService(const SessionService&) = delete; SessionService& operator=(const SessionService&) = delete; ~SessionService() override; // Returns true if `window_type` identifies a type tracked by SessionService. static bool IsRelevantWindowType( sessions::SessionWindow::WindowType window_type); // Returns true if restore should be triggered. If `browser` is non-null this // is called as the result of a new Browser being created. If `browser` is // null this is called from RestoreIfNecessary(); bool ShouldRestore(Browser* browser); // Invoke at a point when you think session restore might occur. For example, // during startup and window creation this is invoked to see if a session // needs to be restored. If a session needs to be restored it is done so // asynchronously and true is returned. If false is returned the session was // not restored and the caller needs to create a new window. // Since RestoreIfNecessary can potentially trigger a restore, we need to // know whether the caller intends for us to restore apps or not. bool RestoreIfNecessary(const StartupTabs& startup_tabs, bool restore_apps); // Moves the current session to the last session. This is useful when a // checkpoint occurs, such as when the user launches the app and no tabbed // browsers are running. void MoveCurrentSessionToLastSession(); // Deletes the last session. void DeleteLastSession(); // Sets a tab's group ID, if any. 
Note that a group can't be split between // multiple windows. void SetTabGroup(const SessionID& window_id, const SessionID& tab_id, absl::optional<tab_groups::TabGroupId> group); // Updates the metadata associated with a tab group. |window_id| should be // the window where the group currently resides. Note that a group can't be // split between multiple windows. void SetTabGroupMetadata(const SessionID& window_id, const tab_groups::TabGroupId& group_id, const tab_groups::TabGroupVisualData* visual_data); // Sets the pinned state of the tab. void SetPinnedState(const SessionID& window_id, const SessionID& tab_id, bool is_pinned); void AddTabExtraData(const SessionID& window_id, const SessionID& tab_id, const char* key, const std::string data); void AddWindowExtraData(const SessionID& window_id, const char* key, const std::string data); void TabClosed(const SessionID& window_id, const SessionID& tab_id) override; // Notification a window has opened. void WindowOpened(Browser* browser) override; // Notification the window is about to close. void WindowClosing(const SessionID& window_id) override; // Notification a window has finished closing. void WindowClosed(const SessionID& window_id) override; // Sets the type of window. In order for the contents of a window to be // tracked SetWindowType must be invoked with a type we track // (ShouldRestoreOfWindowType returns true). void SetWindowType(const SessionID& window_id, Browser::Type type) override; void SetWindowUserTitle(const SessionID& window_id, const std::string& user_title); // CommandStorageManagerDelegate: void OnErrorWritingSessionCommands() override; void SetTabUserAgentOverride(const SessionID& window_id, const SessionID& tab_id, const sessions::SerializedUserAgentOverride& user_agent_override) override; protected: Browser::Type GetDesiredBrowserTypeForWebContents() override; void DidScheduleCommand() override; private: // Allow tests to access our innards for testing purposes. 
FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, SavedSessionNotification); FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, RestoreActivation1); FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, RestoreActivation2); FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, RemoveUnusedRestoreWindowsTest); FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, Workspace); FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, WorkspaceSavedOnOpened); FRIEND_TEST_ALL_PREFIXES(SessionServiceTest, VisibleOnAllWorkspaces); FRIEND_TEST_ALL_PREFIXES(NoStartupWindowTest, DontInitSessionServiceForApps); using IdToRange = std::map<SessionID, std::pair<int, int>>; // Returns true if a window of given |window_type| should get // restored upon session restore. bool ShouldRestoreWindowOfType( sessions::SessionWindow::WindowType type) const override; // Implementation of RestoreIfNecessary. If |browser| is non-null and we // need to restore, the tabs are added to it, otherwise a new browser is // created. bool RestoreIfNecessary(const StartupTabs& startup_tabs, Browser* browser, bool restore_apps); // Adds commands to commands that will recreate the state of the specified // tab. This adds at most kMaxNavigationCountToPersist navigations (in each // direction from the current navigation index). // A pair is added to tab_to_available_range indicating the range of // indices that were written. void BuildCommandsForTab(const SessionID& window_id, content::WebContents* tab, int index_in_window, absl::optional<tab_groups::TabGroupId> group, bool is_pinned, IdToRange* tab_to_available_range) override; // Schedules a reset of the existing commands. A reset means the contents // of the file are recreated from the state of the browser. void ScheduleResetCommands() override; // Converts all pending tab/window closes to commands and schedules them. void CommitPendingCloses(); // Returns true if there is only one window open with a single tab that // shares our profile. 
bool IsOnlyOneTabLeft() const; // Returns true if there are open trackable browser windows whose ids do // match |window_id| with our profile. A trackable window is a window from // which |ShouldRestoreWindowOfType| returns true. See // |ShouldRestoreWindowOfType| for details. bool HasOpenTrackableBrowsers(const SessionID& window_id) const; // Will rebuild session commands if rebuild_on_next_save_ is true. void RebuildCommandsIfRequired() override; // Invoked with true when all browsers start closing. void OnClosingAllBrowsersChanged(bool closing); // If necessary, removes the current exit event and adds a new one. This // does nothing if `pending_window_close_ids_` is empty, which means the // user is potentially closing the last browser. void LogExitEvent(); // If an exit event was logged, it is removed. void RemoveExitEvent(); // When the user closes the last window, where the last window is the // last tabbed browser and no more tabbed browsers are open with the same // profile, the window ID is added here. These IDs are only committed (which // marks them as closed) if the user creates a new tabbed browser. using PendingWindowCloseIDs = std::set<SessionID>; PendingWindowCloseIDs pending_window_close_ids_; // Set of tabs that have been closed by way of the last window or last tab // closing, but not yet committed. using PendingTabCloseIDs = std::set<SessionID>; PendingTabCloseIDs pending_tab_close_ids_; // When a window other than the last window (see description of // pending_window_close_ids) is closed, the id is added to this set. using WindowClosingIDs = std::set<SessionID>; WindowClosingIDs window_closing_ids_; // Are there any open trackable browsers? 
bool has_open_trackable_browsers_ = false; // Used to override HasOpenTrackableBrowsers() bool has_open_trackable_browser_for_test_ = true; // Use to override IsOnlyOneTableft() bool is_only_one_tab_left_for_test_ = false; // If true and a new tabbed browser is created and there are no opened // tabbed browser (has_open_trackable_browsers_ is false), then the current // session is made the last session. See description above class for details // on current/last session. bool move_on_new_browser_ = false; // For browser_tests, since we want to simulate the browser shutting down // without quitting. bool force_browser_not_alive_with_no_windows_ = false; base::CallbackListSubscription closing_all_browsers_subscription_; bool did_log_exit_ = false; int unrecoverable_write_error_count_ = 0; // True if this is the first SessionService created for the Profile. A value // of false means the first SessionService was destroyed and a new one // created. const bool is_first_session_service_; // Set to true once a valid command has been scheduled. bool did_schedule_command_ = false; base::WeakPtrFactory<SessionService> weak_factory_{this}; }; #endif // CHROME_BROWSER_SESSIONS_SESSION_SERVICE_H_
chromium/chromium
chrome/browser/sessions/session_service.h
C
bsd-3-clause
11,621
// // ASCollectionViewLayoutInspector.h // AsyncDisplayKit // // Created by Garrett Moon on 11/19/16. // Copyright © 2016 Facebook. All rights reserved. // #import <Foundation/Foundation.h> #import <AsyncDisplayKit/ASDimension.h> #import <AsyncDisplayKit/ASScrollDirection.h> @class ASCollectionView; @protocol ASCollectionDataSource; @protocol ASCollectionDelegate; NS_ASSUME_NONNULL_BEGIN extern ASSizeRange NodeConstrainedSizeForScrollDirection(ASCollectionView *collectionView); @protocol ASCollectionViewLayoutInspecting <NSObject> /** * Asks the inspector to provide a constrained size range for the given collection view node. */ - (ASSizeRange)collectionView:(ASCollectionView *)collectionView constrainedSizeForNodeAtIndexPath:(NSIndexPath *)indexPath; /** * Return the directions in which your collection view can scroll */ - (ASScrollDirection)scrollableDirections; @optional /** * Asks the inspector to provide a constrained size range for the given supplementary node. */ - (ASSizeRange)collectionView:(ASCollectionView *)collectionView constrainedSizeForSupplementaryNodeOfKind:(NSString *)kind atIndexPath:(NSIndexPath *)indexPath; /** * Asks the inspector for the number of supplementary views for the given kind in the specified section. */ - (NSUInteger)collectionView:(ASCollectionView *)collectionView supplementaryNodesOfKind:(NSString *)kind inSection:(NSUInteger)section; /** * Allow the inspector to respond to delegate changes. * * @discussion A great time to update perform selector caches! */ - (void)didChangeCollectionViewDelegate:(nullable id<ASCollectionDelegate>)delegate; /** * Allow the inspector to respond to dataSource changes. * * @discussion A great time to update perform selector caches! */ - (void)didChangeCollectionViewDataSource:(nullable id<ASCollectionDataSource>)dataSource; #pragma mark Deprecated Methods /** * Asks the inspector for the number of supplementary sections in the collection view for the given kind. 
* * @deprecated This method will not be called, and it is only deprecated as a reminder to remove it. * Supplementary elements must exist in the same sections as regular collection view items i.e. -numberOfSectionsInCollectionView: */ - (NSUInteger)collectionView:(ASCollectionView *)collectionView numberOfSectionsForSupplementaryNodeOfKind:(NSString *)kind ASDISPLAYNODE_DEPRECATED_MSG("Use ASCollectionNode's method instead."); @end /** * A layout inspector for non-flow layouts that returns a constrained size to let the cells layout itself as * far as possible based on the scrollable direction of the collection view. * It doesn't support supplementary nodes and therefore doesn't implement delegate methods * that are related to supplementary node's management. * * @warning This class is not meant to be subclassed and will be restricted in the future. */ @interface ASCollectionViewLayoutInspector : NSObject <ASCollectionViewLayoutInspecting> - (instancetype)initWithCollectionView:(ASCollectionView *)collectionView ASDISPLAYNODE_DEPRECATED_MSG("Use -init instead."); @end NS_ASSUME_NONNULL_END
maicki/AsyncDisplayKit
Source/Details/ASCollectionViewLayoutInspector.h
C
bsd-3-clause
3,122
<?php /** * PhpThumb Library Example File * * This file contains example usage for the PHP Thumb Library * * PHP Version 5 with GD 2.0+ * PhpThumb : PHP Thumb Library <http://phpthumb.gxdlabs.com> * Copyright (c) 2009, Ian Selby/Gen X Design * * Author(s): Ian Selby <ian@gen-x-design.com> * * Licensed under the MIT License * Redistributions of files must retain the above copyright notice. * * @author Ian Selby <ian@gen-x-design.com> * @copyright Copyright (c) 2009 Gen X Design * @link http://phpthumb.gxdlabs.com * @license http://www.opensource.org/licenses/mit-license.php The MIT License * @version 3.0 * @package PhpThumb * @subpackage Examples * @filesource */ require_once '../ThumbLib.inc.php'; $thumb = PhpThumbFactory::create('test.jpg'); $thumb->rotateImageNDegrees(180); $thumb->show(); ?>
quyettvq/linhbui.vn
common/utils/phpthumb/examples/rotate_advanced.php
PHP
bsd-3-clause
870
/**
 * PANDA 3D SOFTWARE
 * Copyright (c) Carnegie Mellon University. All rights reserved.
 *
 * All use of this software is subject to the terms of the revised BSD
 * license.  You should have received a copy of this license along
 * with this source code in a file named "LICENSE."
 *
 * @file cvsCopy.h
 * @author drose
 * @date 2000-10-31
 */

#ifndef CVSCOPY_H
#define CVSCOPY_H

#include "pandatoolbase.h"
#include "cvsSourceTree.h"
#include "programBase.h"
#include "filename.h"
#include "pvector.h"

/**
 * This is the base class for a family of programs that copy files, typically
 * model files like .flt files and their associated textures, into a CVS-
 * controlled source tree.
 */
class CVSCopy : public ProgramBase {
public:
  CVSCopy();

  // Copies the given source file into the source tree, choosing or confirming
  // a destination directory (starting from suggested_dir), and returns the
  // resulting path within the tree.  extra_data is passed through unchanged
  // to the copy_file()/verify_file() hooks.
  CVSSourceTree::FilePath import(const Filename &source, void *extra_data,
                                 CVSSourceDirectory *suggested_dir);

  // Returns true if processing should continue after a failed copy
  // (presumably by asking the user when running interactively — confirm in
  // the .cxx file).
  bool continue_after_error();

protected:
  // ProgramBase overrides: parse remaining command-line args and finish
  // setup after the command line has been read.
  virtual bool handle_args(Args &args);
  virtual bool post_command_line();

  // Hook: returns true if source and the already-existing dest are
  // equivalent, so the copy may be skipped.
  virtual bool verify_file(const Filename &source, const Filename &dest,
                           CVSSourceDirectory *dest_dir,
                           void *extra_data);

  // Hook: performs the actual copy.  Pure virtual — each derived program
  // (e.g. a .flt or texture copier) supplies its own file-type-aware copy.
  // new_file is true if dest did not previously exist.
  virtual bool copy_file(const Filename &source, const Filename &dest,
                         CVSSourceDirectory *dest_dir,
                         void *extra_data, bool new_file)=0;

  // Byte-for-byte helpers usable by derived classes for binary file types.
  bool verify_binary_file(Filename source, Filename dest);
  bool copy_binary_file(Filename source, Filename dest);

  // Runs "cvs add" (via _cvs_binary) on the newly created file.
  bool cvs_add(const Filename &filename);
  // Quotes/escapes a string so it is safe to interpolate into a shell
  // command line.
  static string protect_from_shell(const string &source);

  // Hook: allows derived classes to rewrite a source filename before it is
  // used as the destination name (default presumably returns it unchanged).
  virtual string filter_filename(const string &source);

private:
  // Scans the CVS source hierarchy rooted at _root_dirname into _tree.
  bool scan_hierarchy();
  // Searches upward/downward for the directory containing _key_filename —
  // confirm exact search direction in the .cxx file.
  bool scan_for_root(const string &dirname);
  // Prompts the user with the given message and returns the typed response.
  string prompt(const string &message);

protected:
  bool _force;              // -f: copy without asking (assumed; from options)
  bool _interactive;        // prompt the user for destination directories
  bool _got_model_dirname;  // true if _model_dirname was given explicitly
  Filename _model_dirname;  // preferred directory for model files
  bool _got_map_dirname;    // true if _map_dirname was given explicitly
  Filename _map_dirname;    // preferred directory for texture/map files
  bool _got_root_dirname;   // true if _root_dirname was given explicitly
  Filename _root_dirname;   // root of the CVS-controlled source tree
  Filename _key_filename;   // file whose presence identifies the tree root
  bool _no_cvs;             // if true, skip all cvs commands (copy only)
  string _cvs_binary;       // name/path of the cvs executable to invoke
  bool _user_aborted;       // set when the user cancels at a prompt

  // All source files named on the command line, in order.
  typedef pvector<Filename> SourceFiles;
  SourceFiles _source_files;

  CVSSourceTree _tree;             // in-memory model of the source hierarchy
  CVSSourceDirectory *_model_dir;  // resolved _model_dirname within _tree
  CVSSourceDirectory *_map_dir;    // resolved _map_dirname within _tree

  // Maps each source file already processed to where it landed in the tree,
  // so a file referenced twice is copied only once.
  typedef pmap<string, CVSSourceTree::FilePath> CopiedFiles;
  CopiedFiles _copied_files;
};

#endif
brakhane/panda3d
pandatool/src/cvscopy/cvsCopy.h
C
bsd-3-clause
2,311
<?php

/**
 * This file is part of the Propel package.
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 *
 * @license MIT License
 */

namespace Propel\Generator\Util;

use Propel\Generator\Builder\Util\SchemaReader;
use Propel\Generator\Config\GeneratorConfigInterface;
use Propel\Generator\Config\QuickGeneratorConfig;
use Propel\Generator\Model\Table;
use Propel\Generator\Platform\PlatformInterface;
use Propel\Generator\Platform\SqlitePlatform;
use Propel\Runtime\Adapter\Pdo\SqliteAdapter;
use Propel\Runtime\Connection\PdoConnection;
use Propel\Runtime\Connection\ConnectionInterface;
use Propel\Runtime\Connection\ConnectionWrapper;
use Propel\Runtime\Connection\StatementInterface;
use Propel\Runtime\Propel;

/**
 * Builds a complete working Propel environment — SQLite schema, generated
 * model classes and a live connection — from an XML schema string, entirely
 * in memory.  Intended for tests and quick prototyping.
 */
class QuickBuilder
{
    // schema: XML schema string; platform/config/database: lazily created.
    protected $schema, $platform, $config, $database;

    // The five builder targets generated per table by default.
    protected $classTargets = array('tablemap', 'object', 'query', 'objectstub', 'querystub');

    /**
     * Sets the XML schema string describing the database to build.
     *
     * @param string $schema
     */
    public function setSchema($schema)
    {
        $this->schema = $schema;
    }

    /**
     * Setter for the platform property
     *
     * @param PlatformInterface $platform
     */
    public function setPlatform($platform)
    {
        $this->platform = $platform;
    }

    /**
     * Getter for the platform property
     *
     * @return PlatformInterface
     */
    public function getPlatform()
    {
        // Default to SQLite, matching the in-memory DSN used by build().
        if (null === $this->platform) {
            $this->platform = new SqlitePlatform();
        }

        return $this->platform;
    }

    /**
     * Setter for the config property
     *
     * @param GeneratorConfigInterface $config
     */
    public function setConfig(GeneratorConfigInterface $config)
    {
        $this->config = $config;
    }

    /**
     * Getter for the config property
     *
     * @return GeneratorConfigInterface
     */
    public function getConfig()
    {
        if (null === $this->config) {
            $this->config = new QuickGeneratorConfig();
        }

        return $this->config;
    }

    /**
     * One-shot convenience: builds everything for the given schema and
     * returns the resulting connection.  See build() for the parameters.
     *
     * @return ConnectionWrapper
     */
    public static function buildSchema($schema, $dsn = null, $user = null, $pass = null, $adapter = null)
    {
        $builder = new self;
        $builder->setSchema($schema);

        return $builder->build($dsn, $user, $pass, $adapter);
    }

    /**
     * Creates the database (default: in-memory SQLite), loads the generated
     * classes, and registers adapter + connection with Propel's service
     * container under the database name.
     *
     * @param string|null   $dsn          PDO DSN; defaults to 'sqlite::memory:'
     * @param string|null   $user         connection user
     * @param string|null   $pass         connection password
     * @param mixed|null    $adapter      Propel adapter; defaults to SqliteAdapter
     * @param array|null    $classTargets builder targets; defaults to all five
     *
     * @return ConnectionWrapper the live, registered connection
     */
    public function build($dsn = null, $user = null, $pass = null, $adapter = null, array $classTargets = null)
    {
        if (null === $dsn) {
            $dsn = 'sqlite::memory:';
        }
        if (null === $adapter) {
            $adapter = new SqliteAdapter();
        }
        if (null === $classTargets) {
            $classTargets = $this->classTargets;
        }
        $pdo = new PdoConnection($dsn, $user, $pass);
        $con = new ConnectionWrapper($pdo);
        // WARNING (not exception) mode: buildSQL() tolerates statements that
        // fail to prepare — see the StatementInterface check there.
        $con->setAttribute(\PDO::ATTR_ERRMODE, \PDO::ERRMODE_WARNING);
        $adapter->initConnection($con, []);
        $this->buildSQL($con);
        $this->buildClasses($classTargets);
        $name = $this->getDatabase()->getName();
        Propel::getServiceContainer()->setAdapter($name, $adapter);
        Propel::getServiceContainer()->setConnection($name, $con);

        return $con;
    }

    /**
     * Parses the schema string (once; memoized) into a Database model.
     *
     * @return \Propel\Generator\Model\Database
     */
    public function getDatabase()
    {
        if (null === $this->database) {
            $xtad = new SchemaReader($this->getPlatform());
            $xtad->setGeneratorConfig($this->getConfig());
            $appData = $xtad->parseString($this->schema);
            $this->database = $appData->getDatabase(); // does final initialization
        }

        return $this->database;
    }

    /**
     * Executes the DDL for the schema against the given connection.
     *
     * @param ConnectionInterface $con
     *
     * @return int the number of statements parsed (including skipped ones)
     */
    public function buildSQL(ConnectionInterface $con)
    {
        $statements = SqlParser::parseString($this->getSQL());
        foreach ($statements as $statement) {
            if (strpos($statement, 'DROP') === 0) {
                // drop statements cause errors since the table doesn't exist
                continue;
            }
            $stmt = $con->prepare($statement);
            if ($stmt instanceof StatementInterface) {
                // only execute if has no error (ERRMODE_WARNING makes a
                // failed prepare() return false instead of throwing)
                $stmt->execute();
            }
        }

        return count($statements);
    }

    /**
     * Returns the CREATE TABLE DDL for the whole schema.
     *
     * @return string
     */
    public function getSQL()
    {
        return $this->getPlatform()->getAddTablesDDL($this->getDatabase());
    }

    /**
     * Derives a cache-file name from up to the first few table names and the
     * selected class targets, so different schemas/targets get distinct
     * generated-code files.
     *
     * @param array|null $classTargets
     *
     * @return string
     */
    public function getBuildName($classTargets = null)
    {
        $tables = [];
        foreach ($this->getDatabase()->getTables() as $table) {
            // NOTE(review): ">" means up to 5 table names are collected
            // before the loop stops — confirm that is intentional vs ">=".
            if (count($tables) > 3) break;
            $tables[] = $table->getName();
        }
        $name = implode('_', $tables);
        // count == 5 means "all default targets", same as passing none.
        if (!$classTargets || count($classTargets) == 5) {
            $name .= '-all';
        } else {
            $name .= '-' . implode('_', $classTargets);
        }

        return $name;
    }

    /**
     * Generates the model classes, writes them to a temp file and includes
     * it, making the classes available in the current process.
     *
     * NOTE(review): the temp-file path is predictable and then include()d;
     * acceptable for a test helper, but unsafe on a shared /tmp — confirm
     * the intended threat model.
     *
     * @param array|null $classTargets
     */
    public function buildClasses(array $classTargets = null)
    {
        $code = "<?php\n".$this->getClasses($classTargets);
        $name = $this->getBuildName($classTargets);
        $tempFile = sys_get_temp_dir() . '/propelQuickBuilder'.$name.'.php';
        file_put_contents($tempFile, $code);
        include($tempFile);
    }

    /**
     * Returns the generated PHP source for every table in the schema,
     * concatenated (without a leading "<?php").
     *
     * @param array|null $classTargets
     *
     * @return string
     */
    public function getClasses(array $classTargets = null)
    {
        $script = '';
        foreach ($this->getDatabase()->getTables() as $table) {
            $script .= $this->getClassesForTable($table, $classTargets);
        }

        return $script;
    }

    /**
     * Generates the PHP source for a single table: the requested builder
     * targets, plus inheritance stubs for enumerated child classes, the
     * table interface, and any additional builders declared on the table.
     *
     * @param Table      $table
     * @param array|null $classTargets
     *
     * @return string
     */
    public function getClassesForTable(Table $table, array $classTargets = null)
    {
        if (null === $classTargets) {
            $classTargets = $this->classTargets;
        }
        $script = '';
        foreach ($classTargets as $target) {
            $class = $this->getConfig()->getConfiguredBuilder($table, $target)->build();
            $script .= $this->fixNamespaceDeclarations($class);
        }

        // Single-table inheritance: generate per-child builder output.
        if ($col = $table->getChildrenColumn()) {
            if ($col->isEnumeratedClasses()) {
                foreach ($col->getChildren() as $child) {
                    if ($child->getAncestor()) {
                        $builder = $this->getConfig()->getConfiguredBuilder($table, 'queryinheritance');
                        $builder->setChild($child);
                        $class = $builder->build();
                        $script .= $this->fixNamespaceDeclarations($class);

                        foreach (array('objectmultiextend', 'queryinheritancestub') as $target) {
                            $builder = $this->getConfig()->getConfiguredBuilder($table, $target);
                            $builder->setChild($child);
                            $class = $builder->build();
                            $script .= $this->fixNamespaceDeclarations($class);
                        }
                    }
                }
            }
        }

        if ($table->getInterface()) {
            $interface = $this->getConfig()->getConfiguredBuilder($table, 'interface')->build();
            $script .= $this->fixNamespaceDeclarations($interface);
        }

        if ($table->hasAdditionalBuilders()) {
            foreach ($table->getAdditionalBuilders() as $builderClass) {
                $builder = new $builderClass($table);
                $class = $builder->build();
                $script .= $this->fixNamespaceDeclarations($class);
            }
        }

        // Each builder emits its own opening tag; strip them so the pieces
        // concatenate into one valid script (buildClasses adds one tag).
        $script = str_replace('<?php', '', $script);

        return $script;
    }

    /**
     * Debug helper: prints the generated source for one named table.
     *
     * @param string $schema    XML schema string
     * @param string $tableName table whose classes should be dumped
     */
    public static function debugClassesForTable($schema, $tableName)
    {
        $builder = new self;
        $builder->setSchema($schema);
        foreach ($builder->getDatabase()->getTables() as $table) {
            if ($table->getName() == $tableName) {
                echo $builder->getClassesForTable($table);
            }
        }
    }

    /**
     * Rewrites "namespace Foo;" declarations into the braced form
     * "namespace Foo { ... }" so multiple namespaced classes can live in the
     * single concatenated file, and strips comments along the way.
     *
     * @see https://github.com/symfony/symfony/blob/master/src/Symfony/Component/ClassLoader/ClassCollectionLoader.php
     *
     * @param string $source
     *
     * @return string
     */
    public function fixNamespaceDeclarations($source)
    {
        $source = $this->forceNamespace($source);

        // Without the tokenizer extension we cannot rewrite safely.
        if (!function_exists('token_get_all')) {
            return $source;
        }
        $output = '';
        $inNamespace = false;
        $tokens = token_get_all($source);

        for ($i = 0, $max = count($tokens); $i < $max; $i++) {
            $token = $tokens[$i];
            if (is_string($token)) {
                // Single-character token (brace, semicolon, ...): keep as-is.
                $output .= $token;
            } elseif (in_array($token[0], array(T_COMMENT, T_DOC_COMMENT))) {
                // strip comments
                continue;
            } elseif (T_NAMESPACE === $token[0]) {
                // Close the previous braced namespace before opening a new one.
                if ($inNamespace) {
                    $output .= "}\n";
                }
                $output .= $token[1];

                // namespace name and whitespaces
                while (($t = $tokens[++$i]) && is_array($t) && in_array($t[0], array(T_WHITESPACE, T_NS_SEPARATOR, T_STRING))) {
                    $output .= $t[1];
                }
                if (is_string($t) && '{' === $t) {
                    // Already braced form; re-process the "{" normally.
                    $inNamespace = false;
                    --$i;
                } else {
                    // Semicolon form: replace the ";" with an opening brace.
                    $output .= "\n{";
                    $inNamespace = true;
                }
            } else {
                $output .= $token[1];
            }
        }

        if ($inNamespace) {
            $output .= "}\n";
        }

        return $output;
    }

    /**
     * Prevent generated class without namespace to fail.
     *
     * Wraps namespace-less code in an anonymous "namespace { ... }" block so
     * it can be concatenated with namespaced code.
     *
     * @param string $code
     * @return string
     */
    protected function forceNamespace($code)
    {
        if (0 === preg_match('/\nnamespace/', $code)) {
            return "\nnamespace\n{\n" . $code . "\n}\n";
        }

        return $code;
    }
}
mahedi2014/macroHock
vendor/propel/propel/src/Propel/Generator/Util/QuickBuilder.php
PHP
mit
9,605
# frozen_string_literal: true RSpec.describe RuboCop::Cop::Lint::UnusedBlockArgument, :config do subject(:cop) { described_class.new(config) } let(:cop_config) { { 'AllowUnusedKeywordArguments' => false } } shared_examples 'auto-correction' do |name, old_source, new_source| it "auto-corrects #{name}" do corrected_source = autocorrect_source(old_source) expect(corrected_source).to eq(new_source) end end context 'inspection' do context 'when a block takes multiple arguments' do context 'and an argument is unused' do it 'registers an offense' do message = "Unused block argument - `value`. If it's " \ 'necessary, use `_` or `_value` as an argument ' \ "name to indicate that it won't be used." expect_offense(<<~RUBY) hash.each do |key, value| ^^^^^ #{message} puts key end RUBY end end context 'and arguments are swap-assigned' do it 'accepts' do expect_no_offenses(<<~RUBY) hash.each do |key, value| key, value = value, key end RUBY end end context "and one argument is assigned to another, whilst other's value " \ 'is not used' do it 'registers an offense' do message = 'Unused block argument - `key`. ' \ "If it's necessary, use `_` or `_key` as an argument " \ "name to indicate that it won't be used." expect_offense(<<~RUBY) hash.each do |key, value| ^^^ #{message} key, value = value, 42 end RUBY end end context 'and all the arguments are unused' do it 'registers offenses and suggests omitting them' do (key_message, value_message) = %w[key value].map do |arg| "Unused block argument - `#{arg}`. You can omit all the " \ "arguments if you don't care about them." end expect_offense(<<~RUBY) hash = { foo: 'FOO', bar: 'BAR' } hash.each do |key, value| ^^^^^ #{value_message} ^^^ #{key_message} puts :something end RUBY end end end context 'when a block takes single argument' do context 'and the argument is unused' do it 'registers an offense and suggests omitting that' do message = 'Unused block argument - `index`. ' \ "You can omit the argument if you don't care about it." 
expect_offense(<<~RUBY) 1.times do |index| ^^^^^ #{message} puts :something end RUBY end end context 'and the method call is `define_method`' do it 'registers an offense' do message = 'Unused block argument - `bar`. ' \ "If it's necessary, use `_` or `_bar` as an argument " \ "name to indicate that it won't be used." expect_offense(<<~RUBY) define_method(:foo) do |bar| ^^^ #{message} puts 'baz' end RUBY end end end context 'when a block have a block local variable' do context 'and the variable is unused' do it 'registers an offense' do expect_offense(<<~RUBY) 1.times do |index; block_local_variable| ^^^^^^^^^^^^^^^^^^^^ Unused block local variable - `block_local_variable`. puts index end RUBY end end end context 'when a lambda block takes arguments' do context 'and all the arguments are unused' do it 'registers offenses and suggests using a proc' do (foo_message, bar_message) = %w[foo bar].map do |arg| "Unused block argument - `#{arg}`. " \ "If it's necessary, use `_` or `_#{arg}` as an argument name " \ "to indicate that it won't be used. " \ 'Also consider using a proc without arguments instead of a ' \ "lambda if you want it to accept any arguments but don't care " \ 'about them.' end expect_offense(<<~RUBY) -> (foo, bar) { do_something } ^^^ #{bar_message} ^^^ #{foo_message} RUBY end end context 'and an argument is unused' do it 'registers an offense' do message = 'Unused block argument - `foo`. ' \ "If it's necessary, use `_` or `_foo` as an argument " \ "name to indicate that it won't be used." expect_offense(<<~RUBY) -> (foo, bar) { puts bar } ^^^ #{message} RUBY end end end context 'when an underscore-prefixed block argument is not used' do it 'accepts' do expect_no_offenses(<<~RUBY) 1.times do |_index| puts 'foo' end RUBY end end context 'when an optional keyword argument is unused' do context 'when the method call is `define_method`' do it 'registers an offense' do message = 'Unused block argument - `bar`. 
' \ "If it's necessary, use `_` or `_bar` as an argument name " \ "to indicate that it won't be used." expect_offense(<<~RUBY) define_method(:foo) do |bar: 'default'| ^^^ #{message} puts 'bar' end RUBY end context 'when AllowUnusedKeywordArguments set' do let(:cop_config) { { 'AllowUnusedKeywordArguments' => true } } it 'does not care' do expect_no_offenses(<<~RUBY) define_method(:foo) do |bar: 'default'| puts 'bar' end RUBY end end end context 'when the method call is not `define_method`' do it 'registers an offense' do message = 'Unused block argument - `bar`. ' \ "You can omit the argument if you don't care about it." expect_offense(<<~RUBY) foo(:foo) do |bar: 'default'| ^^^ #{message} puts 'bar' end RUBY end context 'when AllowUnusedKeywordArguments set' do let(:cop_config) { { 'AllowUnusedKeywordArguments' => true } } it 'does not care' do expect_no_offenses(<<~RUBY) foo(:foo) do |bar: 'default'| puts 'bar' end RUBY end end end end context 'when a method argument is not used' do it 'does not care' do expect_no_offenses(<<~RUBY) def some_method(foo) end RUBY end end context 'when a variable is not used' do it 'does not care' do expect_no_offenses(<<~RUBY) 1.times do foo = 1 end RUBY end end context 'in a method calling `binding` without arguments' do it 'accepts all arguments' do expect_no_offenses(<<~RUBY) test do |key, value| puts something(binding) end RUBY end context 'inside a method definition' do it 'registers offenses' do (key_message, value_message) = %w[key value].map do |arg| "Unused block argument - `#{arg}`. You can omit all the " \ "arguments if you don't care about them." end expect_offense(<<~RUBY) test do |key, value| ^^^^^ #{value_message} ^^^ #{key_message} def other(a) puts something(binding) end end RUBY end end end context 'in a method calling `binding` with arguments' do context 'when a method argument is unused' do it 'registers an offense' do (key_message, value_message) = %w[key value].map do |arg| "Unused block argument - `#{arg}`. 
You can omit all the " \ "arguments if you don't care about them." end expect_offense(<<~RUBY) test do |key, value| ^^^^^ #{value_message} ^^^ #{key_message} puts something(binding(:other)) end RUBY end end end context 'with an empty block' do context 'when not configured to ignore empty blocks' do let(:cop_config) { { 'IgnoreEmptyBlocks' => false } } it 'registers an offense' do message = 'Unused block argument - `bar`. You can omit the ' \ "argument if you don't care about it." expect_offense(<<~RUBY) super { |bar| } ^^^ #{message} RUBY end end context 'when configured to ignore empty blocks' do let(:cop_config) { { 'IgnoreEmptyBlocks' => true } } it 'does not register an offense' do expect_no_offenses('super { |bar| }') end end end end context 'auto-correct' do it_behaves_like( 'auto-correction', 'fixes single', 'arr.map { |foo| stuff }', 'arr.map { |_foo| stuff }' ) it_behaves_like( 'auto-correction', 'fixes multiple', 'hash.map { |key, val| stuff }', 'hash.map { |_key, _val| stuff }' ) it_behaves_like( 'auto-correction', 'preserves whitespace', <<-SOURCE, hash.map { |key, val| stuff } SOURCE <<-CORRECTED_SOURCE hash.map { |_key, _val| stuff } CORRECTED_SOURCE ) it_behaves_like( 'auto-correction', 'preserves splat', 'obj.method { |foo, *bars, baz| stuff(foo, baz) }', 'obj.method { |foo, *_bars, baz| stuff(foo, baz) }' ) it_behaves_like( 'auto-correction', 'preserves default', 'obj.method { |foo, bar = baz| stuff(foo) }', 'obj.method { |foo, _bar = baz| stuff(foo) }' ) it 'ignores used arguments' do original_source = 'obj.method { |foo, baz| stuff(foo, baz) }' expect(autocorrect_source(original_source)).to eq(original_source) end end context 'when IgnoreEmptyBlocks config parameter is set' do let(:cop_config) { { 'IgnoreEmptyBlocks' => true } } it 'accepts an empty block with a single unused parameter' do expect_no_offenses('->(arg) { }') end it 'registers an offense for a non-empty block with an unused parameter' do message = "Unused block argument - `arg`. 
If it's necessary, use `_` " \ "or `_arg` as an argument name to indicate that it won't " \ 'be used. Also consider using a proc without arguments ' \ 'instead of a lambda if you want it to accept any arguments ' \ "but don't care about them." expect_offense(<<~RUBY) ->(arg) { 1 } ^^^ #{message} RUBY end it 'accepts an empty block with multiple unused parameters' do expect_no_offenses('->(arg1, arg2, *others) { }') end it 'registers an offense for a non-empty block with multiple unused args' do (arg1_message, arg2_message, others_message) = %w[arg1 arg2 others] .map do |arg| "Unused block argument - `#{arg}`. If it's necessary, use `_` or " \ "`_#{arg}` as an argument name to indicate that it won't be used. " \ 'Also consider using a proc without arguments instead of a lambda ' \ "if you want it to accept any arguments but don't care about them." end expect_offense(<<~RUBY) ->(arg1, arg2, *others) { 1 } ^^^^^^ #{others_message} ^^^^ #{arg2_message} ^^^^ #{arg1_message} RUBY end end end
vergenzt/rubocop
spec/rubocop/cop/lint/unused_block_argument_spec.rb
Ruby
mit
12,384
"""Python 2/3 compatibility shims for the vendored CacheControl package.

Each ``try``/``except`` block prefers the Python 3 location (or name) and
falls back to the Python 2 equivalent, so the rest of the package can import
everything it needs from this one module regardless of interpreter version.
"""

# urljoin moved from urlparse (py2) to urllib.parse (py3).
try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin


# Prefer the C-accelerated pickle on py2; py3's pickle already uses the
# C implementation when available.
try:
    import cPickle as pickle
except ImportError:
    import pickle


# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
except ImportError:
    from pip._vendor.urllib3.response import HTTPResponse

try:
    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
except ImportError:
    from pip._vendor.urllib3.util import is_fp_closed

# Replicate some six behaviour
# On py2 the text type is ``unicode``; on py3 that name no longer exists
# (NameError), so fall back to ``str``.
try:
    text_type = unicode
except NameError:
    text_type = str
ncos/lisa
src/lisa_drive/scripts/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/compat.py
Python
mit
724
// Copyright 2012 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // This file relies on the fact that the following declarations have been made // in runtime.js: // var $Object = global.Object; // var $Boolean = global.Boolean; // var $Number = global.Number; // var $Function = global.Function; // var $Array = global.Array; // // in math.js: // var $floor = MathFloor var $isNaN = GlobalIsNaN; var $isFinite = GlobalIsFinite; // ---------------------------------------------------------------------------- // Helper function used to install functions on objects. function InstallFunctions(object, attributes, functions) { if (functions.length >= 8) { %OptimizeObjectForAddingMultipleProperties(object, functions.length >> 1); } for (var i = 0; i < functions.length; i += 2) { var key = functions[i]; var f = functions[i + 1]; %FunctionSetName(f, key); %FunctionRemovePrototype(f); %AddNamedProperty(object, key, f, attributes); %SetNativeFlag(f); } %ToFastProperties(object); } // Helper function to install a getter-only accessor property. function InstallGetter(object, name, getter) { %FunctionSetName(getter, name); %FunctionRemovePrototype(getter); %DefineAccessorPropertyUnchecked(object, name, getter, null, DONT_ENUM); %SetNativeFlag(getter); } // Helper function to install a getter/setter accessor property. function InstallGetterSetter(object, name, getter, setter) { %FunctionSetName(getter, name); %FunctionSetName(setter, name); %FunctionRemovePrototype(getter); %FunctionRemovePrototype(setter); %DefineAccessorPropertyUnchecked(object, name, getter, setter, DONT_ENUM); %SetNativeFlag(getter); %SetNativeFlag(setter); } // Helper function for installing constant properties on objects. 
function InstallConstants(object, constants) { if (constants.length >= 4) { %OptimizeObjectForAddingMultipleProperties(object, constants.length >> 1); } var attributes = DONT_ENUM | DONT_DELETE | READ_ONLY; for (var i = 0; i < constants.length; i += 2) { var name = constants[i]; var k = constants[i + 1]; %AddNamedProperty(object, name, k, attributes); } %ToFastProperties(object); } // Prevents changes to the prototype of a built-in function. // The "prototype" property of the function object is made non-configurable, // and the prototype object is made non-extensible. The latter prevents // changing the __proto__ property. function SetUpLockedPrototype(constructor, fields, methods) { %CheckIsBootstrapping(); var prototype = constructor.prototype; // Install functions first, because this function is used to initialize // PropertyDescriptor itself. var property_count = (methods.length >> 1) + (fields ? fields.length : 0); if (property_count >= 4) { %OptimizeObjectForAddingMultipleProperties(prototype, property_count); } if (fields) { for (var i = 0; i < fields.length; i++) { %AddNamedProperty(prototype, fields[i], UNDEFINED, DONT_ENUM | DONT_DELETE); } } for (var i = 0; i < methods.length; i += 2) { var key = methods[i]; var f = methods[i + 1]; %AddNamedProperty(prototype, key, f, DONT_ENUM | DONT_DELETE | READ_ONLY); %SetNativeFlag(f); } %InternalSetPrototype(prototype, null); %ToFastProperties(prototype); } // ---------------------------------------------------------------------------- // ECMA 262 - 15.1.4 function GlobalIsNaN(number) { if (!IS_NUMBER(number)) number = NonNumberToNumber(number); return NUMBER_IS_NAN(number); } // ECMA 262 - 15.1.5 function GlobalIsFinite(number) { if (!IS_NUMBER(number)) number = NonNumberToNumber(number); return NUMBER_IS_FINITE(number); } // ECMA-262 - 15.1.2.2 function GlobalParseInt(string, radix) { if (IS_UNDEFINED(radix) || radix === 10 || radix === 0) { // Some people use parseInt instead of Math.floor. 
This // optimization makes parseInt on a Smi 12 times faster (60ns // vs 800ns). The following optimization makes parseInt on a // non-Smi number 9 times faster (230ns vs 2070ns). Together // they make parseInt on a string 1.4% slower (274ns vs 270ns). if (%_IsSmi(string)) return string; if (IS_NUMBER(string) && ((0.01 < string && string < 1e9) || (-1e9 < string && string < -0.01))) { // Truncate number. return string | 0; } string = TO_STRING_INLINE(string); radix = radix | 0; } else { // The spec says ToString should be evaluated before ToInt32. string = TO_STRING_INLINE(string); radix = TO_INT32(radix); if (!(radix == 0 || (2 <= radix && radix <= 36))) { return NAN; } } if (%_HasCachedArrayIndex(string) && (radix == 0 || radix == 10)) { return %_GetCachedArrayIndex(string); } return %StringParseInt(string, radix); } // ECMA-262 - 15.1.2.3 function GlobalParseFloat(string) { string = TO_STRING_INLINE(string); if (%_HasCachedArrayIndex(string)) return %_GetCachedArrayIndex(string); return %StringParseFloat(string); } function GlobalEval(x) { if (!IS_STRING(x)) return x; // For consistency with JSC we require the global object passed to // eval to be the global object from which 'eval' originated. This // is not mandated by the spec. // We only throw if the global has been detached, since we need the // receiver as this-value for the call. if (!%IsAttachedGlobal(global)) { throw new $EvalError('The "this" value passed to eval must ' + 'be the global object from which eval originated'); } var global_proxy = %GlobalProxy(global); var f = %CompileString(x, false); if (!IS_FUNCTION(f)) return f; return %_CallFunction(global_proxy, f); } // ---------------------------------------------------------------------------- // Set up global object. function SetUpGlobal() { %CheckIsBootstrapping(); var attributes = DONT_ENUM | DONT_DELETE | READ_ONLY; // ECMA 262 - 15.1.1.1. %AddNamedProperty(global, "NaN", NAN, attributes); // ECMA-262 - 15.1.1.2. 
%AddNamedProperty(global, "Infinity", INFINITY, attributes); // ECMA-262 - 15.1.1.3. %AddNamedProperty(global, "undefined", UNDEFINED, attributes); // Set up non-enumerable function on the global object. InstallFunctions(global, DONT_ENUM, $Array( "isNaN", GlobalIsNaN, "isFinite", GlobalIsFinite, "parseInt", GlobalParseInt, "parseFloat", GlobalParseFloat, "eval", GlobalEval )); } SetUpGlobal(); // ---------------------------------------------------------------------------- // Object // ECMA-262 - 15.2.4.2 function ObjectToString() { if (IS_UNDEFINED(this) && !IS_UNDETECTABLE(this)) return "[object Undefined]"; if (IS_NULL(this)) return "[object Null]"; return "[object " + %_ClassOf(ToObject(this)) + "]"; } // ECMA-262 - 15.2.4.3 function ObjectToLocaleString() { CHECK_OBJECT_COERCIBLE(this, "Object.prototype.toLocaleString"); return this.toString(); } // ECMA-262 - 15.2.4.4 function ObjectValueOf() { return ToObject(this); } // ECMA-262 - 15.2.4.5 function ObjectHasOwnProperty(V) { if (%IsJSProxy(this)) { // TODO(rossberg): adjust once there is a story for symbols vs proxies. if (IS_SYMBOL(V)) return false; var handler = %GetHandler(this); return CallTrap1(handler, "hasOwn", DerivedHasOwnTrap, ToName(V)); } return %HasOwnProperty(TO_OBJECT_INLINE(this), ToName(V)); } // ECMA-262 - 15.2.4.6 function ObjectIsPrototypeOf(V) { CHECK_OBJECT_COERCIBLE(this, "Object.prototype.isPrototypeOf"); if (!IS_SPEC_OBJECT(V)) return false; return %IsInPrototypeChain(this, V); } // ECMA-262 - 15.2.4.6 function ObjectPropertyIsEnumerable(V) { var P = ToName(V); if (%IsJSProxy(this)) { // TODO(rossberg): adjust once there is a story for symbols vs proxies. if (IS_SYMBOL(V)) return false; var desc = GetOwnPropertyJS(this, P); return IS_UNDEFINED(desc) ? false : desc.isEnumerable(); } return %IsPropertyEnumerable(ToObject(this), P); } // Extensions for providing property getters and setters. 
function ObjectDefineGetter(name, fun) { var receiver = this; if (receiver == null && !IS_UNDETECTABLE(receiver)) { receiver = %GlobalProxy(global); } if (!IS_SPEC_FUNCTION(fun)) { throw new $TypeError( 'Object.prototype.__defineGetter__: Expecting function'); } var desc = new PropertyDescriptor(); desc.setGet(fun); desc.setEnumerable(true); desc.setConfigurable(true); DefineOwnProperty(ToObject(receiver), ToName(name), desc, false); } function ObjectLookupGetter(name) { var receiver = this; if (receiver == null && !IS_UNDETECTABLE(receiver)) { receiver = %GlobalProxy(global); } return %LookupAccessor(ToObject(receiver), ToName(name), GETTER); } function ObjectDefineSetter(name, fun) { var receiver = this; if (receiver == null && !IS_UNDETECTABLE(receiver)) { receiver = %GlobalProxy(global); } if (!IS_SPEC_FUNCTION(fun)) { throw new $TypeError( 'Object.prototype.__defineSetter__: Expecting function'); } var desc = new PropertyDescriptor(); desc.setSet(fun); desc.setEnumerable(true); desc.setConfigurable(true); DefineOwnProperty(ToObject(receiver), ToName(name), desc, false); } function ObjectLookupSetter(name) { var receiver = this; if (receiver == null && !IS_UNDETECTABLE(receiver)) { receiver = %GlobalProxy(global); } return %LookupAccessor(ToObject(receiver), ToName(name), SETTER); } function ObjectKeys(obj) { if (!IS_SPEC_OBJECT(obj)) { throw MakeTypeError("called_on_non_object", ["Object.keys"]); } if (%IsJSProxy(obj)) { var handler = %GetHandler(obj); var names = CallTrap0(handler, "keys", DerivedKeysTrap); return ToNameArray(names, "keys", false); } return %OwnKeys(obj); } // ES5 8.10.1. function IsAccessorDescriptor(desc) { if (IS_UNDEFINED(desc)) return false; return desc.hasGetter() || desc.hasSetter(); } // ES5 8.10.2. function IsDataDescriptor(desc) { if (IS_UNDEFINED(desc)) return false; return desc.hasValue() || desc.hasWritable(); } // ES5 8.10.3. 
function IsGenericDescriptor(desc) { if (IS_UNDEFINED(desc)) return false; return !(IsAccessorDescriptor(desc) || IsDataDescriptor(desc)); } function IsInconsistentDescriptor(desc) { return IsAccessorDescriptor(desc) && IsDataDescriptor(desc); } // ES5 8.10.4 function FromPropertyDescriptor(desc) { if (IS_UNDEFINED(desc)) return desc; if (IsDataDescriptor(desc)) { return { value: desc.getValue(), writable: desc.isWritable(), enumerable: desc.isEnumerable(), configurable: desc.isConfigurable() }; } // Must be an AccessorDescriptor then. We never return a generic descriptor. return { get: desc.getGet(), set: desc.getSet(), enumerable: desc.isEnumerable(), configurable: desc.isConfigurable() }; } // Harmony Proxies function FromGenericPropertyDescriptor(desc) { if (IS_UNDEFINED(desc)) return desc; var obj = new $Object(); if (desc.hasValue()) { %AddNamedProperty(obj, "value", desc.getValue(), NONE); } if (desc.hasWritable()) { %AddNamedProperty(obj, "writable", desc.isWritable(), NONE); } if (desc.hasGetter()) { %AddNamedProperty(obj, "get", desc.getGet(), NONE); } if (desc.hasSetter()) { %AddNamedProperty(obj, "set", desc.getSet(), NONE); } if (desc.hasEnumerable()) { %AddNamedProperty(obj, "enumerable", desc.isEnumerable(), NONE); } if (desc.hasConfigurable()) { %AddNamedProperty(obj, "configurable", desc.isConfigurable(), NONE); } return obj; } // ES5 8.10.5. 
function ToPropertyDescriptor(obj) { if (!IS_SPEC_OBJECT(obj)) { throw MakeTypeError("property_desc_object", [obj]); } var desc = new PropertyDescriptor(); if ("enumerable" in obj) { desc.setEnumerable(ToBoolean(obj.enumerable)); } if ("configurable" in obj) { desc.setConfigurable(ToBoolean(obj.configurable)); } if ("value" in obj) { desc.setValue(obj.value); } if ("writable" in obj) { desc.setWritable(ToBoolean(obj.writable)); } if ("get" in obj) { var get = obj.get; if (!IS_UNDEFINED(get) && !IS_SPEC_FUNCTION(get)) { throw MakeTypeError("getter_must_be_callable", [get]); } desc.setGet(get); } if ("set" in obj) { var set = obj.set; if (!IS_UNDEFINED(set) && !IS_SPEC_FUNCTION(set)) { throw MakeTypeError("setter_must_be_callable", [set]); } desc.setSet(set); } if (IsInconsistentDescriptor(desc)) { throw MakeTypeError("value_and_accessor", [obj]); } return desc; } // For Harmony proxies. function ToCompletePropertyDescriptor(obj) { var desc = ToPropertyDescriptor(obj); if (IsGenericDescriptor(desc) || IsDataDescriptor(desc)) { if (!desc.hasValue()) desc.setValue(UNDEFINED); if (!desc.hasWritable()) desc.setWritable(false); } else { // Is accessor descriptor. if (!desc.hasGetter()) desc.setGet(UNDEFINED); if (!desc.hasSetter()) desc.setSet(UNDEFINED); } if (!desc.hasEnumerable()) desc.setEnumerable(false); if (!desc.hasConfigurable()) desc.setConfigurable(false); return desc; } function PropertyDescriptor() { // Initialize here so they are all in-object and have the same map. // Default values from ES5 8.6.1. 
this.value_ = UNDEFINED; this.hasValue_ = false; this.writable_ = false; this.hasWritable_ = false; this.enumerable_ = false; this.hasEnumerable_ = false; this.configurable_ = false; this.hasConfigurable_ = false; this.get_ = UNDEFINED; this.hasGetter_ = false; this.set_ = UNDEFINED; this.hasSetter_ = false; } SetUpLockedPrototype(PropertyDescriptor, $Array( "value_", "hasValue_", "writable_", "hasWritable_", "enumerable_", "hasEnumerable_", "configurable_", "hasConfigurable_", "get_", "hasGetter_", "set_", "hasSetter_" ), $Array( "toString", function() { return "[object PropertyDescriptor]"; }, "setValue", function(value) { this.value_ = value; this.hasValue_ = true; }, "getValue", function() { return this.value_; }, "hasValue", function() { return this.hasValue_; }, "setEnumerable", function(enumerable) { this.enumerable_ = enumerable; this.hasEnumerable_ = true; }, "isEnumerable", function () { return this.enumerable_; }, "hasEnumerable", function() { return this.hasEnumerable_; }, "setWritable", function(writable) { this.writable_ = writable; this.hasWritable_ = true; }, "isWritable", function() { return this.writable_; }, "hasWritable", function() { return this.hasWritable_; }, "setConfigurable", function(configurable) { this.configurable_ = configurable; this.hasConfigurable_ = true; }, "hasConfigurable", function() { return this.hasConfigurable_; }, "isConfigurable", function() { return this.configurable_; }, "setGet", function(get) { this.get_ = get; this.hasGetter_ = true; }, "getGet", function() { return this.get_; }, "hasGetter", function() { return this.hasGetter_; }, "setSet", function(set) { this.set_ = set; this.hasSetter_ = true; }, "getSet", function() { return this.set_; }, "hasSetter", function() { return this.hasSetter_; })); // Converts an array returned from Runtime_GetOwnProperty to an actual // property descriptor. For a description of the array layout please // see the runtime.cc file. 
// Converts the array produced by %GetOwnProperty (index constants come from
// macros.py) into a PropertyDescriptor.  desc_array === false signals a
// failed access check upstream and must never reach this function.
function ConvertDescriptorArrayToDescriptor(desc_array) {
  if (desc_array === false) {
    throw 'Internal error: invalid desc_array';
  }

  if (IS_UNDEFINED(desc_array)) {
    return UNDEFINED;
  }

  var desc = new PropertyDescriptor();
  // This is an accessor.
  if (desc_array[IS_ACCESSOR_INDEX]) {
    desc.setGet(desc_array[GETTER_INDEX]);
    desc.setSet(desc_array[SETTER_INDEX]);
  } else {
    desc.setValue(desc_array[VALUE_INDEX]);
    desc.setWritable(desc_array[WRITABLE_INDEX]);
  }
  desc.setEnumerable(desc_array[ENUMERABLE_INDEX]);
  desc.setConfigurable(desc_array[CONFIGURABLE_INDEX]);

  return desc;
}


// For Harmony proxies.
// Looks up trap `name` on a proxy handler, falling back to defaultTrap;
// throws if the trap is missing (with no default) or not callable.
function GetTrap(handler, name, defaultTrap) {
  var trap = handler[name];
  if (IS_UNDEFINED(trap)) {
    if (IS_UNDEFINED(defaultTrap)) {
      throw MakeTypeError("handler_trap_missing", [handler, name]);
    }
    trap = defaultTrap;
  } else if (!IS_SPEC_FUNCTION(trap)) {
    throw MakeTypeError("handler_trap_must_be_callable", [handler, name]);
  }
  return trap;
}


// Invoke a proxy trap with 0, 1 or 2 arguments, handler as receiver.
function CallTrap0(handler, name, defaultTrap) {
  return %_CallFunction(handler, GetTrap(handler, name, defaultTrap));
}


function CallTrap1(handler, name, defaultTrap, x) {
  return %_CallFunction(handler, x, GetTrap(handler, name, defaultTrap));
}


function CallTrap2(handler, name, defaultTrap, x, y) {
  return %_CallFunction(handler, x, y, GetTrap(handler, name, defaultTrap));
}


// ES5 section 8.12.1.
function GetOwnPropertyJS(obj, v) {
  var p = ToName(v);
  if (%IsJSProxy(obj)) {
    // TODO(rossberg): adjust once there is a story for symbols vs proxies.
    if (IS_SYMBOL(v)) return UNDEFINED;

    var handler = %GetHandler(obj);
    var descriptor = CallTrap1(
                         handler, "getOwnPropertyDescriptor", UNDEFINED, p);
    if (IS_UNDEFINED(descriptor)) return descriptor;
    var desc = ToCompletePropertyDescriptor(descriptor);
    if (!desc.isConfigurable()) {
      throw MakeTypeError("proxy_prop_not_configurable",
                          [handler, "getOwnPropertyDescriptor", p, descriptor]);
    }
    return desc;
  }

  // GetOwnProperty returns an array indexed by the constants
  // defined in macros.py.
  // If p is not a property on obj undefined is returned.
  var props = %GetOwnProperty(ToObject(obj), p);

  // A false value here means that access checks failed.
  if (props === false) return UNDEFINED;

  return ConvertDescriptorArrayToDescriptor(props);
}


// ES5 section 8.12.7.
function Delete(obj, p, should_throw) {
  var desc = GetOwnPropertyJS(obj, p);
  if (IS_UNDEFINED(desc)) return true;
  if (desc.isConfigurable()) {
    %DeleteProperty(obj, p, 0);
    return true;
  } else if (should_throw) {
    throw MakeTypeError("define_disallowed", [p]);
  } else {
    // NOTE(review): this path returns undefined (falsy), not false; callers
    // only appear to test truthiness — confirm before changing.
    return;
  }
}


// Harmony proxies.
// Routes a [[DefineOwnProperty]] through the proxy's "defineProperty" trap.
function DefineProxyProperty(obj, p, attributes, should_throw) {
  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (IS_SYMBOL(p)) return false;

  var handler = %GetHandler(obj);
  var result = CallTrap2(handler, "defineProperty", UNDEFINED, p, attributes);
  if (!ToBoolean(result)) {
    if (should_throw) {
      throw MakeTypeError("handler_returned_false",
                          [handler, "defineProperty"]);
    } else {
      return false;
    }
  }
  return true;
}


// ES5 8.12.9.
// Ordinary-object [[DefineOwnProperty]]: validates the requested change
// against the current descriptor following the spec's numbered steps, then
// installs the property via the unchecked runtime entry points.
function DefineObjectProperty(obj, p, desc, should_throw) {
  var current_or_access = %GetOwnProperty(ToObject(obj), ToName(p));
  // A false value here means that access checks failed.
  // NOTE(review): this path returns UNDEFINED while other failure paths
  // return false; both are falsy to callers — confirm before relying on
  // the distinction.
  if (current_or_access === false) return UNDEFINED;

  var current = ConvertDescriptorArrayToDescriptor(current_or_access);
  var extensible = %IsExtensible(ToObject(obj));

  // Error handling according to spec.
  // Step 3
  if (IS_UNDEFINED(current) && !extensible) {
    if (should_throw) {
      throw MakeTypeError("define_disallowed", [p]);
    } else {
      return false;
    }
  }

  if (!IS_UNDEFINED(current)) {
    // Step 5 and 6
    if ((IsGenericDescriptor(desc) ||
         IsDataDescriptor(desc) == IsDataDescriptor(current)) &&
        (!desc.hasEnumerable() ||
         SameValue(desc.isEnumerable(), current.isEnumerable())) &&
        (!desc.hasConfigurable() ||
         SameValue(desc.isConfigurable(), current.isConfigurable())) &&
        (!desc.hasWritable() ||
         SameValue(desc.isWritable(), current.isWritable())) &&
        (!desc.hasValue() ||
         SameValue(desc.getValue(), current.getValue())) &&
        (!desc.hasGetter() ||
         SameValue(desc.getGet(), current.getGet())) &&
        (!desc.hasSetter() ||
         SameValue(desc.getSet(), current.getSet()))) {
      return true;
    }
    if (!current.isConfigurable()) {
      // Step 7
      if (desc.isConfigurable() ||
          (desc.hasEnumerable() &&
           desc.isEnumerable() != current.isEnumerable())) {
        if (should_throw) {
          throw MakeTypeError("redefine_disallowed", [p]);
        } else {
          return false;
        }
      }
      // Step 8
      if (!IsGenericDescriptor(desc)) {
        // Step 9a
        if (IsDataDescriptor(current) != IsDataDescriptor(desc)) {
          if (should_throw) {
            throw MakeTypeError("redefine_disallowed", [p]);
          } else {
            return false;
          }
        }
        // Step 10a
        if (IsDataDescriptor(current) && IsDataDescriptor(desc)) {
          if (!current.isWritable() && desc.isWritable()) {
            if (should_throw) {
              throw MakeTypeError("redefine_disallowed", [p]);
            } else {
              return false;
            }
          }
          if (!current.isWritable() && desc.hasValue() &&
              !SameValue(desc.getValue(), current.getValue())) {
            if (should_throw) {
              throw MakeTypeError("redefine_disallowed", [p]);
            } else {
              return false;
            }
          }
        }
        // Step 11
        if (IsAccessorDescriptor(desc) && IsAccessorDescriptor(current)) {
          if (desc.hasSetter() && !SameValue(desc.getSet(), current.getSet())) {
            if (should_throw) {
              throw MakeTypeError("redefine_disallowed", [p]);
            } else {
              return false;
            }
          }
          if (desc.hasGetter() && !SameValue(desc.getGet(),current.getGet())) {
            if (should_throw) {
              throw MakeTypeError("redefine_disallowed", [p]);
            } else {
              return false;
            }
          }
        }
      }
    }
  }

  // Send flags - enumerable and configurable are common - writable is
  // only send to the data descriptor.
  // Take special care if enumerable and configurable is not defined on
  // desc (we need to preserve the existing values from current).
  var flag = NONE;
  if (desc.hasEnumerable()) {
    flag |= desc.isEnumerable() ? 0 : DONT_ENUM;
  } else if (!IS_UNDEFINED(current)) {
    flag |= current.isEnumerable() ? 0 : DONT_ENUM;
  } else {
    flag |= DONT_ENUM;
  }

  if (desc.hasConfigurable()) {
    flag |= desc.isConfigurable() ? 0 : DONT_DELETE;
  } else if (!IS_UNDEFINED(current)) {
    flag |= current.isConfigurable() ? 0 : DONT_DELETE;
  } else
    flag |= DONT_DELETE;

  if (IsDataDescriptor(desc) ||
      (IsGenericDescriptor(desc) &&
       (IS_UNDEFINED(current) || IsDataDescriptor(current)))) {
    // There are 3 cases that lead here:
    // Step 4a - defining a new data property.
    // Steps 9b & 12 - replacing an existing accessor property with a data
    //                 property.
    // Step 12 - updating an existing data property with a data or generic
    //           descriptor.
    if (desc.hasWritable()) {
      flag |= desc.isWritable() ? 0 : READ_ONLY;
    } else if (!IS_UNDEFINED(current)) {
      flag |= current.isWritable() ? 0 : READ_ONLY;
    } else {
      flag |= READ_ONLY;
    }

    var value = UNDEFINED;  // Default value is undefined.
    if (desc.hasValue()) {
      value = desc.getValue();
    } else if (!IS_UNDEFINED(current) && IsDataDescriptor(current)) {
      value = current.getValue();
    }

    %DefineDataPropertyUnchecked(obj, p, value, flag);
  } else {
    // There are 3 cases that lead here:
    // Step 4b - defining a new accessor property.
    // Steps 9c & 12 - replacing an existing data property with an accessor
    //                 property.
    // Step 12 - updating an existing accessor property with an accessor
    //           descriptor.
    var getter = desc.hasGetter() ? desc.getGet() : null;
    var setter = desc.hasSetter() ? desc.getSet() : null;
    %DefineAccessorPropertyUnchecked(obj, p, getter, setter, flag);
  }
  return true;
}


// ES5 section 15.4.5.1.
// ES5 section 15.4.5.1: [[DefineOwnProperty]] for Array exotic objects.
// Handles the magic "length" property (including truncation and
// Object.observe splice records) and array-index keys, deferring everything
// else to DefineObjectProperty.
function DefineArrayProperty(obj, p, desc, should_throw) {
  // Note that the length of an array is not actually stored as part of the
  // property, hence we use generated code throughout this function instead of
  // DefineObjectProperty() to modify its value.

  // Step 3 - Special handling for length property.
  if (p === "length") {
    var length = obj.length;
    var old_length = length;
    if (!desc.hasValue()) {
      return DefineObjectProperty(obj, "length", desc, should_throw);
    }
    var new_length = ToUint32(desc.getValue());
    if (new_length != ToNumber(desc.getValue())) {
      throw new $RangeError('defineProperty() array length out of range');
    }
    var length_desc = GetOwnPropertyJS(obj, "length");
    if (new_length != length && !length_desc.isWritable()) {
      if (should_throw) {
        throw MakeTypeError("redefine_disallowed", [p]);
      } else {
        return false;
      }
    }
    var threw = false;

    var emit_splice = %IsObserved(obj) && new_length !== old_length;
    var removed;
    if (emit_splice) {
      BeginPerformSplice(obj);
      removed = [];
      if (new_length < old_length) removed.length = old_length - new_length;
    }

    // Shrink the array by deleting trailing elements, highest index first.
    while (new_length < length--) {
      var index = ToString(length);
      if (emit_splice) {
        var deletedDesc = GetOwnPropertyJS(obj, index);
        if (deletedDesc && deletedDesc.hasValue())
          removed[length - new_length] = deletedDesc.getValue();
      }
      if (!Delete(obj, index, false)) {
        // A non-deletable element stops the truncation at index + 1.
        new_length = length + 1;
        threw = true;
        break;
      }
    }
    // Make sure the below call to DefineObjectProperty() doesn't overwrite
    // any magic "length" property by removing the value.
    // TODO(mstarzinger): This hack should be removed once we have addressed the
    // respective TODO in Runtime_DefineDataPropertyUnchecked.
    // For the time being, we need a hack to prevent Object.observe from
    // generating two change records.
    obj.length = new_length;
    desc.value_ = UNDEFINED;
    desc.hasValue_ = false;
    threw = !DefineObjectProperty(obj, "length", desc, should_throw) || threw;
    if (emit_splice) {
      EndPerformSplice(obj);
      EnqueueSpliceRecord(obj,
          new_length < old_length ? new_length : old_length,
          removed,
          new_length > old_length ? new_length - old_length : 0);
    }
    if (threw) {
      if (should_throw) {
        throw MakeTypeError("redefine_disallowed", [p]);
      } else {
        return false;
      }
    }
    return true;
  }

  // Step 4 - Special handling for array index.
  if (!IS_SYMBOL(p)) {
    var index = ToUint32(p);
    var emit_splice = false;
    // Canonical array index, excluding 2^32 - 1 (not a valid index).
    if (ToString(index) == p && index != 4294967295) {
      var length = obj.length;
      if (index >= length && %IsObserved(obj)) {
        emit_splice = true;
        BeginPerformSplice(obj);
      }

      var length_desc = GetOwnPropertyJS(obj, "length");
      if ((index >= length && !length_desc.isWritable()) ||
          !DefineObjectProperty(obj, p, desc, true)) {
        if (emit_splice)
          EndPerformSplice(obj);
        if (should_throw) {
          throw MakeTypeError("define_disallowed", [p]);
        } else {
          return false;
        }
      }
      if (index >= length) {
        obj.length = index + 1;
      }
      if (emit_splice) {
        EndPerformSplice(obj);
        EnqueueSpliceRecord(obj, length, [], index + 1 - length);
      }
      return true;
    }
  }

  // Step 5 - Fallback to default implementation.
  return DefineObjectProperty(obj, p, desc, should_throw);
}


// ES5 section 8.12.9, ES5 section 15.4.5.1 and Harmony proxies.
// Dispatches [[DefineOwnProperty]] to the proxy, array or ordinary path.
function DefineOwnProperty(obj, p, desc, should_throw) {
  if (%IsJSProxy(obj)) {
    // TODO(rossberg): adjust once there is a story for symbols vs proxies.
    if (IS_SYMBOL(p)) return false;

    var attributes = FromGenericPropertyDescriptor(desc);
    return DefineProxyProperty(obj, p, attributes, should_throw);
  } else if (IS_ARRAY(obj)) {
    return DefineArrayProperty(obj, p, desc, should_throw);
  } else {
    return DefineObjectProperty(obj, p, desc, should_throw);
  }
}


// ES5 section 15.2.3.2.
function ObjectGetPrototypeOf(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.getPrototypeOf"]);
  }
  return %GetPrototype(obj);
}


// ES6 section 19.1.2.19.
// ES6 section 19.1.2.19 Object.setPrototypeOf.
// Non-objects are returned unchanged (after coercibility check), matching
// the ES6 behavior for primitive receivers.
function ObjectSetPrototypeOf(obj, proto) {
  CHECK_OBJECT_COERCIBLE(obj, "Object.setPrototypeOf");

  if (proto !== null && !IS_SPEC_OBJECT(proto)) {
    throw MakeTypeError("proto_object_or_null", [proto]);
  }

  if (IS_SPEC_OBJECT(obj)) {
    %SetPrototype(obj, proto);
  }

  return obj;
}


// ES5 section 15.2.3.3
function ObjectGetOwnPropertyDescriptor(obj, p) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object",
                        ["Object.getOwnPropertyDescriptor"]);
  }
  var desc = GetOwnPropertyJS(obj, p);
  return FromPropertyDescriptor(desc);
}


// For Harmony proxies
// Converts a trap result (array-like of property keys) into a proper array,
// rejecting duplicates and optionally filtering out symbols.
function ToNameArray(obj, trap, includeSymbols) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("proxy_non_object_prop_names", [obj, trap]);
  }
  var n = ToUint32(obj.length);
  var array = new $Array(n);
  var realLength = 0;
  var names = { __proto__: null };  // TODO(rossberg): use sets once ready.
  for (var index = 0; index < n; index++) {
    var s = ToName(obj[index]);
    // TODO(rossberg): adjust once there is a story for symbols vs proxies.
    if (IS_SYMBOL(s) && !includeSymbols) continue;
    if (%HasOwnProperty(names, s)) {
      throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s]);
    }
    array[index] = s;
    ++realLength;
    names[s] = 0;
  }
  // NOTE(review): when symbols are skipped above, `array` keeps holes at the
  // skipped indices while this truncation drops trailing entries — confirm
  // this matches the intended proxy semantics before changing.
  array.length = realLength;
  return array;
}


// Collects own property keys — string-keyed or symbol-keyed depending on
// symbolsOnly — including indexed properties and interceptor-provided names.
function ObjectGetOwnPropertyKeys(obj, symbolsOnly) {
  var nameArrays = new InternalArray();
  var filter = symbolsOnly ?
      PROPERTY_ATTRIBUTES_STRING | PROPERTY_ATTRIBUTES_PRIVATE_SYMBOL :
      PROPERTY_ATTRIBUTES_SYMBOLIC;

  // Find all the indexed properties.

  // Only get own element names if we want to include string keys.
  if (!symbolsOnly) {
    var ownElementNames = %GetOwnElementNames(obj);
    for (var i = 0; i < ownElementNames.length; ++i) {
      ownElementNames[i] = %_NumberToString(ownElementNames[i]);
    }
    nameArrays.push(ownElementNames);

    // Get names for indexed interceptor properties.
    var interceptorInfo = %GetInterceptorInfo(obj);
    if ((interceptorInfo & 1) != 0) {
      var indexedInterceptorNames = %GetIndexedInterceptorElementNames(obj);
      if (!IS_UNDEFINED(indexedInterceptorNames)) {
        nameArrays.push(indexedInterceptorNames);
      }
    }
  }

  // Find all the named properties.

  // Get own property names.
  nameArrays.push(%GetOwnPropertyNames(obj, filter));

  // Get names for named interceptor properties if any.
  // NOTE(review): interceptorInfo is declared inside the !symbolsOnly branch
  // above; via var hoisting it is undefined here when symbolsOnly is true,
  // making the bitwise tests evaluate to 0 — confirm before refactoring.
  if ((interceptorInfo & 2) != 0) {
    var namedInterceptorNames =
        %GetNamedInterceptorPropertyNames(obj);
    if (!IS_UNDEFINED(namedInterceptorNames)) {
      nameArrays.push(namedInterceptorNames);
    }
  }

  var propertyNames =
      %Apply(InternalArray.prototype.concat,
             nameArrays[0], nameArrays, 1, nameArrays.length - 1);

  // Property names are expected to be unique strings,
  // but interceptors can interfere with that assumption.
  if (interceptorInfo != 0) {
    var seenKeys = { __proto__: null };
    var j = 0;
    for (var i = 0; i < propertyNames.length; ++i) {
      var name = propertyNames[i];
      if (symbolsOnly) {
        if (!IS_SYMBOL(name) || IS_PRIVATE(name)) continue;
      } else {
        if (IS_SYMBOL(name)) continue;
        name = ToString(name);
      }
      if (seenKeys[name]) continue;
      seenKeys[name] = true;
      propertyNames[j++] = name;
    }
    propertyNames.length = j;
  }

  return propertyNames;
}


// ES5 section 15.2.3.4.
function ObjectGetOwnPropertyNames(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.getOwnPropertyNames"]);
  }
  // Special handling for proxies.
  if (%IsJSProxy(obj)) {
    var handler = %GetHandler(obj);
    var names = CallTrap0(handler, "getOwnPropertyNames", UNDEFINED);
    return ToNameArray(names, "getOwnPropertyNames", false);
  }

  return ObjectGetOwnPropertyKeys(obj, false);
}


// ES5 section 15.2.3.5.
// ES5 section 15.2.3.5 Object.create.
function ObjectCreate(proto, properties) {
  if (!IS_SPEC_OBJECT(proto) && proto !== null) {
    throw MakeTypeError("proto_object_or_null", [proto]);
  }
  var obj = {};
  %InternalSetPrototype(obj, proto);
  if (!IS_UNDEFINED(properties)) ObjectDefineProperties(obj, properties);
  return obj;
}


// ES5 section 15.2.3.6.
function ObjectDefineProperty(obj, p, attributes) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.defineProperty"]);
  }
  var name = ToName(p);
  if (%IsJSProxy(obj)) {
    // Clone the attributes object for protection.
    // TODO(rossberg): not spec'ed yet, so not sure if this should involve
    // non-own properties as it does (or non-enumerable ones, as it doesn't?).
    var attributesClone = { __proto__: null };
    for (var a in attributes) {
      attributesClone[a] = attributes[a];
    }
    DefineProxyProperty(obj, name, attributesClone, true);
    // The following would implement the spec as in the current proposal,
    // but after recent comments on es-discuss, is most likely obsolete.
    /*
    var defineObj = FromGenericPropertyDescriptor(desc);
    var names = ObjectGetOwnPropertyNames(attributes);
    var standardNames =
      {value: 0, writable: 0, get: 0, set: 0, enumerable: 0, configurable: 0};
    for (var i = 0; i < names.length; i++) {
      var N = names[i];
      if (!(%HasOwnProperty(standardNames, N))) {
        var attr = GetOwnPropertyJS(attributes, N);
        DefineOwnProperty(descObj, N, attr, true);
      }
    }
    // This is really confusing the types, but it is what the proxies spec
    // currently requires:
    desc = descObj;
    */
  } else {
    var desc = ToPropertyDescriptor(attributes);
    DefineOwnProperty(obj, name, desc, true);
  }
  return obj;
}


// Returns own enumerable keys: string keys via for-in, plus own enumerable
// symbols (private symbols are filtered out by the descriptor check).
function GetOwnEnumerablePropertyNames(object) {
  var names = new InternalArray();
  for (var key in object) {
    if (%HasOwnProperty(object, key)) {
      names.push(key);
    }
  }

  var filter = PROPERTY_ATTRIBUTES_STRING | PROPERTY_ATTRIBUTES_PRIVATE_SYMBOL;
  var symbols = %GetOwnPropertyNames(object, filter);
  for (var i = 0; i < symbols.length; ++i) {
    var symbol = symbols[i];
    if (IS_SYMBOL(symbol)) {
      var desc = ObjectGetOwnPropertyDescriptor(object, symbol);
      if (desc.enumerable) names.push(symbol);
    }
  }

  return names;
}


// ES5 section 15.2.3.7.
function ObjectDefineProperties(obj, properties) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.defineProperties"]);
  }
  var props = ToObject(properties);
  var names = GetOwnEnumerablePropertyNames(props);
  var descriptors = new InternalArray();
  // Convert all descriptors first so an invalid one throws before any
  // property has been defined.
  for (var i = 0; i < names.length; i++) {
    descriptors.push(ToPropertyDescriptor(props[names[i]]));
  }
  for (var i = 0; i < names.length; i++) {
    DefineOwnProperty(obj, names[i], descriptors[i], true);
  }
  return obj;
}


// Harmony proxies.
// "Fixes" a proxy: obtains concrete properties from the "fix" trap and turns
// the proxy into an ordinary object (or a regular function, for function
// proxies).  Used by seal/freeze/preventExtensions on proxies.
function ProxyFix(obj) {
  var handler = %GetHandler(obj);
  var props = CallTrap0(handler, "fix", UNDEFINED);
  if (IS_UNDEFINED(props)) {
    throw MakeTypeError("handler_returned_undefined", [handler, "fix"]);
  }

  if (%IsJSFunctionProxy(obj)) {
    var callTrap = %GetCallTrap(obj);
    var constructTrap = %GetConstructTrap(obj);
    var code = DelegateCallAndConstruct(callTrap, constructTrap);
    %Fix(obj);  // becomes a regular function
    %SetCode(obj, code);
    // TODO(rossberg): What about length and other properties? Not specified.
    // We just put in some half-reasonable defaults for now.
    var prototype = new $Object();
    $Object.defineProperty(prototype, "constructor",
      {value: obj, writable: true, enumerable: false, configurable: true});
    // TODO(v8:1530): defineProperty does not handle prototype and length.
    %FunctionSetPrototype(obj, prototype);
    obj.length = 0;
  } else {
    %Fix(obj);
  }
  ObjectDefineProperties(obj, props);
}


// ES5 section 15.2.3.8.
function ObjectSeal(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.seal"]);
  }
  if (%IsJSProxy(obj)) {
    ProxyFix(obj);
  }
  var names = ObjectGetOwnPropertyNames(obj);
  for (var i = 0; i < names.length; i++) {
    var name = names[i];
    var desc = GetOwnPropertyJS(obj, name);
    if (desc.isConfigurable()) {
      desc.setConfigurable(false);
      DefineOwnProperty(obj, name, desc, true);
    }
  }
  %PreventExtensions(obj);
  return obj;
}


// ES5 section 15.2.3.9.
function ObjectFreezeJS(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.freeze"]);
  }
  var isProxy = %IsJSProxy(obj);
  // Slow generic path for proxies, sloppy-arguments objects and observed
  // objects; everything else is frozen wholesale in the runtime.
  if (isProxy || %HasSloppyArgumentsElements(obj) || %IsObserved(obj)) {
    if (isProxy) {
      ProxyFix(obj);
    }
    var names = ObjectGetOwnPropertyNames(obj);
    for (var i = 0; i < names.length; i++) {
      var name = names[i];
      var desc = GetOwnPropertyJS(obj, name);
      if (desc.isWritable() || desc.isConfigurable()) {
        if (IsDataDescriptor(desc)) desc.setWritable(false);
        desc.setConfigurable(false);
        DefineOwnProperty(obj, name, desc, true);
      }
    }
    %PreventExtensions(obj);
  } else {
    // TODO(adamk): Is it worth going to this fast path if the
    // object's properties are already in dictionary mode?
    %ObjectFreeze(obj);
  }
  return obj;
}


// ES5 section 15.2.3.10
function ObjectPreventExtension(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.preventExtension"]);
  }
  if (%IsJSProxy(obj)) {
    ProxyFix(obj);
  }
  %PreventExtensions(obj);
  return obj;
}


// ES5 section 15.2.3.11
function ObjectIsSealed(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.isSealed"]);
  }
  if (%IsJSProxy(obj)) {
    return false;
  }
  if (%IsExtensible(obj)) {
    return false;
  }
  var names = ObjectGetOwnPropertyNames(obj);
  for (var i = 0; i < names.length; i++) {
    var name = names[i];
    var desc = GetOwnPropertyJS(obj, name);
    if (desc.isConfigurable()) {
      return false;
    }
  }
  return true;
}


// ES5 section 15.2.3.12
function ObjectIsFrozen(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.isFrozen"]);
  }
  if (%IsJSProxy(obj)) {
    return false;
  }
  if (%IsExtensible(obj)) {
    return false;
  }
  var names = ObjectGetOwnPropertyNames(obj);
  for (var i = 0; i < names.length; i++) {
    var name = names[i];
    var desc = GetOwnPropertyJS(obj, name);
    if (IsDataDescriptor(desc) && desc.isWritable()) return false;
    if (desc.isConfigurable()) return false;
  }
  return true;
}


// ES5 section 15.2.3.13
function ObjectIsExtensible(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    throw MakeTypeError("called_on_non_object", ["Object.isExtensible"]);
  }
  if (%IsJSProxy(obj)) {
    return true;
  }
  return %IsExtensible(obj);
}


// Harmony egal.
// SameValue semantics: like === except NaN equals NaN and +0 differs
// from -0 (the 1/x trick distinguishes the zeros via +/-Infinity).
function ObjectIs(obj1, obj2) {
  if (obj1 === obj2) {
    return (obj1 !== 0) || (1 / obj1 === 1 / obj2);
  } else {
    return (obj1 !== obj1) && (obj2 !== obj2);
  }
}


// ECMA-262, Edition 6, section B.2.2.1.1
function ObjectGetProto() {
  return %GetPrototype(ToObject(this));
}


// ECMA-262, Edition 6, section B.2.2.1.2
function ObjectSetProto(proto) {
  CHECK_OBJECT_COERCIBLE(this, "Object.prototype.__proto__");

  if ((IS_SPEC_OBJECT(proto) || IS_NULL(proto)) && IS_SPEC_OBJECT(this)) {
    %SetPrototype(this, proto);
  }
}


function ObjectConstructor(x) {
  if (%_IsConstructCall()) {
    if (x == null) return this;
    return ToObject(x);
  } else {
    if (x == null) return { };
    return ToObject(x);
  }
}


// ----------------------------------------------------------------------------
// Object

// Bootstrap-time installation of Object.prototype methods and Object
// statics.  Only runs during snapshot creation (%CheckIsBootstrapping).
function SetUpObject() {
  %CheckIsBootstrapping();

  %SetNativeFlag($Object);
  %SetCode($Object, ObjectConstructor);

  %AddNamedProperty($Object.prototype, "constructor", $Object, DONT_ENUM);

  // Set up non-enumerable functions on the Object.prototype object.
  InstallFunctions($Object.prototype, DONT_ENUM, $Array(
    "toString", ObjectToString,
    "toLocaleString", ObjectToLocaleString,
    "valueOf", ObjectValueOf,
    "hasOwnProperty", ObjectHasOwnProperty,
    "isPrototypeOf", ObjectIsPrototypeOf,
    "propertyIsEnumerable", ObjectPropertyIsEnumerable,
    "__defineGetter__", ObjectDefineGetter,
    "__lookupGetter__", ObjectLookupGetter,
    "__defineSetter__", ObjectDefineSetter,
    "__lookupSetter__", ObjectLookupSetter
  ));
  InstallGetterSetter($Object.prototype, "__proto__",
                      ObjectGetProto, ObjectSetProto);

  // Set up non-enumerable functions in the Object object.
  InstallFunctions($Object, DONT_ENUM, $Array(
    "keys", ObjectKeys,
    "create", ObjectCreate,
    "defineProperty", ObjectDefineProperty,
    "defineProperties", ObjectDefineProperties,
    "freeze", ObjectFreezeJS,
    "getPrototypeOf", ObjectGetPrototypeOf,
    "setPrototypeOf", ObjectSetPrototypeOf,
    "getOwnPropertyDescriptor", ObjectGetOwnPropertyDescriptor,
    "getOwnPropertyNames", ObjectGetOwnPropertyNames,
    // getOwnPropertySymbols is added in symbol.js.
    "is", ObjectIs,
    "isExtensible", ObjectIsExtensible,
    "isFrozen", ObjectIsFrozen,
    "isSealed", ObjectIsSealed,
    "preventExtensions", ObjectPreventExtension,
    "seal", ObjectSeal
    // deliverChangeRecords, getNotifier, observe and unobserve are added
    // in object-observe.js.
  ));
}

SetUpObject();


// ----------------------------------------------------------------------------
// Boolean

function BooleanConstructor(x) {
  if (%_IsConstructCall()) {
    %_SetValueOf(this, ToBoolean(x));
  } else {
    return ToBoolean(x);
  }
}


function BooleanToString() {
  // NOTE: Both Boolean objects and values can enter here as
  // 'this'. This is not as dictated by ECMA-262.
  var b = this;
  if (!IS_BOOLEAN(b)) {
    if (!IS_BOOLEAN_WRAPPER(b)) {
      throw new $TypeError('Boolean.prototype.toString is not generic');
    }
    b = %_ValueOf(b);
  }
  return b ? 'true' : 'false';
}


function BooleanValueOf() {
  // NOTE: Both Boolean objects and values can enter here as
  // 'this'. This is not as dictated by ECMA-262.
  if (!IS_BOOLEAN(this) && !IS_BOOLEAN_WRAPPER(this)) {
    throw new $TypeError('Boolean.prototype.valueOf is not generic');
  }
  return %_ValueOf(this);
}


// ----------------------------------------------------------------------------

function SetUpBoolean () {
  %CheckIsBootstrapping();

  %SetCode($Boolean, BooleanConstructor);
  %FunctionSetPrototype($Boolean, new $Boolean(false));
  %AddNamedProperty($Boolean.prototype, "constructor", $Boolean, DONT_ENUM);

  InstallFunctions($Boolean.prototype, DONT_ENUM, $Array(
    "toString", BooleanToString,
    "valueOf", BooleanValueOf
  ));
}

SetUpBoolean();


// ----------------------------------------------------------------------------
// Number

function NumberConstructor(x) {
  var value = %_ArgumentsLength() == 0 ? 0 : ToNumber(x);
  if (%_IsConstructCall()) {
    %_SetValueOf(this, value);
  } else {
    return value;
  }
}


// ECMA-262 section 15.7.4.2.
function NumberToString(radix) {
  // NOTE: Both Number objects and values can enter here as
  // 'this'. This is not as dictated by ECMA-262.
  var number = this;
  if (!IS_NUMBER(this)) {
    if (!IS_NUMBER_WRAPPER(this)) {
      throw new $TypeError('Number.prototype.toString is not generic');
    }
    // Get the value of this number in case it's an object.
    number = %_ValueOf(this);
  }
  // Fast case: Convert number in radix 10.
  if (IS_UNDEFINED(radix) || radix === 10) {
    return %_NumberToString(number);
  }

  // Convert the radix to an integer and check the range.
  radix = TO_INTEGER(radix);
  if (radix < 2 || radix > 36) {
    throw new $RangeError('toString() radix argument must be between 2 and 36');
  }
  // Convert the number to a string in the given radix.
  return %NumberToRadixString(number, radix);
}


// ECMA-262 section 15.7.4.3
function NumberToLocaleString() {
  return %_CallFunction(this, NumberToString);
}


// ECMA-262 section 15.7.4.4
function NumberValueOf() {
  // NOTE: Both Number objects and values can enter here as
  // 'this'. This is not as dictated by ECMA-262.
  if (!IS_NUMBER(this) && !IS_NUMBER_WRAPPER(this)) {
    throw new $TypeError('Number.prototype.valueOf is not generic');
  }
  return %_ValueOf(this);
}


// ECMA-262 section 15.7.4.5
function NumberToFixedJS(fractionDigits) {
  var x = this;
  if (!IS_NUMBER(this)) {
    if (!IS_NUMBER_WRAPPER(this)) {
      throw MakeTypeError("incompatible_method_receiver",
                          ["Number.prototype.toFixed", this]);
    }
    // Get the value of this number in case it's an object.
    x = %_ValueOf(this);
  }
  var f = TO_INTEGER(fractionDigits);

  if (f < 0 || f > 20) {
    throw new $RangeError("toFixed() digits argument must be between 0 and 20");
  }

  if (NUMBER_IS_NAN(x)) return "NaN";
  if (x == INFINITY) return "Infinity";
  if (x == -INFINITY) return "-Infinity";

  return %NumberToFixed(x, f);
}


// ECMA-262 section 15.7.4.6
function NumberToExponentialJS(fractionDigits) {
  var x = this;
  if (!IS_NUMBER(this)) {
    if (!IS_NUMBER_WRAPPER(this)) {
      throw MakeTypeError("incompatible_method_receiver",
                          ["Number.prototype.toExponential", this]);
    }
    // Get the value of this number in case it's an object.
    x = %_ValueOf(this);
  }
  var f = IS_UNDEFINED(fractionDigits) ? UNDEFINED : TO_INTEGER(fractionDigits);

  if (NUMBER_IS_NAN(x)) return "NaN";
  if (x == INFINITY) return "Infinity";
  if (x == -INFINITY) return "-Infinity";

  if (IS_UNDEFINED(f)) {
    f = -1;  // Signal for runtime function that f is not defined.
  } else if (f < 0 || f > 20) {
    throw new $RangeError("toExponential() argument must be between 0 and 20");
  }
  return %NumberToExponential(x, f);
}


// ECMA-262 section 15.7.4.7
function NumberToPrecisionJS(precision) {
  var x = this;
  if (!IS_NUMBER(this)) {
    if (!IS_NUMBER_WRAPPER(this)) {
      throw MakeTypeError("incompatible_method_receiver",
                          ["Number.prototype.toPrecision", this]);
    }
    // Get the value of this number in case it's an object.
    x = %_ValueOf(this);
  }
  if (IS_UNDEFINED(precision)) return ToString(%_ValueOf(this));
  var p = TO_INTEGER(precision);

  if (NUMBER_IS_NAN(x)) return "NaN";
  if (x == INFINITY) return "Infinity";
  if (x == -INFINITY) return "-Infinity";

  if (p < 1 || p > 21) {
    throw new $RangeError("toPrecision() argument must be between 1 and 21");
  }
  return %NumberToPrecision(x, p);
}


// Harmony isFinite.
function NumberIsFinite(number) {
  return IS_NUMBER(number) && NUMBER_IS_FINITE(number);
}


// Harmony isInteger
function NumberIsInteger(number) {
  return NumberIsFinite(number) && TO_INTEGER(number) == number;
}


// Harmony isNaN.
function NumberIsNaN(number) {
  return IS_NUMBER(number) && NUMBER_IS_NAN(number);
}


// Harmony isSafeInteger
function NumberIsSafeInteger(number) {
  if (NumberIsFinite(number)) {
    var integral = TO_INTEGER(number);
    if (integral == number) return MathAbs(integral) <= $Number.MAX_SAFE_INTEGER;
  }
  return false;
}


// ----------------------------------------------------------------------------

// Bootstrap-time installation of Number constants and methods.
function SetUpNumber() {
  %CheckIsBootstrapping();

  %SetCode($Number, NumberConstructor);
  %FunctionSetPrototype($Number, new $Number(0));

  %OptimizeObjectForAddingMultipleProperties($Number.prototype, 8);
  // Set up the constructor property on the Number prototype object.
  %AddNamedProperty($Number.prototype, "constructor", $Number, DONT_ENUM);

  InstallConstants($Number, $Array(
      // ECMA-262 section 15.7.3.1.
      "MAX_VALUE", 1.7976931348623157e+308,
      // ECMA-262 section 15.7.3.2.
      "MIN_VALUE", 5e-324,
      // ECMA-262 section 15.7.3.3.
      "NaN", NAN,
      // ECMA-262 section 15.7.3.4.
      "NEGATIVE_INFINITY", -INFINITY,
      // ECMA-262 section 15.7.3.5.
      "POSITIVE_INFINITY", INFINITY,

      // --- Harmony constants (no spec refs until settled.)

      "MAX_SAFE_INTEGER", %_MathPow(2, 53) - 1,
      "MIN_SAFE_INTEGER", -%_MathPow(2, 53) + 1,
      "EPSILON", %_MathPow(2, -52)
  ));

  // Set up non-enumerable functions on the Number prototype object.
  InstallFunctions($Number.prototype, DONT_ENUM, $Array(
    "toString", NumberToString,
    "toLocaleString", NumberToLocaleString,
    "valueOf", NumberValueOf,
    "toFixed", NumberToFixedJS,
    "toExponential", NumberToExponentialJS,
    "toPrecision", NumberToPrecisionJS
  ));

  // Harmony Number constructor additions
  InstallFunctions($Number, DONT_ENUM, $Array(
    "isFinite", NumberIsFinite,
    "isInteger", NumberIsInteger,
    "isNaN", NumberIsNaN,
    "isSafeInteger", NumberIsSafeInteger,
    "parseInt", GlobalParseInt,
    "parseFloat", GlobalParseFloat
  ));
}

SetUpNumber();


// ----------------------------------------------------------------------------
// Function

// Produces the source text for Function.prototype.toString, unwrapping
// function proxies and synthesizing "[native code]" bodies for builtins.
function FunctionSourceString(func) {
  while (%IsJSFunctionProxy(func)) {
    func = %GetCallTrap(func);
  }

  if (!IS_FUNCTION(func)) {
    throw new $TypeError('Function.prototype.toString is not generic');
  }

  var source = %FunctionGetSourceCode(func);
  if (!IS_STRING(source) || %FunctionIsBuiltin(func)) {
    var name = %FunctionGetName(func);
    if (name) {
      // Mimic what KJS does.
      return 'function ' + name + '() { [native code] }';
    } else {
      return 'function () { [native code] }';
    }
  }

  if (%FunctionIsArrow(func)) {
    return source;
  }

  var name = %FunctionNameShouldPrintAsAnonymous(func)
      ? 'anonymous'
      : %FunctionGetName(func);
  var head = %FunctionIsGenerator(func) ? 'function* ' : 'function ';
  return head + name + source;
}


function FunctionToString() {
  return FunctionSourceString(this);
}


// ES5 15.3.4.5
function FunctionBind(this_arg) { // Length is 1.
  if (!IS_SPEC_FUNCTION(this)) {
    throw new $TypeError('Bind must be called on a function');
  }
  var boundFunction = function () {
    // Poison .arguments and .caller, but is otherwise not detectable.
    "use strict";
    // This function must not use any object literals (Object, Array, RegExp),
    // since the literals-array is being used to store the bound data.
    if (%_IsConstructCall()) {
      return %NewObjectFromBound(boundFunction);
    }
    var bindings = %BoundFunctionGetBindings(boundFunction);

    var argc = %_ArgumentsLength();
    if (argc == 0) {
      return %Apply(bindings[0], bindings[1], bindings, 2, bindings.length - 2);
    }
    if (bindings.length === 2) {
      return %Apply(bindings[0], bindings[1], arguments, 0, argc);
    }
    var bound_argc = bindings.length - 2;
    var argv = new InternalArray(bound_argc + argc);
    for (var i = 0; i < bound_argc; i++) {
      argv[i] = bindings[i + 2];
    }
    for (var j = 0; j < argc; j++) {
      argv[i++] = %_Arguments(j);
    }
    return %Apply(bindings[0], bindings[1], argv, 0, bound_argc + argc);
  };

  var new_length = 0;
  var old_length = this.length;
  // FunctionProxies might provide a non-UInt32 value. If so, ignore it.
  if ((typeof old_length === "number") &&
      ((old_length >>> 0) === old_length)) {
    var argc = %_ArgumentsLength();
    if (argc > 0) argc--;  // Don't count the thisArg as parameter.
    new_length = old_length - argc;
    if (new_length < 0) new_length = 0;
  }
  // This runtime function finds any remaining arguments on the stack,
  // so we don't pass the arguments object.
  var result = %FunctionBindArguments(boundFunction, this,
                                      this_arg, new_length);

  // We already have caller and arguments properties on functions,
  // which are non-configurable. It therefore makes no sence to
  // try to redefine these as defined by the spec. The spec says
  // that bind should make these throw a TypeError if get or set
  // is called and make them non-enumerable and non-configurable.
  // To be consistent with our normal functions we leave this as it is.
  // TODO(lrn): Do set these to be thrower.
  return result;
}


// Builds the source string compiled by the Function constructor:
// "(function(p1,p2) {\n body \n})".
function NewFunctionString(arguments, function_token) {
  var n = arguments.length;
  var p = '';
  if (n > 1) {
    p = ToString(arguments[0]);
    for (var i = 1; i < n - 1; i++) {
      p += ',' + ToString(arguments[i]);
    }
    // If the formal parameters string include ) - an illegal
    // character - it may make the combined function expression
    // compile. We avoid this problem by checking for this early on.
    if (%_CallFunction(p, ')', StringIndexOfJS) != -1) {
      throw MakeSyntaxError('paren_in_arg_string', []);
    }
    // If the formal parameters include an unbalanced block comment, the
    // function must be rejected. Since JavaScript does not allow nested
    // comments we can include a trailing block comment to catch this.
    p += '\n/' + '**/';
  }
  var body = (n > 0) ? ToString(arguments[n - 1]) : '';
  return '(' + function_token + '(' + p + ') {\n' + body + '\n})';
}


function FunctionConstructor(arg1) {  // length == 1
  var source = NewFunctionString(arguments, 'function');
  var global_proxy = %GlobalProxy(global);
  // Compile the string in the constructor and not a helper so that errors
  // appear to come from here.
  var f = %_CallFunction(global_proxy, %CompileString(source, true));
  %FunctionMarkNameShouldPrintAsAnonymous(f);
  return f;
}


// ----------------------------------------------------------------------------

function SetUpFunction() {
  %CheckIsBootstrapping();

  %SetCode($Function, FunctionConstructor);
  %AddNamedProperty($Function.prototype, "constructor", $Function, DONT_ENUM);

  InstallFunctions($Function.prototype, DONT_ENUM, $Array(
    "bind", FunctionBind,
    "toString", FunctionToString
  ));
}

SetUpFunction();


// ----------------------------------------------------------------------------
// Iterator related spec functions.

// ES6 rev 26, 2014-07-18
// 7.4.1 CheckIterable ( obj )
function ToIterable(obj) {
  if (!IS_SPEC_OBJECT(obj)) {
    return UNDEFINED;
  }
  return obj[symbolIterator];
}


// ES6 rev 26, 2014-07-18
// 7.4.2 GetIterator ( obj, method )
function GetIterator(obj, method) {
  if (IS_UNDEFINED(method)) {
    method = ToIterable(obj);
  }
  if (!IS_SPEC_FUNCTION(method)) {
    throw MakeTypeError('not_iterable', [obj]);
  }
  var iterator = %_CallFunction(obj, method);
  if (!IS_SPEC_OBJECT(iterator)) {
    throw MakeTypeError('not_an_iterator', [iterator]);
  }
  return iterator;
}
kingland/go-v8
v8-3.28/src/v8natives.js
JavaScript
mit
55,369
<!-- $Id: example-user-feed.html,v 1.1 2008/09/30 00:01:27 merlinofchaos Exp $ --> <p>In this example you will create a <em>Feed display</em> to show nodes by individual users, dynamically selected through the URL. You will become familiar with the Views 2 interface, as well as learn how to use an argument to pull in a user name and use it in a dynamically created path.</p> <p>A <em>feed</em> is a data format that places your site's content into a file that can be read and displayed by news reader programs. When visiting a site, you may notice a small <a href="http://drupal.org/misc/feed.png">RSS transmission icon</a>, whereby clicking on it, you can subscribe to the site's most recent content. This makes it easier for your visitors to keep up to date with your website. You can also use this format to aggregate information into other sites. For more information, please watch a video from Common Craft about <a href="http://www.commoncraft.com/rss_plain_english">RSS in plain English</a>.</p> <p>Note, Drupal automatically creates a feed for your website, but you may want to create feeds with specific information. In this case, a list per user. </p> <ol> <li> <h3>Creating a new view </h3> <ol> <li>Go to <a target="_blank" href="/admin/build/views/add">add new view</a>. Give it the name 'user_feed', description 'A feed of user nodes.', tag 'users', type 'Node' and click Next.</li> </ol> </li> <li><strong>About the Interface.</strong> You have been brought to the Views User Interface. As you start, you are editing the &quot;Default&quot; options for the view. In the 1st column on the left- you can see the pull-down menu offers 'Feed', for example, to select settings specific only to RSS views. In the remaining columns, you will be able to add or change options by clicking on links or icons. These options appear below this main area. Most likely, you will need to scroll to see the options appear. 
As you make changes, these options will appear in bold until you save your view.</li> <li> <h3>Change default display</h3> <ol> <li>Under <strong>Basic Settings</strong> in the 2nd column, click <em>Row style: Fields</em></li> <li>A menu loads below, <em>Defaults: How should each row in this view be styled</em>, check the <em>Node</em> option, and click <strong>Update</strong>.</li> <li>This loads another options menu, <em>Defaults: Row style options</em> click <strong>Update</strong>.</li> </ol> </li> <li> <h3>Create the RSS view </h3> <ol> <li>In the 1st column, select 'Feed' in the drop-down menu, and click <strong>Add Display</strong>.</li> <li>Under <strong>Basic Settings </strong>in the 2nd column, click<em> Row style:Missing style plugin</em></li> <li>Note, options appear below the Views Interface, you may need to scroll to see <em>Feed: How should each row in this view be styled</em><br /> tick <strong>Node</strong>, then <strong>Update</strong></li> <li>This loads the next options menu- <em>Display type: </em>select &quot;Use default RSS settings&quot;, click <strong>Update</strong>.</li> </ol> </li> <li> <h3>Set the path for accessing your feed</h3> <ol> <li> In the 2nd column under <strong>Feed settings</strong>, click <em>Path: None </em></li> <li>In options below <em>Feed: The menu path or URL of this view</em> enter in the path with an argument feeds/%/rss.xml</li> <li>Click <strong>Update</strong></li> </ol> </li> <li> <h3>Set up your arguments to say which user's nodes to display</h3> <ol> <li>To the right of <strong>Arguments</strong>, click the + sign to add an argument</li> <li>In the Feed: Add arguments menu that loads below, select User in the pull-down menu</li> <li>Check the box <em>User: Name</em>, click <strong>Add</strong></li> <li>Scroll down to options to find <strong>Case in path:</strong> select <em>Lower case</em></li> <li>Check the box <em>Transform spaces to dashes in URL</em></li> <li>Click <strong>Update default
display</strong></li> </ol> </li> <li> <h3>Sort to show most recent at top of feed</h3> <ol> <li>Scroll up to <strong>Sort criteria</strong> in the right most column and click the + icon.</li> <li>In the <strong>Groups</strong> drop-down menu below, select 'Node', then check <em>Node: Post date</em>, and click <strong>Add</strong>. </li> <li>Select <em>Descending</em> <strong>Sort order</strong>. Click <strong>Update</strong>.</li> </ol> </li> <li> <h3>Set filters to hide unpublished entries</h3> <ol> <li>Click the + icon next to <strong>Filters</strong>. In the options below, select <em>Node</em> under <strong>Groups</strong> drop-down menu, choose the <em>Node: Published</em> filter, and click <strong>Add</strong>.</li> <li>Check the box <em>Published</em>. Click <strong>Update default display</strong></li> </ol> </li> <li> <h3>Test</h3> <ol> <li>Click <strong>Save</strong></li> <li>Under <strong>Live preview</strong> type in the name of a user, in lowercase, replacing spaces with dashes, click <strong>Preview</strong>.</li> <li>You should test and find your feeds at URLs like http://yoursite.com/feeds/user-name/rss.xml</li> <li>You can use this path for aggregating on another site. You can also attach the RSS feed to another display of view to make the feed link appear on that display.</li> </ol> </li> </ol>
temptemp5678/drupalnote
sites/all/modules/views/help/example-user-feed.html
HTML
mit
5,606
// Build-cleanup script: deletes generated output directories so the next
// build starts from a clean slate.
//
// Improvement: the original pulled in the third-party `fs-extra` package
// solely for `removeSync`. Node's built-in `fs.rmSync` (available since
// Node 14.14) does the same job — `recursive: true` removes non-empty
// directories and `force: true` makes a missing directory a no-op, which
// matches fs-extra's removeSync semantics.
const fs = require('fs');
const path = require('path');

// Directories to delete, relative to the project root (one level above
// the scripts/ directory this file lives in).
const cleanDirs = [
  'dist'
];

for (const dir of cleanDirs) {
  const cleanDir = path.join(__dirname, '..', dir);
  fs.rmSync(cleanDir, { recursive: true, force: true });
}
TillaTheHun0/ionic
core/scripts/clean.js
JavaScript
mit
208
--- # Layouts must stay in HTML --- {% include header.html %} {{ content }} {% include footer.html %} {% include closure.html %}
richardatval/annual-review-2016
_layouts/plain.html
HTML
mit
132
# jenv exec hook, run before Leiningen ("lein") is executed.
#
# Points Leiningen at the jenv-selected JDK and forwards any JENV_OPTIONS
# as JVM options, unless the user has already set LEIN_JVM_OPTS themselves.
#
# Fix: $JENV_OPTIONS and $JAVA_HOME are now quoted. The original passed
# $JENV_OPTIONS unquoted, so an options string containing spaces (e.g.
# "-Xms256m -Xmx1g") was word-split into multiple arguments and everything
# after the first flag was silently dropped by exportVariable.
if [ "$1" = "lein" ]; then
  exportVariable JAVA_CMD "$JAVA_HOME/bin/java"
  exportVariable LEIN_JAVA_CMD "$JAVA_HOME/bin/java"

  if [ -n "$JENV_OPTIONS" ]; then
    if [ -z "$LEIN_JVM_OPTS" ]; then
      # Quote so a multi-flag option string stays a single argument.
      exportVariable LEIN_JVM_OPTS "$JENV_OPTIONS"
      exportVariable JVM_OPTS "$JENV_OPTIONS"
    else
      echo "LEIN_JVM_OPTS is set and not overridden by jenv"
    fi
    # Consumed; prevent downstream hooks from applying it twice.
    unset JENV_OPTIONS
  fi
fi
jchochli/dotfiles
home/.jenv/available-plugins/lein/etc/jenv.d/exec/lein-before.bash
Shell
mit
416
/* * Qt4 bitcoin GUI. * * W.J. van der Laan 2011-2012 * The Bitcoin Developers 2011-2012 * The Litecoin Developers 201-2013 */ #include "bitcoingui.h" #include "transactiontablemodel.h" #include "addressbookpage.h" #include "sendcoinsdialog.h" #include "signverifymessagedialog.h" #include "optionsdialog.h" #include "aboutdialog.h" #include "clientmodel.h" #include "walletmodel.h" #include "editaddressdialog.h" #include "optionsmodel.h" #include "transactiondescdialog.h" #include "addresstablemodel.h" #include "transactionview.h" #include "overviewpage.h" #include "miningpage.h" #include "bitcoinunits.h" #include "guiconstants.h" #include "askpassphrasedialog.h" #include "notificator.h" #include "guiutil.h" #include "rpcconsole.h" #ifdef Q_WS_MAC #include "macdockiconhandler.h" #endif #include <QApplication> #include <QMainWindow> #include <QMenuBar> #include <QMenu> #include <QIcon> #include <QTabWidget> #include <QVBoxLayout> #include <QToolBar> #include <QStatusBar> #include <QLabel> #include <QLineEdit> #include <QPushButton> #include <QLocale> #include <QMessageBox> #include <QProgressBar> #include <QStackedWidget> #include <QDateTime> #include <QMovie> #include <QFileDialog> #include <QDesktopServices> #include <QTimer> #include <QDragEnterEvent> #include <QUrl> #include <iostream> BitcoinGUI::BitcoinGUI(QWidget *parent): QMainWindow(parent), clientModel(0), walletModel(0), encryptWalletAction(0), changePassphraseAction(0), aboutQtAction(0), trayIcon(0), notificator(0), rpcConsole(0) { resize(850, 550); setWindowTitle(tr("8Coin") + " - " + tr("Wallet")); #ifndef Q_WS_MAC qApp->setWindowIcon(QIcon(":icons/bitcoin")); setWindowIcon(QIcon(":icons/bitcoin")); #else setUnifiedTitleAndToolBarOnMac(true); QApplication::setAttribute(Qt::AA_DontShowIconsInMenus); #endif // Accept D&D of URIs setAcceptDrops(true); // Create actions for the toolbar, menu bar and tray/dock icon createActions(); // Create application menu bar createMenuBar(); // Create the toolbars 
createToolBars(); // Create the tray icon (or setup the dock icon) createTrayIcon(); // Create tabs overviewPage = new OverviewPage(); miningPage = new MiningPage(this); transactionsPage = new QWidget(this); QVBoxLayout *vbox = new QVBoxLayout(); transactionView = new TransactionView(this); vbox->addWidget(transactionView); transactionsPage->setLayout(vbox); addressBookPage = new AddressBookPage(AddressBookPage::ForEditing, AddressBookPage::SendingTab); receiveCoinsPage = new AddressBookPage(AddressBookPage::ForEditing, AddressBookPage::ReceivingTab); sendCoinsPage = new SendCoinsDialog(this); signVerifyMessageDialog = new SignVerifyMessageDialog(this); centralWidget = new QStackedWidget(this); centralWidget->addWidget(overviewPage); centralWidget->addWidget(miningPage); centralWidget->addWidget(transactionsPage); centralWidget->addWidget(addressBookPage); centralWidget->addWidget(receiveCoinsPage); centralWidget->addWidget(sendCoinsPage); #ifdef FIRST_CLASS_MESSAGING centralWidget->addWidget(signVerifyMessageDialog); #endif setCentralWidget(centralWidget); // Create status bar statusBar(); // Status bar notification icons QFrame *frameBlocks = new QFrame(); frameBlocks->setContentsMargins(0,0,0,0); frameBlocks->setMinimumWidth(73); frameBlocks->setMaximumWidth(73); QHBoxLayout *frameBlocksLayout = new QHBoxLayout(frameBlocks); frameBlocksLayout->setContentsMargins(3,0,3,0); frameBlocksLayout->setSpacing(3); labelEncryptionIcon = new QLabel(); labelMiningIcon = new QLabel(); labelConnectionsIcon = new QLabel(); labelBlocksIcon = new QLabel(); frameBlocksLayout->addStretch(); frameBlocksLayout->addWidget(labelEncryptionIcon); frameBlocksLayout->addStretch(); frameBlocksLayout->addWidget(labelMiningIcon); frameBlocksLayout->addStretch(); frameBlocksLayout->addWidget(labelConnectionsIcon); frameBlocksLayout->addStretch(); frameBlocksLayout->addWidget(labelBlocksIcon); frameBlocksLayout->addStretch(); // Progress bar and label for blocks download progressBarLabel = new 
QLabel(); progressBarLabel->setVisible(false); progressBar = new QProgressBar(); progressBar->setAlignment(Qt::AlignCenter); progressBar->setVisible(false); statusBar()->addWidget(progressBarLabel); statusBar()->addWidget(progressBar); statusBar()->addPermanentWidget(frameBlocks); syncIconMovie = new QMovie(":/movies/update_spinner", "mng", this); // Clicking on a transaction on the overview page simply sends you to transaction history page connect(overviewPage, SIGNAL(transactionClicked(QModelIndex)), this, SLOT(gotoHistoryPage())); connect(overviewPage, SIGNAL(transactionClicked(QModelIndex)), transactionView, SLOT(focusTransaction(QModelIndex))); // Doubleclicking on a transaction on the transaction history page shows details connect(transactionView, SIGNAL(doubleClicked(QModelIndex)), transactionView, SLOT(showDetails())); rpcConsole = new RPCConsole(this); connect(openRPCConsoleAction, SIGNAL(triggered()), rpcConsole, SLOT(show())); // Clicking on "Verify Message" in the address book sends you to the verify message tab connect(addressBookPage, SIGNAL(verifyMessage(QString)), this, SLOT(gotoVerifyMessageTab(QString))); // Clicking on "Sign Message" in the receive coins page sends you to the sign message tab connect(receiveCoinsPage, SIGNAL(signMessage(QString)), this, SLOT(gotoSignMessageTab(QString))); gotoOverviewPage(); } BitcoinGUI::~BitcoinGUI() { if(trayIcon) // Hide tray icon, as deleting will let it linger until quit (on Ubuntu) trayIcon->hide(); #ifdef Q_WS_MAC delete appMenuBar; #endif } void BitcoinGUI::createActions() { QActionGroup *tabGroup = new QActionGroup(this); overviewAction = new QAction(QIcon(":/icons/overview"), tr("&Overview"), this); overviewAction->setToolTip(tr("Show general overview of wallet")); overviewAction->setCheckable(true); overviewAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_1)); tabGroup->addAction(overviewAction); miningAction = new QAction(QIcon(":/icons/mining"), tr("&Mining"), this); 
miningAction->setToolTip(tr("Configure mining")); miningAction->setCheckable(true); tabGroup->addAction(miningAction); historyAction = new QAction(QIcon(":/icons/history"), tr("&Transactions"), this); historyAction->setToolTip(tr("Browse transaction history")); historyAction->setCheckable(true); historyAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_4)); tabGroup->addAction(historyAction); addressBookAction = new QAction(QIcon(":/icons/address-book"), tr("&Address Book"), this); addressBookAction->setToolTip(tr("Edit the list of stored addresses and labels")); addressBookAction->setCheckable(true); addressBookAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_5)); tabGroup->addAction(addressBookAction); receiveCoinsAction = new QAction(QIcon(":/icons/receiving_addresses"), tr("&Receive coins"), this); receiveCoinsAction->setToolTip(tr("Show the list of addresses for receiving payments")); receiveCoinsAction->setCheckable(true); receiveCoinsAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_3)); tabGroup->addAction(receiveCoinsAction); sendCoinsAction = new QAction(QIcon(":/icons/send"), tr("&Send coins"), this); sendCoinsAction->setToolTip(tr("Send coins to a 8Coin address")); sendCoinsAction->setCheckable(true); sendCoinsAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_2)); tabGroup->addAction(sendCoinsAction); signMessageAction = new QAction(QIcon(":/icons/edit"), tr("Sign &message..."), this); signMessageAction->setToolTip(tr("Sign a message to prove you own a Bitcoin address")); tabGroup->addAction(signMessageAction); verifyMessageAction = new QAction(QIcon(":/icons/transaction_0"), tr("&Verify message..."), this); verifyMessageAction->setToolTip(tr("Verify a message to ensure it was signed with a specified Bitcoin address")); tabGroup->addAction(verifyMessageAction); #ifdef FIRST_CLASS_MESSAGING firstClassMessagingAction = new QAction(QIcon(":/icons/edit"), tr("S&ignatures"), this); firstClassMessagingAction->setToolTip(signMessageAction->toolTip() + 
QString(". / ") + verifyMessageAction->toolTip() + QString(".")); firstClassMessagingAction->setCheckable(true); tabGroup->addAction(firstClassMessagingAction); #endif connect(overviewAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(overviewAction, SIGNAL(triggered()), this, SLOT(gotoOverviewPage())); connect(miningAction, SIGNAL(triggered()), this, SLOT(gotoMiningPage())); connect(historyAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(historyAction, SIGNAL(triggered()), this, SLOT(gotoHistoryPage())); connect(addressBookAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(addressBookAction, SIGNAL(triggered()), this, SLOT(gotoAddressBookPage())); connect(receiveCoinsAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(receiveCoinsAction, SIGNAL(triggered()), this, SLOT(gotoReceiveCoinsPage())); connect(sendCoinsAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(sendCoinsAction, SIGNAL(triggered()), this, SLOT(gotoSendCoinsPage())); connect(signMessageAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(signMessageAction, SIGNAL(triggered()), this, SLOT(gotoSignMessageTab())); connect(verifyMessageAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); connect(verifyMessageAction, SIGNAL(triggered()), this, SLOT(gotoVerifyMessageTab())); #ifdef FIRST_CLASS_MESSAGING connect(firstClassMessagingAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized())); // Always start with the sign message tab for FIRST_CLASS_MESSAGING connect(firstClassMessagingAction, SIGNAL(triggered()), this, SLOT(gotoSignMessageTab())); #endif quitAction = new QAction(QIcon(":/icons/quit"), tr("E&xit"), this); quitAction->setToolTip(tr("Quit application")); quitAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_Q)); quitAction->setMenuRole(QAction::QuitRole); aboutAction = new QAction(QIcon(":/icons/bitcoin"), tr("&About 8Coin"), 
this); aboutAction->setToolTip(tr("Show information about 8Coin")); aboutAction->setMenuRole(QAction::AboutRole); aboutQtAction = new QAction(tr("About &Qt"), this); aboutQtAction->setToolTip(tr("Show information about Qt")); aboutQtAction->setMenuRole(QAction::AboutQtRole); optionsAction = new QAction(QIcon(":/icons/options"), tr("&Options..."), this); optionsAction->setToolTip(tr("Modify configuration options for 8Coin")); optionsAction->setMenuRole(QAction::PreferencesRole); toggleHideAction = new QAction(QIcon(":/icons/bitcoin"), tr("Show/Hide &8Coin"), this); toggleHideAction->setToolTip(tr("Show or hide the 8Coin window")); exportAction = new QAction(QIcon(":/icons/export"), tr("&Export..."), this); exportAction->setToolTip(tr("Export the data in the current tab to a file")); encryptWalletAction = new QAction(QIcon(":/icons/lock_closed"), tr("&Encrypt Wallet..."), this); encryptWalletAction->setToolTip(tr("Encrypt or decrypt wallet")); encryptWalletAction->setCheckable(true); backupWalletAction = new QAction(QIcon(":/icons/filesave"), tr("&Backup Wallet..."), this); backupWalletAction->setToolTip(tr("Backup wallet to another location")); changePassphraseAction = new QAction(QIcon(":/icons/key"), tr("&Change Passphrase..."), this); changePassphraseAction->setToolTip(tr("Change the passphrase used for wallet encryption")); openRPCConsoleAction = new QAction(QIcon(":/icons/debugwindow"), tr("&Debug window"), this); openRPCConsoleAction->setToolTip(tr("Open debugging and diagnostic console")); connect(quitAction, SIGNAL(triggered()), qApp, SLOT(quit())); connect(optionsAction, SIGNAL(triggered()), this, SLOT(optionsClicked())); connect(aboutAction, SIGNAL(triggered()), this, SLOT(aboutClicked())); connect(aboutQtAction, SIGNAL(triggered()), qApp, SLOT(aboutQt())); connect(toggleHideAction, SIGNAL(triggered()), this, SLOT(toggleHidden())); connect(encryptWalletAction, SIGNAL(triggered(bool)), this, SLOT(encryptWallet(bool))); connect(backupWalletAction, 
SIGNAL(triggered()), this, SLOT(backupWallet())); connect(changePassphraseAction, SIGNAL(triggered()), this, SLOT(changePassphrase())); } void BitcoinGUI::createMenuBar() { #ifdef Q_WS_MAC // Create a decoupled menu bar on Mac which stays even if the window is closed appMenuBar = new QMenuBar(); #else // Get the main window's menu bar on other platforms appMenuBar = menuBar(); #endif // Configure the menus QMenu *file = appMenuBar->addMenu(tr("&File")); file->addAction(backupWalletAction); file->addAction(exportAction); #ifndef FIRST_CLASS_MESSAGING file->addAction(signMessageAction); file->addAction(verifyMessageAction); #endif file->addSeparator(); file->addAction(quitAction); QMenu *settings = appMenuBar->addMenu(tr("&Settings")); settings->addAction(encryptWalletAction); settings->addAction(changePassphraseAction); settings->addSeparator(); settings->addAction(optionsAction); QMenu *help = appMenuBar->addMenu(tr("&Help")); help->addAction(openRPCConsoleAction); help->addSeparator(); help->addAction(aboutAction); help->addAction(aboutQtAction); } void BitcoinGUI::createToolBars() { QToolBar *toolbar = addToolBar(tr("Tabs toolbar")); toolbar->setToolButtonStyle(Qt::ToolButtonTextBesideIcon); toolbar->addAction(overviewAction); toolbar->addAction(sendCoinsAction); toolbar->addAction(receiveCoinsAction); toolbar->addAction(historyAction); toolbar->addAction(addressBookAction); toolbar->addAction(miningAction); #ifdef FIRST_CLASS_MESSAGING toolbar->addAction(firstClassMessagingAction); #endif QToolBar *toolbar2 = addToolBar(tr("Actions toolbar")); toolbar2->setToolButtonStyle(Qt::ToolButtonTextBesideIcon); toolbar2->addAction(exportAction); } void BitcoinGUI::setClientModel(ClientModel *clientModel) { this->clientModel = clientModel; if(clientModel) { // Replace some strings and icons, when using the testnet if(clientModel->isTestNet()) { setWindowTitle(windowTitle() + QString(" ") + tr("[testnet]")); #ifndef Q_WS_MAC 
qApp->setWindowIcon(QIcon(":icons/bitcoin_testnet")); setWindowIcon(QIcon(":icons/bitcoin_testnet")); #else MacDockIconHandler::instance()->setIcon(QIcon(":icons/bitcoin_testnet")); #endif if(trayIcon) { trayIcon->setToolTip(tr("8Coin client") + QString(" ") + tr("[testnet]")); trayIcon->setIcon(QIcon(":/icons/toolbar_testnet")); toggleHideAction->setIcon(QIcon(":/icons/toolbar_testnet")); } aboutAction->setIcon(QIcon(":/icons/toolbar_testnet")); } // Keep up to date with client setNumConnections(clientModel->getNumConnections()); connect(clientModel, SIGNAL(numConnectionsChanged(int)), this, SLOT(setNumConnections(int))); setNumBlocks(clientModel->getNumBlocks(), clientModel->getNumBlocksOfPeers()); connect(clientModel, SIGNAL(numBlocksChanged(int,int)), this, SLOT(setNumBlocks(int,int))); setMining(false, 0); connect(clientModel, SIGNAL(miningChanged(bool,int)), this, SLOT(setMining(bool,int))); // Report errors from network/worker thread connect(clientModel, SIGNAL(error(QString,QString,bool)), this, SLOT(error(QString,QString,bool))); rpcConsole->setClientModel(clientModel); addressBookPage->setOptionsModel(clientModel->getOptionsModel()); receiveCoinsPage->setOptionsModel(clientModel->getOptionsModel()); } } void BitcoinGUI::setWalletModel(WalletModel *walletModel) { this->walletModel = walletModel; if(walletModel) { // Report errors from wallet thread connect(walletModel, SIGNAL(error(QString,QString,bool)), this, SLOT(error(QString,QString,bool))); // Put transaction list in tabs transactionView->setModel(walletModel); overviewPage->setModel(walletModel); addressBookPage->setModel(walletModel->getAddressTableModel()); receiveCoinsPage->setModel(walletModel->getAddressTableModel()); sendCoinsPage->setModel(walletModel); signVerifyMessageDialog->setModel(walletModel); miningPage->setModel(clientModel); setEncryptionStatus(walletModel->getEncryptionStatus()); connect(walletModel, SIGNAL(encryptionStatusChanged(int)), this, SLOT(setEncryptionStatus(int))); // 
Balloon popup for new transaction connect(walletModel->getTransactionTableModel(), SIGNAL(rowsInserted(QModelIndex,int,int)), this, SLOT(incomingTransaction(QModelIndex,int,int))); // Ask for passphrase if needed connect(walletModel, SIGNAL(requireUnlock()), this, SLOT(unlockWallet())); } } void BitcoinGUI::createTrayIcon() { QMenu *trayIconMenu; #ifndef Q_WS_MAC trayIcon = new QSystemTrayIcon(this); trayIconMenu = new QMenu(this); trayIcon->setContextMenu(trayIconMenu); trayIcon->setToolTip(tr("8Coin client")); trayIcon->setIcon(QIcon(":/icons/toolbar")); connect(trayIcon, SIGNAL(activated(QSystemTrayIcon::ActivationReason)), this, SLOT(trayIconActivated(QSystemTrayIcon::ActivationReason))); trayIcon->show(); #else // Note: On Mac, the dock icon is used to provide the tray's functionality. MacDockIconHandler *dockIconHandler = MacDockIconHandler::instance(); trayIconMenu = dockIconHandler->dockMenu(); #endif // Configuration of the tray icon (or dock icon) icon menu trayIconMenu->addAction(toggleHideAction); trayIconMenu->addSeparator(); trayIconMenu->addAction(sendCoinsAction); trayIconMenu->addAction(receiveCoinsAction); #ifndef FIRST_CLASS_MESSAGING trayIconMenu->addSeparator(); #endif trayIconMenu->addAction(signMessageAction); trayIconMenu->addAction(verifyMessageAction); trayIconMenu->addSeparator(); trayIconMenu->addAction(optionsAction); trayIconMenu->addAction(openRPCConsoleAction); #ifndef Q_WS_MAC // This is built-in on Mac trayIconMenu->addSeparator(); trayIconMenu->addAction(quitAction); #endif notificator = new Notificator(qApp->applicationName(), trayIcon); } #ifndef Q_WS_MAC void BitcoinGUI::trayIconActivated(QSystemTrayIcon::ActivationReason reason) { if(reason == QSystemTrayIcon::Trigger) { // Click on system tray icon triggers "show/hide 8Coin" toggleHideAction->trigger(); } } #endif void BitcoinGUI::optionsClicked() { if(!clientModel || !clientModel->getOptionsModel()) return; OptionsDialog dlg; dlg.setModel(clientModel->getOptionsModel()); 
dlg.exec(); } void BitcoinGUI::aboutClicked() { AboutDialog dlg; dlg.setModel(clientModel); dlg.exec(); } void BitcoinGUI::setNumConnections(int count) { QString icon; switch(count) { case 0: icon = ":/icons/connect_0"; break; case 1: case 2: case 3: icon = ":/icons/connect_1"; break; case 4: case 5: case 6: icon = ":/icons/connect_2"; break; case 7: case 8: case 9: icon = ":/icons/connect_3"; break; default: icon = ":/icons/connect_4"; break; } labelConnectionsIcon->setPixmap(QIcon(icon).pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); labelConnectionsIcon->setToolTip(tr("%n active connection(s) to 8Coin network", "", count)); } void BitcoinGUI::setNumBlocks(int count, int nTotalBlocks) { // don't show / hide progressBar and it's label if we have no connection(s) to the network if (!clientModel || clientModel->getNumConnections() == 0) { progressBarLabel->setVisible(false); progressBar->setVisible(false); return; } QString tooltip; if(count < nTotalBlocks) { int nRemainingBlocks = nTotalBlocks - count; float nPercentageDone = count / (nTotalBlocks * 0.01f); if (clientModel->getStatusBarWarnings() == "") { progressBarLabel->setText(tr("Synchronizing with network...")); progressBarLabel->setVisible(true); progressBar->setFormat(tr("~%n block(s) remaining", "", nRemainingBlocks)); progressBar->setMaximum(nTotalBlocks); progressBar->setValue(count); progressBar->setVisible(true); } else { progressBarLabel->setText(clientModel->getStatusBarWarnings()); progressBarLabel->setVisible(true); progressBar->setVisible(false); } tooltip = tr("Downloaded %1 of %2 blocks of transaction history (%3% done).").arg(count).arg(nTotalBlocks).arg(nPercentageDone, 0, 'f', 2); } else { if (clientModel->getStatusBarWarnings() == "") progressBarLabel->setVisible(false); else { progressBarLabel->setText(clientModel->getStatusBarWarnings()); progressBarLabel->setVisible(true); } progressBar->setVisible(false); tooltip = tr("Downloaded %1 blocks of transaction history.").arg(count); } tooltip 
= tr("Current difficulty is %1.").arg(clientModel->GetDifficulty()) + QString("<br>") + tooltip; QDateTime now = QDateTime::currentDateTime(); QDateTime lastBlockDate = clientModel->getLastBlockDate(); int secs = lastBlockDate.secsTo(now); QString text; // Represent time from last generated block in human readable text if(secs <= 0) { // Fully up to date. Leave text empty. } else if(secs < 60) { text = tr("%n second(s) ago","",secs); } else if(secs < 60*60) { text = tr("%n minute(s) ago","",secs/60); } else if(secs < 24*60*60) { text = tr("%n hour(s) ago","",secs/(60*60)); } else { text = tr("%n day(s) ago","",secs/(60*60*24)); } // Set icon state: spinning if catching up, tick otherwise if(secs < 90*60 && count >= nTotalBlocks) { tooltip = tr("Up to date") + QString(".<br>") + tooltip; labelBlocksIcon->setPixmap(QIcon(":/icons/synced").pixmap(STATUSBAR_ICONSIZE, STATUSBAR_ICONSIZE)); overviewPage->showOutOfSyncWarning(false); } else { tooltip = tr("Catching up...") + QString("<br>") + tooltip; labelBlocksIcon->setMovie(syncIconMovie); syncIconMovie->start(); overviewPage->showOutOfSyncWarning(true); } if(!text.isEmpty()) { tooltip += QString("<br>"); tooltip += tr("Last received block was generated %1.").arg(text); } // Don't word-wrap this (fixed-width) tooltip tooltip = QString("<nobr>") + tooltip + QString("</nobr>"); labelBlocksIcon->setToolTip(tooltip); progressBarLabel->setToolTip(tooltip); progressBar->setToolTip(tooltip); } void BitcoinGUI::setMining(bool mining, int hashrate) { if (mining) { labelMiningIcon->setPixmap(QIcon(":/icons/mining_active").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); labelMiningIcon->setToolTip(tr("Mining 8Coin at %1 hashes per second").arg(hashrate)); } else { labelMiningIcon->setPixmap(QIcon(":/icons/mining_inactive").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); labelMiningIcon->setToolTip(tr("Not mining 8Coin")); } } void BitcoinGUI::error(const QString &title, const QString &message, bool modal) { // Report errors from 
network/worker thread if(modal) { QMessageBox::critical(this, title, message, QMessageBox::Ok, QMessageBox::Ok); } else { notificator->notify(Notificator::Critical, title, message); } } void BitcoinGUI::changeEvent(QEvent *e) { QMainWindow::changeEvent(e); #ifndef Q_WS_MAC // Ignored on Mac if(e->type() == QEvent::WindowStateChange) { if(clientModel && clientModel->getOptionsModel()->getMinimizeToTray()) { QWindowStateChangeEvent *wsevt = static_cast<QWindowStateChangeEvent*>(e); if(!(wsevt->oldState() & Qt::WindowMinimized) && isMinimized()) { QTimer::singleShot(0, this, SLOT(hide())); e->ignore(); } } } #endif } void BitcoinGUI::closeEvent(QCloseEvent *event) { if(clientModel) { #ifndef Q_WS_MAC // Ignored on Mac if(!clientModel->getOptionsModel()->getMinimizeToTray() && !clientModel->getOptionsModel()->getMinimizeOnClose()) { qApp->quit(); } #endif } QMainWindow::closeEvent(event); } void BitcoinGUI::askFee(qint64 nFeeRequired, bool *payFee) { QString strMessage = tr("This transaction is over the size limit. You can still send it for a fee of %1, " "which goes to the nodes that process your transaction and helps to support the network. 
" "Do you want to pay the fee?").arg( BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, nFeeRequired)); QMessageBox::StandardButton retval = QMessageBox::question( this, tr("Confirm transaction fee"), strMessage, QMessageBox::Yes|QMessageBox::Cancel, QMessageBox::Yes); *payFee = (retval == QMessageBox::Yes); } void BitcoinGUI::incomingTransaction(const QModelIndex & parent, int start, int end) { if(!walletModel || !clientModel) return; TransactionTableModel *ttm = walletModel->getTransactionTableModel(); qint64 amount = ttm->index(start, TransactionTableModel::Amount, parent) .data(Qt::EditRole).toULongLong(); if(!clientModel->inInitialBlockDownload()) { // On new transaction, make an info balloon // Unless the initial block download is in progress, to prevent balloon-spam QString date = ttm->index(start, TransactionTableModel::Date, parent) .data().toString(); QString type = ttm->index(start, TransactionTableModel::Type, parent) .data().toString(); QString address = ttm->index(start, TransactionTableModel::ToAddress, parent) .data().toString(); QIcon icon = qvariant_cast<QIcon>(ttm->index(start, TransactionTableModel::ToAddress, parent) .data(Qt::DecorationRole)); notificator->notify(Notificator::Information, (amount)<0 ? 
tr("Sent transaction") : tr("Incoming transaction"), tr("Date: %1\n" "Amount: %2\n" "Type: %3\n" "Address: %4\n") .arg(date) .arg(BitcoinUnits::formatWithUnit(walletModel->getOptionsModel()->getDisplayUnit(), amount, true)) .arg(type) .arg(address), icon); } } void BitcoinGUI::gotoOverviewPage() { overviewAction->setChecked(true); centralWidget->setCurrentWidget(overviewPage); exportAction->setEnabled(false); disconnect(exportAction, SIGNAL(triggered()), 0, 0); } void BitcoinGUI::gotoMiningPage() { miningAction->setChecked(true); centralWidget->setCurrentWidget(miningPage); exportAction->setEnabled(false); disconnect(exportAction, SIGNAL(triggered()), 0, 0); } void BitcoinGUI::gotoHistoryPage() { historyAction->setChecked(true); centralWidget->setCurrentWidget(transactionsPage); exportAction->setEnabled(true); disconnect(exportAction, SIGNAL(triggered()), 0, 0); connect(exportAction, SIGNAL(triggered()), transactionView, SLOT(exportClicked())); } void BitcoinGUI::gotoAddressBookPage() { addressBookAction->setChecked(true); centralWidget->setCurrentWidget(addressBookPage); exportAction->setEnabled(true); disconnect(exportAction, SIGNAL(triggered()), 0, 0); connect(exportAction, SIGNAL(triggered()), addressBookPage, SLOT(exportClicked())); } void BitcoinGUI::gotoReceiveCoinsPage() { receiveCoinsAction->setChecked(true); centralWidget->setCurrentWidget(receiveCoinsPage); exportAction->setEnabled(true); disconnect(exportAction, SIGNAL(triggered()), 0, 0); connect(exportAction, SIGNAL(triggered()), receiveCoinsPage, SLOT(exportClicked())); } void BitcoinGUI::gotoSendCoinsPage() { sendCoinsAction->setChecked(true); centralWidget->setCurrentWidget(sendCoinsPage); exportAction->setEnabled(false); disconnect(exportAction, SIGNAL(triggered()), 0, 0); } void BitcoinGUI::gotoSignMessageTab(QString addr) { #ifdef FIRST_CLASS_MESSAGING firstClassMessagingAction->setChecked(true); centralWidget->setCurrentWidget(signVerifyMessageDialog); exportAction->setEnabled(false); 
disconnect(exportAction, SIGNAL(triggered()), 0, 0); signVerifyMessageDialog->showTab_SM(false); #else // call show() in showTab_SM() signVerifyMessageDialog->showTab_SM(true); #endif if(!addr.isEmpty()) signVerifyMessageDialog->setAddress_SM(addr); } void BitcoinGUI::gotoVerifyMessageTab(QString addr) { #ifdef FIRST_CLASS_MESSAGING firstClassMessagingAction->setChecked(true); centralWidget->setCurrentWidget(signVerifyMessageDialog); exportAction->setEnabled(false); disconnect(exportAction, SIGNAL(triggered()), 0, 0); signVerifyMessageDialog->showTab_VM(false); #else // call show() in showTab_VM() signVerifyMessageDialog->showTab_VM(true); #endif if(!addr.isEmpty()) signVerifyMessageDialog->setAddress_VM(addr); } void BitcoinGUI::dragEnterEvent(QDragEnterEvent *event) { // Accept only URIs if(event->mimeData()->hasUrls()) event->acceptProposedAction(); } void BitcoinGUI::dropEvent(QDropEvent *event) { if(event->mimeData()->hasUrls()) { int nValidUrisFound = 0; QList<QUrl> uris = event->mimeData()->urls(); foreach(const QUrl &uri, uris) { if (sendCoinsPage->handleURI(uri.toString())) nValidUrisFound++; } // if valid URIs were found if (nValidUrisFound) gotoSendCoinsPage(); else notificator->notify(Notificator::Warning, tr("URI handling"), tr("URI can not be parsed! This can be caused by an invalid 8Coin address or malformed URI parameters.")); } event->acceptProposedAction(); } void BitcoinGUI::handleURI(QString strURI) { // URI has to be valid if (sendCoinsPage->handleURI(strURI)) { showNormalIfMinimized(); gotoSendCoinsPage(); } else notificator->notify(Notificator::Warning, tr("URI handling"), tr("URI can not be parsed! 
This can be caused by an invalid 8Coin address or malformed URI parameters.")); } void BitcoinGUI::setEncryptionStatus(int status) { switch(status) { case WalletModel::Unencrypted: labelEncryptionIcon->hide(); encryptWalletAction->setChecked(false); changePassphraseAction->setEnabled(false); encryptWalletAction->setEnabled(true); break; case WalletModel::Unlocked: labelEncryptionIcon->show(); labelEncryptionIcon->setPixmap(QIcon(":/icons/lock_open").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); labelEncryptionIcon->setToolTip(tr("Wallet is <b>encrypted</b> and currently <b>unlocked</b>")); encryptWalletAction->setChecked(true); changePassphraseAction->setEnabled(true); encryptWalletAction->setEnabled(false); // TODO: decrypt currently not supported break; case WalletModel::Locked: labelEncryptionIcon->show(); labelEncryptionIcon->setPixmap(QIcon(":/icons/lock_closed").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); labelEncryptionIcon->setToolTip(tr("Wallet is <b>encrypted</b> and currently <b>locked</b>")); encryptWalletAction->setChecked(true); changePassphraseAction->setEnabled(true); encryptWalletAction->setEnabled(false); // TODO: decrypt currently not supported break; } } void BitcoinGUI::encryptWallet(bool status) { if(!walletModel) return; AskPassphraseDialog dlg(status ? 
AskPassphraseDialog::Encrypt: AskPassphraseDialog::Decrypt, this); dlg.setModel(walletModel); dlg.exec(); setEncryptionStatus(walletModel->getEncryptionStatus()); } void BitcoinGUI::backupWallet() { QString saveDir = QDesktopServices::storageLocation(QDesktopServices::DocumentsLocation); QString filename = QFileDialog::getSaveFileName(this, tr("Backup Wallet"), saveDir, tr("Wallet Data (*.dat)")); if(!filename.isEmpty()) { if(!walletModel->backupWallet(filename)) { QMessageBox::warning(this, tr("Backup Failed"), tr("There was an error trying to save the wallet data to the new location.")); } } } void BitcoinGUI::changePassphrase() { AskPassphraseDialog dlg(AskPassphraseDialog::ChangePass, this); dlg.setModel(walletModel); dlg.exec(); } void BitcoinGUI::unlockWallet() { if(!walletModel) return; // Unlock wallet when requested by wallet model if(walletModel->getEncryptionStatus() == WalletModel::Locked) { AskPassphraseDialog dlg(AskPassphraseDialog::Unlock, this); dlg.setModel(walletModel); dlg.exec(); } } void BitcoinGUI::showNormalIfMinimized(bool fToggleHidden) { // activateWindow() (sometimes) helps with keyboard focus on Windows if (isHidden()) { show(); activateWindow(); } else if (isMinimized()) { showNormal(); activateWindow(); } else if (GUIUtil::isObscured(this)) { raise(); activateWindow(); } else if(fToggleHidden) hide(); } void BitcoinGUI::toggleHidden() { showNormalIfMinimized(true); }
8coin/8coin
src/qt/bitcoingui.cpp
C++
mit
35,605
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magento.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magento.com for more information. * * @category Mage * @package Mage_Sales * @copyright Copyright (c) 2006-2016 X.commerce, Inc. and affiliates (http://www.magento.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Enter description here ... * * @category Mage * @package Mage_Sales * @author Magento Core Team <core@magentocommerce.com> */ class Mage_Sales_Model_Mysql4_Quote_Address_Attribute_Backend_Region extends Mage_Sales_Model_Resource_Quote_Address_Attribute_Backend_Region { }
hansbonini/cloud9-magento
www/app/code/core/Mage/Sales/Model/Mysql4/Quote/Address/Attribute/Backend/Region.php
PHP
mit
1,261
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@zend.com so we can send you a copy immediately. * * @category Zend * @package Zend_Amf * @subpackage Value * @copyright Copyright (c) 2005-2009 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id: MessageBody.php 18951 2009-11-12 16:26:19Z alexander $ */ /** * An AMF Message contains information about the actual individual * transaction that is to be performed. It specifies the remote * operation that is to be performed; a local (client) operation * to be invoked upon success; and, the data to be used in the * operation. * <p/> * This Message structure defines how a local client would * invoke a method/operation on a remote server. Additionally, * the response from the Server is structured identically. * * @package Zend_Amf * @subpackage Value * @copyright Copyright (c) 2005-2009 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ class Zend_Amf_Value_MessageBody { /** * A string describing which operation, function, or method * is to be remotley invoked. * @var string */ protected $_targetUri = ""; /** * Universal Resource Identifier that uniquely targets the originator's * Object that should receive the server's response. The server will * use this path specification to target the "OnResult()" or "onStatus()" * handlers within the client. For Flash, it specifies an ActionScript * Object path only. The NetResponse object pointed to by the Response Uri * contains the connection state information. 
Passing/specifying this * provides a convenient mechanism for the client/server to share access * to an object that is managing the state of the shared connection. * * Since the server will use this field in the event of an error, * this field is required even if a successful server request would * not be expected to return a value to the client. * * @var string */ protected $_responseUri = ""; /** * Contains the actual data associated with the operation. It contains * the client's parameter data that is passed to the server's operation/method. * When serializing a root level data type or a parameter list array, no * name field is included. That is, the data is anonomously represented * as "Type Marker"/"Value" pairs. When serializing member data, the data is * represented as a series of "Name"/"Type"/"Value" combinations. * * For server generated responses, it may contain any ActionScript * data/objects that the server was expected to provide. * * @var string */ protected $_data; /** * Constructor * * @param string $targetUri * @param string $responseUri * @param string $data * @return void */ public function __construct($targetUri, $responseUri, $data) { $this->setTargetUri($targetUri); $this->setResponseUri($responseUri); $this->setData($data); } /** * Retrieve target Uri * * @return string */ public function getTargetUri() { return $this->_targetUri; } /** * Set target Uri * * @param string $targetUri * @return Zend_Amf_Value_MessageBody */ public function setTargetUri($targetUri) { if (null === $targetUri) { $targetUri = ''; } $this->_targetUri = (string) $targetUri; return $this; } /** * Get target Uri * * @return string */ public function getResponseUri() { return $this->_responseUri; } /** * Set response Uri * * @param string $responseUri * @return Zend_Amf_Value_MessageBody */ public function setResponseUri($responseUri) { if (null === $responseUri) { $responseUri = ''; } $this->_responseUri = $responseUri; return $this; } /** * Retrieve response data * * 
@return string */ public function getData() { return $this->_data; } /** * Set response data * * @param mixed $data * @return Zend_Amf_Value_MessageBody */ public function setData($data) { $this->_data = $data; return $this; } /** * Set reply method * * @param string $methodName * @return Zend_Amf_Value_MessageBody */ public function setReplyMethod($methodName) { if (!preg_match('#^[/?]#', $methodName)) { $this->_targetUri = rtrim($this->_targetUri, '/') . '/'; } $this->_targetUri = $this->_targetUri . $methodName; return $this; } }
jpasosa/agroapex
lib/vendor/Zend/Amf/Value/MessageBody.php
PHP
mit
5,286
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // <auto-generated/> #nullable disable using System.Text.Json; using Azure.Core; namespace Azure.ResourceManager.Communication.Models { public partial class OperationDisplay { internal static OperationDisplay DeserializeOperationDisplay(JsonElement element) { Optional<string> provider = default; Optional<string> resource = default; Optional<string> operation = default; Optional<string> description = default; foreach (var property in element.EnumerateObject()) { if (property.NameEquals("provider")) { provider = property.Value.GetString(); continue; } if (property.NameEquals("resource")) { resource = property.Value.GetString(); continue; } if (property.NameEquals("operation")) { operation = property.Value.GetString(); continue; } if (property.NameEquals("description")) { description = property.Value.GetString(); continue; } } return new OperationDisplay(provider.Value, resource.Value, operation.Value, description.Value); } } }
brjohnstmsft/azure-sdk-for-net
sdk/communication/Azure.ResourceManager.Communication/src/Generated/Models/OperationDisplay.Serialization.cs
C#
mit
1,523
module PublicActivity module ORM module ActiveRecord # The ActiveRecord model containing # details about recorded activity. class Activity < ::ActiveRecord::Base include Renderable self.table_name = PublicActivity.config.table_name # Define polymorphic association to the parent belongs_to :trackable, :polymorphic => true # Define ownership to a resource responsible for this activity belongs_to :owner, :polymorphic => true # Define ownership to a resource targeted by this activity belongs_to :recipient, :polymorphic => true # Serialize parameters Hash serialize :parameters, Hash if ::ActiveRecord::VERSION::MAJOR < 4 attr_accessible :key, :owner, :parameters, :recipient, :trackable end end end end end
FlavourSys/public_activity
lib/public_activity/orm/active_record/activity.rb
Ruby
mit
853
/******************************************************************************* * Copyright (c) 2000, 2011 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.ui.compare; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.core.runtime.CoreException; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.text.Document; import org.eclipse.jface.text.source.SourceViewer; import org.eclipse.compare.IStreamContentAccessor; import org.eclipse.jdt.ui.text.JavaTextTools; import org.eclipse.jdt.internal.ui.JavaPlugin; import org.eclipse.jdt.internal.ui.propertiesfileeditor.IPropertiesFilePartitions; import org.eclipse.jdt.internal.ui.propertiesfileeditor.PropertiesFileSourceViewerConfiguration; /** * Properties file viewer. 
* * @since 3.1 */ public class PropertiesFileViewer extends Viewer { private SourceViewer fSourceViewer; private Object fInput; PropertiesFileViewer(Composite parent) { fSourceViewer= new SourceViewer(parent, null, SWT.LEFT_TO_RIGHT | SWT.H_SCROLL | SWT.V_SCROLL); JavaTextTools tools= JavaCompareUtilities.getJavaTextTools(); if (tools != null) { IPreferenceStore store= JavaPlugin.getDefault().getCombinedPreferenceStore(); fSourceViewer.configure(new PropertiesFileSourceViewerConfiguration(tools.getColorManager(), store, null, IPropertiesFilePartitions.PROPERTIES_FILE_PARTITIONING)); } fSourceViewer.setEditable(false); String symbolicFontName= PropertiesFileMergeViewer.class.getName(); Font font= JFaceResources.getFont(symbolicFontName); if (font != null) fSourceViewer.getTextWidget().setFont(font); } @Override public Control getControl() { return fSourceViewer.getControl(); } @Override public void setInput(Object input) { if (input instanceof IStreamContentAccessor) { Document document= new Document(getString(input)); JavaCompareUtilities.setupPropertiesFileDocument(document); fSourceViewer.setDocument(document); } fInput= input; } @Override public Object getInput() { return fInput; } @Override public ISelection getSelection() { return null; } @Override public void setSelection(ISelection s, boolean reveal) { } @Override public void refresh() { } /** * A helper method to retrieve the contents of the given object * if it implements the IStreamContentAccessor interface. */ private static String getString(Object input) { if (input instanceof IStreamContentAccessor) { IStreamContentAccessor sca= (IStreamContentAccessor) input; try { return JavaCompareUtilities.readString(sca); } catch (CoreException ex) { JavaPlugin.log(ex); } } return ""; //$NON-NLS-1$ } }
brunyuriy/quick-fix-scout
org.eclipse.jdt.ui_3.7.1.r371_v20110824-0800/src/org/eclipse/jdt/internal/ui/compare/PropertiesFileViewer.java
Java
mit
3,328
'use strict'; var fs = require('fs'); var Promise = require('../ext/promise'); var readFile = Promise.denodeify(fs.readFile); var lstat = Promise.denodeify(fs.stat); var chalk = require('chalk'); var EditFileDiff = require('./edit-file-diff'); var EOL = require('os').EOL; var isBinaryFile = require('isbinaryfile').sync; var template = require('lodash/template'); var canEdit = require('../utilities/open-editor').canEdit; function processTemplate(content, context) { var options = { evaluate: /<%([\s\S]+?)%>/g, interpolate: /<%=([\s\S]+?)%>/g, escape: /<%-([\s\S]+?)%>/g }; return template(content, options)(context); } function diffHighlight(line) { if (line[0] === '+') { return chalk.green(line); } else if (line[0] === '-') { return chalk.red(line); } else if (line.match(/^@@/)) { return chalk.cyan(line); } else { return line; } } FileInfo.prototype.confirmOverwrite = function(path) { var promptOptions = { type: 'expand', name: 'answer', default: false, message: chalk.red('Overwrite') + ' ' + path + '?', choices: [ { key: 'y', name: 'Yes, overwrite', value: 'overwrite' }, { key: 'n', name: 'No, skip', value: 'skip' }, { key: 'd', name: 'Diff', value: 'diff' } ] }; if (canEdit()) { promptOptions.choices.push({ key: 'e', name: 'Edit', value: 'edit' }); } return this.ui.prompt(promptOptions) .then(function(response) { return response.answer; }); }; FileInfo.prototype.displayDiff = function() { var info = this, jsdiff = require('diff'); return Promise.hash({ input: this.render(), output: readFile(info.outputPath) }).then(function(result) { var diff = jsdiff.createPatch( info.outputPath, result.output.toString(), result.input ); var lines = diff.split('\n'); for (var i = 0; i < lines.length; i++) { info.ui.write( diffHighlight(lines[i] + EOL) ); } }); }; function FileInfo(options) { this.action = options.action; this.outputPath = options.outputPath; this.displayPath = options.displayPath; this.inputPath = options.inputPath; this.templateVariables = 
options.templateVariables; this.ui = options.ui; } FileInfo.prototype.render = function() { var path = this.inputPath, context = this.templateVariables; if (!this.rendered) { this.rendered = readFile(path).then(function(content) { return lstat(path).then(function(fileStat) { if (isBinaryFile(content, fileStat.size)) { return content; } else { try { return processTemplate(content.toString(), context); } catch (err) { err.message += ' (Error in blueprint template: ' + path + ')'; throw err; } } }); }); } return this.rendered; }; FileInfo.prototype.checkForConflict = function() { return new Promise(function (resolve, reject) { fs.exists(this.outputPath, function (doesExist, error) { if (error) { reject(error); return; } var result; if (doesExist) { result = Promise.hash({ input: this.render(), output: readFile(this.outputPath) }).then(function(result) { var type; if (result.input === result.output.toString()) { type = 'identical'; } else { type = 'confirm'; } return type; }.bind(this)); } else { result = 'none'; } resolve(result); }.bind(this)); }.bind(this)); }; FileInfo.prototype.confirmOverwriteTask = function() { var info = this; return function() { return new Promise(function(resolve, reject) { function doConfirm() { info.confirmOverwrite(info.displayPath).then(function(action) { if (action === 'diff') { info.displayDiff().then(doConfirm, reject); } else if (action === 'edit') { var editFileDiff = new EditFileDiff({info: info}); editFileDiff.edit().then(function() { info.action = action; resolve(info); }).catch(function() { doConfirm() .finally(function() { resolve(info); }); }); } else { info.action = action; resolve(info); } }, reject); } doConfirm(); }); }.bind(this); }; module.exports = FileInfo;
pixelhandler/ember-cli
lib/models/file-info.js
JavaScript
mit
4,573
require 'nokogiri' module ActiveMerchant #:nodoc: module Billing #:nodoc: class BorgunGateway < Gateway self.display_name = 'Borgun' self.homepage_url = 'http://www.borgun.com' self.test_url = 'https://gatewaytest.borgun.is/ws/Heimir.pub.ws:Authorization' self.live_url = 'https://gateway01.borgun.is/ws/Heimir.pub.ws:Authorization' self.supported_countries = ['IS', 'GB', 'HU', 'CZ', 'DE', 'DK', 'SE' ] self.default_currency = 'ISK' self.money_format = :cents self.supported_cardtypes = [:visa, :master, :american_express, :diners_club, :discover, :jcb] self.homepage_url = 'https://www.borgun.is/' def initialize(options={}) requires!(options, :processor, :merchant_id, :username, :password) super end def purchase(money, payment, options={}) post = {} post[:TransType] = '1' add_invoice(post, money, options) add_payment_method(post, payment) commit('sale', post) end def authorize(money, payment, options={}) post = {} post[:TransType] = '5' add_invoice(post, money, options) add_payment_method(post, payment) commit('authonly', post) end def capture(money, authorization, options={}) post = {} post[:TransType] = '1' add_invoice(post, money, options) add_reference(post, authorization) commit('capture', post) end def refund(money, authorization, options={}) post = {} post[:TransType] = '3' add_invoice(post, money, options) add_reference(post, authorization) commit('refund', post) end def void(authorization, options={}) post = {} # TransType, TrAmount, and currency must match original values from auth or purchase. _, _, _, _, _, transtype, tramount, currency = split_authorization(authorization) post[:TransType] = transtype options[:currency] = options[:currency] || CURRENCY_CODES.key(currency) add_invoice(post, tramount.to_i, options) add_reference(post, authorization) commit('void', post) end def supports_scrubbing true end def scrub(transcript) transcript.gsub(%r((&lt;PAN&gt;)[^&]*(&lt;/PAN&gt;))i, '\1[FILTERED]\2'). 
gsub(%r((&lt;CVC2&gt;)[^&]*(&lt;/CVC2&gt;))i, '\1[FILTERED]\2'). gsub(%r(((?:\r\n)?Authorization: Basic )[^\r\n]+(\r\n)?), '\1[FILTERED]\2') end private CURRENCY_CODES = Hash.new { |h, k| raise ArgumentError.new("Unsupported currency for HDFC: #{k}") } CURRENCY_CODES['ISK'] = '352' CURRENCY_CODES['EUR'] = '978' CURRENCY_CODES['USD'] = '840' def add_invoice(post, money, options) post[:TrAmount] = amount(money) post[:TrCurrency] = CURRENCY_CODES[options[:currency] || currency(money)] post[:TerminalID] = options[:terminal_id] || '1' end def add_payment_method(post, payment_method) post[:PAN] = payment_method.number post[:ExpDate] = format(payment_method.year, :two_digits) + format(payment_method.month, :two_digits) post[:CVC2] = payment_method.verification_value post[:DateAndTime] = Time.now.strftime('%y%m%d%H%M%S') post[:RRN] = 'AMRCNT' + six_random_digits end def add_reference(post, authorization) dateandtime, _batch, transaction, rrn, authcode, _, _, _ = split_authorization(authorization) post[:DateAndTime] = dateandtime post[:Transaction] = transaction post[:RRN] = rrn post[:AuthCode] = authcode end def parse(xml) response = {} doc = Nokogiri::XML(CGI.unescapeHTML(xml)) body = doc.xpath('//getAuthorizationReply') body = doc.xpath('//cancelAuthorizationReply') if body.length == 0 body.children.each do |node| if node.text? next elsif node.elements.size == 0 response[node.name.downcase.to_sym] = node.text else node.elements.each do |childnode| name = "#{node.name.downcase}_#{childnode.name.downcase}" response[name.to_sym] = childnode.text end end end response end def commit(action, post) post[:Version] = '1000' post[:Processor] = @options[:processor] post[:MerchantID] = @options[:merchant_id] request = build_request(action, post) raw = ssl_post(url(action), request, headers) pairs = parse(raw) success = success_from(pairs) Response.new( success, message_from(success, pairs), pairs, authorization: authorization_from(pairs), test: test? 
) end def success_from(response) (response[:actioncode] == '000') end def message_from(succeeded, response) if succeeded 'Succeeded' else response[:message] || "Error with ActionCode=#{response[:actioncode]}" end end def authorization_from(response) [ response[:dateandtime], response[:batch], response[:transaction], response[:rrn], response[:authcode], response[:transtype], response[:tramount], response[:trcurrency] ].join('|') end def split_authorization(authorization) dateandtime, batch, transaction, rrn, authcode, transtype, tramount, currency = authorization.split('|') [dateandtime, batch, transaction, rrn, authcode, transtype, tramount, currency] end def headers { 'Authorization' => 'Basic ' + Base64.strict_encode64(@options[:username].to_s + ':' + @options[:password].to_s), } end def build_request(action, post) mode = action == 'void' ? 'cancel' : 'get' xml = Builder::XmlMarkup.new :indent => 18 xml.instruct!(:xml, :version => '1.0', :encoding => 'utf-8') xml.tag!("#{mode}Authorization") do post.each do |field, value| xml.tag!(field, value) end end inner = CGI.escapeHTML(xml.target!) envelope(mode).sub(/{{ :body }}/, inner) end def envelope(mode) <<-EOS <soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:aut="http://Borgun/Heimir/pub/ws/Authorization"> <soapenv:Header/> <soapenv:Body> <aut:#{mode}AuthorizationInput> <#{mode}AuthReqXml> {{ :body }} </#{mode}AuthReqXml> </aut:#{mode}AuthorizationInput> </soapenv:Body> </soapenv:Envelope> EOS end def url(action) (test? ? test_url : live_url) end def six_random_digits (0...6).map { rand(48..57).chr }.join end end end end
lcn-com-dev/active_merchant
lib/active_merchant/billing/gateways/borgun.rb
Ruby
mit
7,043
<?php /** * NoViewerFoundException.php * * @since 29/05/15 * @author gseidel */ namespace Enhavo\Bundle\AppBundle\Exception; class TemplateNotFoundException extends \Exception { }
FabianLiebl/enhavo
src/Enhavo/Bundle/AppBundle/Exception/TemplateNotFoundException.php
PHP
mit
188
var everything = function () { return 42; }; anything.prototype.everything = everything;
Sha-Grisha/anything.js
src/everything.js
JavaScript
mit
93
<a href="http://flexboxgrid.com" target="blank">Flexbox Grid reference</a> ---- ``` <Row> <Col xs={12} sm={8} md={6} lg={4}> <Box>Responsive</div> </Col> </Row> ``` ---- ##### Offset ``` <Row> <Col xs={9} className="col-xs-offset-3"> <Box>Offset</div> </Col> </Row> ``` ----- ##### Auto size ``` <Row> <Col auto={true}> <Box>Auto size</div> </Col> </Row> ```
the-simian/reforged-prepack
.structor/docs/components/Row.md
Markdown
mit
409
<!DOCTYPE html><html><head><meta charset="UTF-8" /><title>Source Code</title></head><body><pre>&lt;%-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --%> &lt;%@ taglib uri="http://tomcat.apache.org/example-taglib" prefix="eg" %> &lt;eg:ShowSource jspFile="&lt;%= util.HTMLFilter.filter(request.getQueryString()) %>"/> </pre></body></html>
attrs/plexi.http
www/samples/jsp/source.jsp.html
HTML
mit
1,099
<!-- THIS FILE IS GENERATED VIA '.template-helpers/generate-tag-details.pl' --> # Tags of `iojs` - [`iojs:1.8.4`](#iojs184) - [`iojs:1.8`](#iojs18) - [`iojs:1`](#iojs1) - [`iojs:1.8.4-onbuild`](#iojs184-onbuild) - [`iojs:1.8-onbuild`](#iojs18-onbuild) - [`iojs:1-onbuild`](#iojs1-onbuild) - [`iojs:1.8.4-slim`](#iojs184-slim) - [`iojs:1.8-slim`](#iojs18-slim) - [`iojs:1-slim`](#iojs1-slim) - [`iojs:2.5.0`](#iojs250) - [`iojs:2.5`](#iojs25) - [`iojs:2`](#iojs2) - [`iojs:2.5.0-onbuild`](#iojs250-onbuild) - [`iojs:2.5-onbuild`](#iojs25-onbuild) - [`iojs:2-onbuild`](#iojs2-onbuild) - [`iojs:2.5.0-slim`](#iojs250-slim) - [`iojs:2.5-slim`](#iojs25-slim) - [`iojs:2-slim`](#iojs2-slim) - [`iojs:3.3.0`](#iojs330) - [`iojs:3.3`](#iojs33) - [`iojs:3`](#iojs3) - [`iojs:latest`](#iojslatest) - [`iojs:3.3.0-onbuild`](#iojs330-onbuild) - [`iojs:3.3-onbuild`](#iojs33-onbuild) - [`iojs:3-onbuild`](#iojs3-onbuild) - [`iojs:onbuild`](#iojsonbuild) - [`iojs:3.3.0-slim`](#iojs330-slim) - [`iojs:3.3-slim`](#iojs33-slim) - [`iojs:3-slim`](#iojs3-slim) - [`iojs:slim`](#iojsslim) ## `iojs:1.8.4` - Total Virtual Size: 634.3 MB (634263704 bytes) - Total v2 Content-Length: 249.6 MB (249647017 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` 
- Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:50:52 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:50:57 GMT - Parent Layer: `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:b9ee50e4201d1e2daec5cb3830428fe5b35b81762cdda5de7ad1409b2d79f28c` - v2 Content-Length: 8.9 MB (8858079 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:30:49 GMT #### `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:50:58 GMT - Parent Layer: `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1.8` - Total Virtual Size: 634.3 MB (634263704 bytes) - Total v2 Content-Length: 249.6 MB (249647017 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 
MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ 
libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT 
#### `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:50:52 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:50:57 GMT - Parent Layer: `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:b9ee50e4201d1e2daec5cb3830428fe5b35b81762cdda5de7ad1409b2d79f28c` - v2 Content-Length: 8.9 MB (8858079 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:30:49 GMT #### `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:50:58 GMT - Parent Layer: `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1` - Total Virtual Size: 634.3 MB (634263704 bytes) - Total v2 Content-Length: 249.6 MB (249647017 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD 
file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 
15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - 
Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:50:52 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:50:57 GMT - Parent Layer: `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:b9ee50e4201d1e2daec5cb3830428fe5b35b81762cdda5de7ad1409b2d79f28c` - v2 Content-Length: 8.9 MB (8858079 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:30:49 GMT #### `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:50:58 GMT - Parent Layer: `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 
Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1.8.4-onbuild` - Total Virtual Size: 634.3 MB (634263704 bytes) - Total v2 Content-Length: 249.6 MB (249647305 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent 
Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: 
`sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:50:52 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:50:57 GMT - Parent Layer: `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:b9ee50e4201d1e2daec5cb3830428fe5b35b81762cdda5de7ad1409b2d79f28c` - v2 Content-Length: 8.9 MB (8858079 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:30:49 GMT #### 
`4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:50:58 GMT - Parent Layer: `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `f0ed0e53dd58a300300895ddfc11f42ba380b5d892c17c2f66e0d792215f2d9c` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:35 GMT - Parent Layer: `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:2a57f56f1970941ef543d0c536769b7f5d5a4437c8516aaddfe233d137a9c03c` - v2 Content-Length: 128.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:37:17 GMT #### `4f225461d57e7eb76ed2b60c2159de7bcece88bca7591d602d7feb3f2545df58` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:35 GMT - Parent Layer: `f0ed0e53dd58a300300895ddfc11f42ba380b5d892c17c2f66e0d792215f2d9c` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `37e20e64a819b56199ccf4a221d7273f45889d38c8b5b2d6e715b80aa73a7fcb` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:51:36 GMT - Parent Layer: `4f225461d57e7eb76ed2b60c2159de7bcece88bca7591d602d7feb3f2545df58` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `a2fafb149118bcf80021482e76d5d13e00a5ed9fd059b686cbfd191b7f057caa` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:51:36 GMT - Parent Layer: 
`37e20e64a819b56199ccf4a221d7273f45889d38c8b5b2d6e715b80aa73a7fcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `04678cd0ead8f9cdb50a1b974487ca581c89e3e41f4a73040f9b5bb4d8c5086a` ```dockerfile ONBUILD COPY . /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:37 GMT - Parent Layer: `a2fafb149118bcf80021482e76d5d13e00a5ed9fd059b686cbfd191b7f057caa` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b590d3ee658428281816f126207d270c25c9e32509e2fc4f682fba334f3fadec` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:51:37 GMT - Parent Layer: `04678cd0ead8f9cdb50a1b974487ca581c89e3e41f4a73040f9b5bb4d8c5086a` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1.8-onbuild` - Total Virtual Size: 634.3 MB (634263704 bytes) - Total v2 Content-Length: 249.6 MB (249647305 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: 
`843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ 
libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:50:52 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` 
- Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:50:57 GMT - Parent Layer: `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:b9ee50e4201d1e2daec5cb3830428fe5b35b81762cdda5de7ad1409b2d79f28c` - v2 Content-Length: 8.9 MB (8858079 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:30:49 GMT #### `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:50:58 GMT - Parent Layer: `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `f0ed0e53dd58a300300895ddfc11f42ba380b5d892c17c2f66e0d792215f2d9c` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:35 GMT - Parent Layer: `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:2a57f56f1970941ef543d0c536769b7f5d5a4437c8516aaddfe233d137a9c03c` - v2 Content-Length: 128.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:37:17 GMT #### 
`4f225461d57e7eb76ed2b60c2159de7bcece88bca7591d602d7feb3f2545df58` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:35 GMT - Parent Layer: `f0ed0e53dd58a300300895ddfc11f42ba380b5d892c17c2f66e0d792215f2d9c` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `37e20e64a819b56199ccf4a221d7273f45889d38c8b5b2d6e715b80aa73a7fcb` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:51:36 GMT - Parent Layer: `4f225461d57e7eb76ed2b60c2159de7bcece88bca7591d602d7feb3f2545df58` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `a2fafb149118bcf80021482e76d5d13e00a5ed9fd059b686cbfd191b7f057caa` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:51:36 GMT - Parent Layer: `37e20e64a819b56199ccf4a221d7273f45889d38c8b5b2d6e715b80aa73a7fcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `04678cd0ead8f9cdb50a1b974487ca581c89e3e41f4a73040f9b5bb4d8c5086a` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:37 GMT - Parent Layer: `a2fafb149118bcf80021482e76d5d13e00a5ed9fd059b686cbfd191b7f057caa` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b590d3ee658428281816f126207d270c25c9e32509e2fc4f682fba334f3fadec` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:51:37 GMT - Parent Layer: `04678cd0ead8f9cdb50a1b974487ca581c89e3e41f4a73040f9b5bb4d8c5086a` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1-onbuild` - Total Virtual Size: 634.3 MB (634263704 bytes) - Total v2 Content-Length: 249.6 MB (249647305 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates 
\ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 
bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:50:52 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO 
"https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:50:57 GMT - Parent Layer: `9d71703fc5af6a867b93053bc59ed969aa95962f1583e5c6ada06af40c6ed733` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:b9ee50e4201d1e2daec5cb3830428fe5b35b81762cdda5de7ad1409b2d79f28c` - v2 Content-Length: 8.9 MB (8858079 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:30:49 GMT #### `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:50:58 GMT - Parent Layer: `e71ccca3ecebb23e5aa7e83701fadc23f9ae39de1acc82bfa13a45b94ace6304` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `f0ed0e53dd58a300300895ddfc11f42ba380b5d892c17c2f66e0d792215f2d9c` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:35 GMT - Parent Layer: `4a8b49bca7559021391dfdf0c28ede2002c9a5c2aedb9b2db11e35dfb29861d9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:2a57f56f1970941ef543d0c536769b7f5d5a4437c8516aaddfe233d137a9c03c` - v2 Content-Length: 128.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:37:17 GMT #### `4f225461d57e7eb76ed2b60c2159de7bcece88bca7591d602d7feb3f2545df58` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:35 GMT - Parent Layer: `f0ed0e53dd58a300300895ddfc11f42ba380b5d892c17c2f66e0d792215f2d9c` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 
Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `37e20e64a819b56199ccf4a221d7273f45889d38c8b5b2d6e715b80aa73a7fcb` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:51:36 GMT - Parent Layer: `4f225461d57e7eb76ed2b60c2159de7bcece88bca7591d602d7feb3f2545df58` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `a2fafb149118bcf80021482e76d5d13e00a5ed9fd059b686cbfd191b7f057caa` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:51:36 GMT - Parent Layer: `37e20e64a819b56199ccf4a221d7273f45889d38c8b5b2d6e715b80aa73a7fcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `04678cd0ead8f9cdb50a1b974487ca581c89e3e41f4a73040f9b5bb4d8c5086a` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:51:37 GMT - Parent Layer: `a2fafb149118bcf80021482e76d5d13e00a5ed9fd059b686cbfd191b7f057caa` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b590d3ee658428281816f126207d270c25c9e32509e2fc4f682fba334f3fadec` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:51:37 GMT - Parent Layer: `04678cd0ead8f9cdb50a1b974487ca581c89e3e41f4a73040f9b5bb4d8c5086a` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1.8.4-slim` - Total Virtual Size: 197.3 MB (197293565 bytes) - Total v2 Content-Length: 78.8 MB (78776351 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ 
curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `149f5ebfd6a8b217f6a58329646685df8e696a9ad534b48a504dd1f21d640fcc` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `c6f6fe91c2b5485eaa591e6d8c18be5967b16589c4f2572d2aba74f716684cb9` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:52:29 GMT - Parent Layer: `149f5ebfd6a8b217f6a58329646685df8e696a9ad534b48a504dd1f21d640fcc` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:8dcc546b529c982dc34f742a4246d749f4f25281e22c39faa35f0254e1fe50d7` - v2 Content-Length: 8.9 MB (8858076 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:15 GMT #### `2face4b93cc0e8f3fd806290a7200bcdb3013cf2fd9336602838c27400beb789` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:52:30 GMT - Parent Layer: `c6f6fe91c2b5485eaa591e6d8c18be5967b16589c4f2572d2aba74f716684cb9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1.8-slim` - Total Virtual Size: 197.3 MB (197293565 bytes) - Total v2 Content-Length: 78.8 MB (78776351 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 
51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile 
ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `149f5ebfd6a8b217f6a58329646685df8e696a9ad534b48a504dd1f21d640fcc` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `c6f6fe91c2b5485eaa591e6d8c18be5967b16589c4f2572d2aba74f716684cb9` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:52:29 GMT - Parent Layer: `149f5ebfd6a8b217f6a58329646685df8e696a9ad534b48a504dd1f21d640fcc` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:8dcc546b529c982dc34f742a4246d749f4f25281e22c39faa35f0254e1fe50d7` - v2 Content-Length: 8.9 MB (8858076 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:15 GMT #### `2face4b93cc0e8f3fd806290a7200bcdb3013cf2fd9336602838c27400beb789` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:52:30 GMT - Parent Layer: `c6f6fe91c2b5485eaa591e6d8c18be5967b16589c4f2572d2aba74f716684cb9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:1-slim` - Total Virtual Size: 197.3 MB (197293565 bytes) - Total v2 Content-Length: 78.8 MB (78776351 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 
0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `149f5ebfd6a8b217f6a58329646685df8e696a9ad534b48a504dd1f21d640fcc` ```dockerfile ENV IOJS_VERSION=1.8.4 ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `c6f6fe91c2b5485eaa591e6d8c18be5967b16589c4f2572d2aba74f716684cb9` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm 
"iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:52:29 GMT - Parent Layer: `149f5ebfd6a8b217f6a58329646685df8e696a9ad534b48a504dd1f21d640fcc` - Docker Version: 1.7.1 - Virtual Size: 27.7 MB (27739327 bytes) - v2 Blob: `sha256:8dcc546b529c982dc34f742a4246d749f4f25281e22c39faa35f0254e1fe50d7` - v2 Content-Length: 8.9 MB (8858076 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:15 GMT #### `2face4b93cc0e8f3fd806290a7200bcdb3013cf2fd9336602838c27400beb789` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:52:30 GMT - Parent Layer: `c6f6fe91c2b5485eaa591e6d8c18be5967b16589c4f2572d2aba74f716684cb9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2.5.0` - Total Virtual Size: 635.5 MB (635457324 bytes) - Total v2 Content-Length: 250.0 MB (250040183 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get 
update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: 
`sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:53:12 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` ```dockerfile RUN curl -SLO 
"https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:53:15 GMT - Parent Layer: `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:b86d996ff07ea3616dbb762f5598a137bfea047be884fe8f88d64906d4234405` - v2 Content-Length: 9.3 MB (9251245 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:40:57 GMT #### `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:53:16 GMT - Parent Layer: `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2.5` - Total Virtual Size: 635.5 MB (635457324 bytes) - Total v2 Content-Length: 250.0 MB (250040183 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: 
`843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ 
libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:53:12 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` 
- Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:53:15 GMT - Parent Layer: `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:b86d996ff07ea3616dbb762f5598a137bfea047be884fe8f88d64906d4234405` - v2 Content-Length: 9.3 MB (9251245 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:40:57 GMT #### `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:53:16 GMT - Parent Layer: `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2` - Total Virtual Size: 635.5 MB (635457324 bytes) - Total v2 Content-Length: 250.0 MB (250040183 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: 
`sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ 
file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:53:12 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:53:15 GMT - Parent Layer: `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:b86d996ff07ea3616dbb762f5598a137bfea047be884fe8f88d64906d4234405` - v2 Content-Length: 9.3 MB (9251245 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:40:57 GMT #### `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:53:16 GMT - Parent Layer: `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2.5.0-onbuild` - Total Virtual Size: 635.5 MB (635457324 bytes) - 
Total v2 Content-Length: 250.0 MB (250040470 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 
Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT 
#### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:53:12 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:53:15 GMT - Parent Layer: `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:b86d996ff07ea3616dbb762f5598a137bfea047be884fe8f88d64906d4234405` - v2 Content-Length: 9.3 MB (9251245 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:40:57 GMT #### `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:53:16 GMT - Parent Layer: 
`81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9560cc7fdd94630c15f23604c9d100452e209854e3f3ff8182940ab231b75824` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:54 GMT - Parent Layer: `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:185cb7df2a145f643eb4abb9b7a3cd1f9676527ce8ca64d01825d40a21655276` - v2 Content-Length: 127.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:42:40 GMT #### `f3b5558e54d589c3e2efe19406f6586af7e4dbd0c27cea17be997c394b0cd30f` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:54 GMT - Parent Layer: `9560cc7fdd94630c15f23604c9d100452e209854e3f3ff8182940ab231b75824` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2cc10249693a396aec77b4ad7cdb8479cb5277e2be81e29f00ba3b685a590bce` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `f3b5558e54d589c3e2efe19406f6586af7e4dbd0c27cea17be997c394b0cd30f` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2d63ce6e1e701bc30186c87a6152a61e88cabd9ac38a8efb86ba131601f80179` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `2cc10249693a396aec77b4ad7cdb8479cb5277e2be81e29f00ba3b685a590bce` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e0b622a2a1b54f6f2c226e92a4ab8f7ae5d8e9e4cf02c11aaa0dfd3027cc475f` ```dockerfile ONBUILD COPY . /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `2d63ce6e1e701bc30186c87a6152a61e88cabd9ac38a8efb86ba131601f80179` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `1fa04753e5c044c977b4454ea301e81e7b0c9030f117ea70f6e6bea25a176877` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:53:56 GMT - Parent Layer: `e0b622a2a1b54f6f2c226e92a4ab8f7ae5d8e9e4cf02c11aaa0dfd3027cc475f` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2.5-onbuild` - Total Virtual Size: 635.5 MB (635457324 bytes) - Total v2 Content-Length: 250.0 MB (250040470 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` 
- Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:53:12 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:53:15 GMT - Parent Layer: `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:b86d996ff07ea3616dbb762f5598a137bfea047be884fe8f88d64906d4234405` - v2 Content-Length: 9.3 MB (9251245 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:40:57 GMT #### `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:53:16 GMT - Parent Layer: `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9560cc7fdd94630c15f23604c9d100452e209854e3f3ff8182940ab231b75824` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:54 GMT - Parent Layer: `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:185cb7df2a145f643eb4abb9b7a3cd1f9676527ce8ca64d01825d40a21655276` - v2 Content-Length: 127.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:42:40 GMT #### `f3b5558e54d589c3e2efe19406f6586af7e4dbd0c27cea17be997c394b0cd30f` 
```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:54 GMT - Parent Layer: `9560cc7fdd94630c15f23604c9d100452e209854e3f3ff8182940ab231b75824` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2cc10249693a396aec77b4ad7cdb8479cb5277e2be81e29f00ba3b685a590bce` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `f3b5558e54d589c3e2efe19406f6586af7e4dbd0c27cea17be997c394b0cd30f` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2d63ce6e1e701bc30186c87a6152a61e88cabd9ac38a8efb86ba131601f80179` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `2cc10249693a396aec77b4ad7cdb8479cb5277e2be81e29f00ba3b685a590bce` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e0b622a2a1b54f6f2c226e92a4ab8f7ae5d8e9e4cf02c11aaa0dfd3027cc475f` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `2d63ce6e1e701bc30186c87a6152a61e88cabd9ac38a8efb86ba131601f80179` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `1fa04753e5c044c977b4454ea301e81e7b0c9030f117ea70f6e6bea25a176877` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:53:56 GMT - Parent Layer: `e0b622a2a1b54f6f2c226e92a4ab8f7ae5d8e9e4cf02c11aaa0dfd3027cc475f` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2-onbuild` - Total Virtual Size: 635.5 MB (635457324 bytes) - Total v2 Content-Length: 250.0 MB (250040470 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates 
\ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 
bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:53:12 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO 
"https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:53:15 GMT - Parent Layer: `0d2ef88aff9d5ce7d9eb82781c311c3c78a2db2a866f971080963ede4f8ed07f` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:b86d996ff07ea3616dbb762f5598a137bfea047be884fe8f88d64906d4234405` - v2 Content-Length: 9.3 MB (9251245 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:40:57 GMT #### `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:53:16 GMT - Parent Layer: `81a1c07db5ecfb6c2121d2a3dc2b0d6f16cc930001b523c0e2231db2b219799d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `9560cc7fdd94630c15f23604c9d100452e209854e3f3ff8182940ab231b75824` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:54 GMT - Parent Layer: `02afbdf878536de7f4c49ebfa9c1efdbb9b2af514f470207d5b7fbe02873a14b` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:185cb7df2a145f643eb4abb9b7a3cd1f9676527ce8ca64d01825d40a21655276` - v2 Content-Length: 127.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:42:40 GMT #### `f3b5558e54d589c3e2efe19406f6586af7e4dbd0c27cea17be997c394b0cd30f` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:54 GMT - Parent Layer: `9560cc7fdd94630c15f23604c9d100452e209854e3f3ff8182940ab231b75824` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 
Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2cc10249693a396aec77b4ad7cdb8479cb5277e2be81e29f00ba3b685a590bce` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `f3b5558e54d589c3e2efe19406f6586af7e4dbd0c27cea17be997c394b0cd30f` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2d63ce6e1e701bc30186c87a6152a61e88cabd9ac38a8efb86ba131601f80179` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `2cc10249693a396aec77b4ad7cdb8479cb5277e2be81e29f00ba3b685a590bce` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e0b622a2a1b54f6f2c226e92a4ab8f7ae5d8e9e4cf02c11aaa0dfd3027cc475f` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:53:55 GMT - Parent Layer: `2d63ce6e1e701bc30186c87a6152a61e88cabd9ac38a8efb86ba131601f80179` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `1fa04753e5c044c977b4454ea301e81e7b0c9030f117ea70f6e6bea25a176877` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:53:56 GMT - Parent Layer: `e0b622a2a1b54f6f2c226e92a4ab8f7ae5d8e9e4cf02c11aaa0dfd3027cc475f` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2.5.0-slim` - Total Virtual Size: 198.5 MB (198487185 bytes) - Total v2 Content-Length: 79.2 MB (79169527 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ 
curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e7d859a7f819f4146c93c4778eead93a1e4e86e970c41c5ecd6e7cb027556e9c` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:54:44 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `def0c773154a8c78b2ff26fc64c2375ed961aa03a7ef43bac516522a00ad5825` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:54:50 GMT - Parent Layer: `e7d859a7f819f4146c93c4778eead93a1e4e86e970c41c5ecd6e7cb027556e9c` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:7a58f7d23f0c1b2b82f101a9476e9f2810848885fa5ce1702c102b961c733a37` - v2 Content-Length: 9.3 MB (9251252 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:44:37 GMT #### `f1c2420e87fc6a1a4939f3effd2bd3e5e83281c9f2d5a86d9897fcd6ed872da8` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:54:51 GMT - Parent Layer: `def0c773154a8c78b2ff26fc64c2375ed961aa03a7ef43bac516522a00ad5825` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2.5-slim` - Total Virtual Size: 198.5 MB (198487185 bytes) - Total v2 Content-Length: 79.2 MB (79169527 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 
51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile 
ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e7d859a7f819f4146c93c4778eead93a1e4e86e970c41c5ecd6e7cb027556e9c` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:54:44 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `def0c773154a8c78b2ff26fc64c2375ed961aa03a7ef43bac516522a00ad5825` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:54:50 GMT - Parent Layer: `e7d859a7f819f4146c93c4778eead93a1e4e86e970c41c5ecd6e7cb027556e9c` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:7a58f7d23f0c1b2b82f101a9476e9f2810848885fa5ce1702c102b961c733a37` - v2 Content-Length: 9.3 MB (9251252 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:44:37 GMT #### `f1c2420e87fc6a1a4939f3effd2bd3e5e83281c9f2d5a86d9897fcd6ed872da8` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:54:51 GMT - Parent Layer: `def0c773154a8c78b2ff26fc64c2375ed961aa03a7ef43bac516522a00ad5825` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:2-slim` - Total Virtual Size: 198.5 MB (198487185 bytes) - Total v2 Content-Length: 79.2 MB (79169527 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 
0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e7d859a7f819f4146c93c4778eead93a1e4e86e970c41c5ecd6e7cb027556e9c` ```dockerfile ENV IOJS_VERSION=2.5.0 ``` - Created: Thu, 10 Sep 2015 07:54:44 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `def0c773154a8c78b2ff26fc64c2375ed961aa03a7ef43bac516522a00ad5825` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm 
"iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:54:50 GMT - Parent Layer: `e7d859a7f819f4146c93c4778eead93a1e4e86e970c41c5ecd6e7cb027556e9c` - Docker Version: 1.7.1 - Virtual Size: 28.9 MB (28932947 bytes) - v2 Blob: `sha256:7a58f7d23f0c1b2b82f101a9476e9f2810848885fa5ce1702c102b961c733a37` - v2 Content-Length: 9.3 MB (9251252 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:44:37 GMT #### `f1c2420e87fc6a1a4939f3effd2bd3e5e83281c9f2d5a86d9897fcd6ed872da8` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:54:51 GMT - Parent Layer: `def0c773154a8c78b2ff26fc64c2375ed961aa03a7ef43bac516522a00ad5825` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3.3.0` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271245 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get 
update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: 
`sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO 
"https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3.3` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271245 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: 
`843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ 
libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` 
- Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271245 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: 
`sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ 
file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:latest` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total 
v2 Content-Length: 252.3 MB (252271245 bytes) ### Layers (10) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: 
`sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### 
`2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: 
`ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3.3.0-onbuild` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271531 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN 
apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 
10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: 
`sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:e0de1387cfbdbef29dc1cd767e3158bd148dd03632abdf070432301d1dfd89a9` - v2 Content-Length: 126.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:48:15 GMT #### `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### 
`2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` ```dockerfile ONBUILD COPY . /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4bee70652e05216df56c88d0e04dbd62c9e588b7d77e83ca3d62821103bee6d8` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3.3-onbuild` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271531 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### 
`8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ 
libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` 
```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:e0de1387cfbdbef29dc1cd767e3158bd148dd03632abdf070432301d1dfd89a9` - v2 Content-Length: 126.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:48:15 GMT #### `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4bee70652e05216df56c88d0e04dbd62c9e588b7d77e83ca3d62821103bee6d8` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3-onbuild` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271531 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates 
\ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 
bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO 
"https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:e0de1387cfbdbef29dc1cd767e3158bd148dd03632abdf070432301d1dfd89a9` - v2 Content-Length: 126.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:48:15 GMT #### `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 
Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4bee70652e05216df56c88d0e04dbd62c9e588b7d77e83ca3d62821103bee6d8` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:onbuild` - Total Virtual Size: 641.2 MB (641174444 bytes) - Total v2 Content-Length: 252.3 MB (252271531 bytes) ### Layers (16) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ 
curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ bzr \ git \ mercurial \ openssh-client \ subversion \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:29:05 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 122.3 MB (122317988 bytes) - v2 Blob: `sha256:a6f2dac3eb9c26067c12dafd0c917f591d9881ee84a45f750d7a1d58187adfd8` - v2 Content-Length: 42.3 MB (42339522 bytes) - v2 Last-Modified: Tue, 08 Sep 2015 15:43:42 GMT #### `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ autoconf \ automake \ bzip2 \ file \ g++ \ gcc \ imagemagick \ libbz2-dev \ libc6-dev \ libcurl4-openssl-dev \ libevent-dev \ libffi-dev \ libgeoip-dev \ libglib2.0-dev \ libjpeg-dev \ liblzma-dev \ libmagickcore-dev \ libmagickwand-dev \ libmysqlclient-dev \ libncurses-dev \ libpng-dev \ libpq-dev \ libreadline-dev \ libsqlite3-dev \ libssl-dev \ libtool \ libwebp-dev \ libxml2-dev \ libxslt-dev \ libyaml-dev \ make \ patch \ xz-utils \ zlib1g-dev \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:31:25 GMT - Parent Layer: `20b348f4d5682b697d2f456322c97d916bafb65f6c4436697209ac1ec0f1803f` - Docker Version: 1.7.1 - Virtual Size: 314.7 MB (314652151 bytes) - v2 Blob: `sha256:f4f48828d97bcfe36d5697d8f505088a4369e3d660307576f68ae74031884ca7` - v2 Content-Length: 128.5 MB (128531143 bytes) 
- v2 Last-Modified: Tue, 08 Sep 2015 15:45:31 GMT #### `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `16b189cc8ce688f9f1d8f1d837fa0891107450a06c795b1cba8f6c33a4454280` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:61f26d33ec3bbef5073f11f8bad058c136159dee3ea81e9f942337e2a919d062` - v2 Content-Length: 19.9 KB (19854 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:31:02 GMT #### `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:50:51 GMT - Parent Layer: `6227f463e6e24a6ea3c8eaae40b4b206aeb3d63b6c615c3512cd9706524f0a3e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:55:34 GMT - Parent Layer: `2e45961f9f233ca9913dff96558d65a5096aa0b2e6a34711d3ac51289a40f50d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO 
"https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:55:39 GMT - Parent Layer: `d6121d3249540e18301640b4991e78cb09b84c6598c3754b48ffa3a01c9bc0f5` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:880f632cb30a847917e9d1b7a4d21a6ad328ba64c9184daed59842223e53c54f` - v2 Content-Length: 11.5 MB (11482307 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:46:04 GMT #### `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:55:40 GMT - Parent Layer: `ad6edef3fa19561e563f5b851ad375fcb92c81312271de584bb20c664178a9c1` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` ```dockerfile RUN mkdir -p /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `1b198e14bf1696eff48a52a848bf0d82fa5eac7294afe8778f82360e9a23a4f3` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:e0de1387cfbdbef29dc1cd767e3158bd148dd03632abdf070432301d1dfd89a9` - v2 Content-Length: 126.0 B - v2 Last-Modified: Fri, 11 Sep 2015 04:48:15 GMT #### `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` ```dockerfile WORKDIR /usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:33 GMT - Parent Layer: `ca0c87731f208965bc024e7f8306e3196213af5743f5be69b891a5d4d9c13dcb` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 
Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` ```dockerfile ONBUILD COPY package.json /usr/src/app/ ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `8bda8c066f514f73374a3464c2968117b409c2478f33b336d5ec06322b1fe9c2` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` ```dockerfile ONBUILD RUN npm install ``` - Created: Thu, 10 Sep 2015 07:56:34 GMT - Parent Layer: `4904476efc2ea066e1a3c6e273985b94e2de6d3435bb270da4118169d62402f4` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` ```dockerfile ONBUILD COPY . 
/usr/src/app ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `2e628ba5e514cd9f5b783c993685755ba973a0f5162111f0e0bb3ecb2840535e` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `4bee70652e05216df56c88d0e04dbd62c9e588b7d77e83ca3d62821103bee6d8` ```dockerfile CMD ["npm" "start"] ``` - Created: Thu, 10 Sep 2015 07:56:35 GMT - Parent Layer: `48b5b95c5a8bb00e2f69f8431f9af46f3674bba58bce55373ab9042c10430ea9` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3.3.0-slim` - Total Virtual Size: 204.2 MB (204204305 bytes) - Total v2 Content-Length: 81.4 MB (81400599 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ 
curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:57:43 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:57:50 GMT - Parent Layer: `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:85c960f80f7078f5a6f85cfeac2e6ada6e1eabbc8bd85c36db6f0d981786532a` - v2 Content-Length: 11.5 MB (11482324 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:50:48 GMT #### `afa96cc0bfbd5eeb372091abd7558dab5cdc25d5b40ab2b6fb58e86c13425267` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:57:51 GMT - Parent Layer: `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3.3-slim` - Total Virtual Size: 204.2 MB (204204305 bytes) - Total v2 Content-Length: 81.4 MB (81400599 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 
51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile 
ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:57:43 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:57:50 GMT - Parent Layer: `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:85c960f80f7078f5a6f85cfeac2e6ada6e1eabbc8bd85c36db6f0d981786532a` - v2 Content-Length: 11.5 MB (11482324 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:50:48 GMT #### `afa96cc0bfbd5eeb372091abd7558dab5cdc25d5b40ab2b6fb58e86c13425267` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:57:51 GMT - Parent Layer: `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: 
`sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:3-slim` - Total Virtual Size: 204.2 MB (204204305 bytes) - Total v2 Content-Length: 81.4 MB (81400599 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 
0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:57:43 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm 
"iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:57:50 GMT - Parent Layer: `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:85c960f80f7078f5a6f85cfeac2e6ada6e1eabbc8bd85c36db6f0d981786532a` - v2 Content-Length: 11.5 MB (11482324 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:50:48 GMT #### `afa96cc0bfbd5eeb372091abd7558dab5cdc25d5b40ab2b6fb58e86c13425267` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:57:51 GMT - Parent Layer: `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT ## `iojs:slim` - Total Virtual Size: 204.2 MB (204204305 bytes) - Total v2 Content-Length: 81.4 MB (81400599 bytes) ### Layers (8) #### `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` ```dockerfile ADD file:c7d957020a6ee3df60f2407c7a383cabcfa67d43f6d5151b241b37034f5bc6e0 in / ``` - Created: Mon, 07 Sep 2015 23:35:05 GMT - Docker Version: 1.7.1 - Virtual Size: 125.2 MB (125159131 bytes) - v2 Blob: `sha256:f8efbffe7b954b520805da80ce0cce94e3834482c384c25c8851db98696e7f70` - v2 Content-Length: 51.4 MB (51359708 bytes) - v2 Last-Modified: Mon, 07 Sep 2015 23:38:06 GMT #### `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` ```dockerfile CMD ["/bin/bash"] ``` - Created: Mon, 07 Sep 2015 23:35:07 GMT - Parent Layer: `843e2bded49837e4846422f3a82a67be3ccc46c3e636e03d8d946c57564468ba` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` ```dockerfile RUN apt-get 
update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ wget \ && rm -rf /var/lib/apt/lists/* ``` - Created: Tue, 08 Sep 2015 15:27:57 GMT - Parent Layer: `8c00acfb017549e44d28098762c3e6296872a1ca9b90385855f1019d84bb0dac` - Docker Version: 1.7.1 - Virtual Size: 44.4 MB (44355688 bytes) - v2 Blob: `sha256:d6314f6cfecf4fc37f622f99d2a114af91ec678d29c76983249f23995ef77563` - v2 Content-Length: 18.5 MB (18538583 bytes) - v2 Last-Modified: Thu, 10 Sep 2015 23:36:48 GMT #### `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` ```dockerfile RUN set -ex && for key in\ 9554F04D7259F04124DE6B476D5A82AC7E37093B\ 94AE36675C464D64BAFA68DD7434390BDBE9B9C5\ 0034A06D9D9B0064CE8ADF6BF1747F4AD2306D93\ FD3A5288F042B6850C66B31F09FE44734EB7990E\ 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1\ DD8F2338BAE7501E3DD5AC78C273792F7D83545D ; do\ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; done ``` - Created: Thu, 10 Sep 2015 07:52:24 GMT - Parent Layer: `8b49fe88b40b6c09bbe751e9b235d1919e704ae1765a304226047bd0b203b3fe` - Docker Version: 1.7.1 - Virtual Size: 39.4 KB (39419 bytes) - v2 Blob: `sha256:20f9453d177242021cc6799c74fa2a47f6687a3144251dcb13909e6e8edd0d96` - v2 Content-Length: 19.9 KB (19856 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:39:29 GMT #### `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` ```dockerfile ENV NPM_CONFIG_LOGLEVEL=info ``` - Created: Thu, 10 Sep 2015 07:52:25 GMT - Parent Layer: `1e7917dfdd7dc805caa9fe209ae9df3a07529269f4989570f312e72ff4f339c8` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` ```dockerfile ENV IOJS_VERSION=3.3.0 ``` - Created: Thu, 10 Sep 2015 07:57:43 GMT - Parent Layer: `a2f363d07640f912e4ce880341f1c4a88ce12edaf9be3668776f45da3ca5c73d` - Docker 
Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT #### `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` ```dockerfile RUN curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/iojs-v$IOJS_VERSION-linux-x64.tar.gz" && curl -SLO "https://iojs.org/dist/v$IOJS_VERSION/SHASUMS256.txt.asc" && gpg --verify SHASUMS256.txt.asc && grep " iojs-v$IOJS_VERSION-linux-x64.tar.gz\$" SHASUMS256.txt.asc | sha256sum -c - && tar -xzf "iojs-v$IOJS_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 && rm "iojs-v$IOJS_VERSION-linux-x64.tar.gz" SHASUMS256.txt.asc ``` - Created: Thu, 10 Sep 2015 07:57:50 GMT - Parent Layer: `b28dd1c16b74874e66cc509537bbbc59f9c2910384feefe315f930d43fecee9f` - Docker Version: 1.7.1 - Virtual Size: 34.7 MB (34650067 bytes) - v2 Blob: `sha256:85c960f80f7078f5a6f85cfeac2e6ada6e1eabbc8bd85c36db6f0d981786532a` - v2 Content-Length: 11.5 MB (11482324 bytes) - v2 Last-Modified: Fri, 11 Sep 2015 04:50:48 GMT #### `afa96cc0bfbd5eeb372091abd7558dab5cdc25d5b40ab2b6fb58e86c13425267` ```dockerfile CMD ["iojs"] ``` - Created: Thu, 10 Sep 2015 07:57:51 GMT - Parent Layer: `e55726425149363c861ad6c5ade5b136ce1687a3824643aa3d847bc7acc377cc` - Docker Version: 1.7.1 - Virtual Size: 0.0 B - v2 Blob: `sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4` - v2 Content-Length: 32.0 B - v2 Last-Modified: Fri, 27 Mar 2015 17:18:47 GMT
snailwalker/docs
iojs/tag-details.md
Markdown
mit
204,831
/*!
 * Start Bootstrap - Half Slider (https://startbootstrap.com/template-overviews/half-slider)
 * Copyright 2013-2017 Start Bootstrap
 * Licensed under MIT (https://github.com/BlackrockDigital/startbootstrap-half-slider/blob/master/LICENSE)
 */

/*
 * Each carousel slide fills roughly two thirds of the viewport and scales
 * its background image to cover the whole slide area.
 */
.carousel-item {
  height: 65vh;      /* slide height tracks the viewport... */
  min-height: 300px; /* ...but never collapses below 300px on short screens */
  background: no-repeat center center scroll;
  /* vendor-prefixed fallbacks first; the unprefixed property last so it
     wins the cascade in modern browsers */
  -webkit-background-size: cover;
  -moz-background-size: cover;
  -o-background-size: cover;
  background-size: cover;
}
hassaanaliw/hassaanaliw.github.io
projects/css/half-slider.css
CSS
mit
470
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using Palaso.Xml;

namespace TestApp
{
	/// <summary>
	/// Manual test harness: prompts for an XML data file, splits it into
	/// second-level elements with <see cref="FastXmlElementSplitter"/>, and
	/// writes the number of records found to the console.
	/// </summary>
	public partial class FastXmlSplitterTestForm : Form
	{
		public FastXmlSplitterTestForm()
		{
			InitializeComponent();
		}

		/// <summary>
		/// Event handler: asks the user for a file, chooses element markers by
		/// extension, counts the split records, then closes the form.
		/// </summary>
		private void LoadDataFile(object sender, EventArgs e)
		{
			try
			{
				string chosenFile = null;
				using (var dialog = new OpenFileDialog())
				{
					if (dialog.ShowDialog(this) == DialogResult.OK)
						chosenFile = dialog.FileName;
				}
				if (!string.IsNullOrEmpty(chosenFile))
				{
					// Pick the optional first-element marker and the record
					// element name based on the file type being split.
					string firstElementMarker = null;
					string recordMarker = null;
					switch (Path.GetExtension(chosenFile).ToLowerInvariant())
					{
						case ".lift":
							firstElementMarker = "header";
							recordMarker = "entry";
							break;
						case ".chorusnotes":
							recordMarker = "annotation";
							break;
						case ".fwdata":
							firstElementMarker = "AdditionalFields";
							recordMarker = "rt";
							break;
					}
					using (var splitter = new FastXmlElementSplitter(chosenFile))
					{
						bool foundOptionalFirstElement;
						var results = splitter.GetSecondLevelElementBytes(firstElementMarker, recordMarker, out foundOptionalFirstElement);
						Console.WriteLine("Records: " + results.Count());
					}
				}
				Close();
			}
			catch (Exception err)
			{
				// Log the message, then rethrow so the failure is still visible
				// to the caller/debugger (this is a test app, not production).
				var msg = err.Message;
				Console.WriteLine(msg);
				throw;
			}
		}
	}
}
darcywong00/libpalaso
TestApps/ReportingTest/FastXmlSplitterTestForm.cs
C#
mit
1,604
package org.knowm.xchange.okcoin.dto.account;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Immutable holder for OkCoin cross-margin futures account information,
 * deserialized by Jackson from a JSON object with per-currency entries
 * ({@code btc} and {@code ltc}).
 */
public class OkCoinFuturesInfoCross {

  private final OkcoinFuturesFundsCross btcFunds;
  private final OkcoinFuturesFundsCross ltcFunds;

  /**
   * @param btcFunds funds bound from the JSON property {@code btc}
   * @param ltcFunds funds bound from the JSON property {@code ltc}
   */
  public OkCoinFuturesInfoCross(@JsonProperty("btc") final OkcoinFuturesFundsCross btcFunds,
      @JsonProperty("ltc") final OkcoinFuturesFundsCross ltcFunds) {
    this.btcFunds = btcFunds;
    this.ltcFunds = ltcFunds;
  }

  /** @return the BTC cross-margin funds */
  public OkcoinFuturesFundsCross getBtcFunds() {
    return btcFunds;
  }

  /** @return the LTC cross-margin funds */
  public OkcoinFuturesFundsCross getLtcFunds() {
    return ltcFunds;
  }
}
gaborkolozsy/XChange
xchange-okcoin/src/main/java/org/knowm/xchange/okcoin/dto/account/OkCoinFuturesInfoCross.java
Java
mit
620
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. package org.bondlib; import org.junit.Test; import java.io.ByteArrayInputStream; public class SimpleBinaryReaderTest { // See SimpleBinaryProtocolTest for more tests (on both reader and writer) @Test(expected = IllegalArgumentException.class) public void testConstructorWithZeroProtocolVersion() { ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]); new SimpleBinaryReader(bais, 0); } @Test(expected = IllegalArgumentException.class) public void testConstructorWithInvalidProtocolVersion() { ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]); new SimpleBinaryReader(bais, 3); } @Test(expected = IllegalArgumentException.class) public void testConstructorWithNullInputStream() { new SimpleBinaryReader(null, 1); } }
jdubrule/bond
java/core/src/test/java/org/bondlib/SimpleBinaryReaderTest.java
Java
mit
985