blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 4 410 | content_id stringlengths 40 40 | detected_licenses listlengths 0 51 | license_type stringclasses 2 values | repo_name stringlengths 5 132 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 80 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 5.85k 689M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 131 values | src_encoding stringclasses 34 values | language stringclasses 1 value | is_vendor bool 1 class | is_generated bool 2 classes | length_bytes int64 3 9.45M | extension stringclasses 32 values | content stringlengths 3 9.45M | authors listlengths 1 1 | author_id stringlengths 0 313 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dbe830a5acd10ca5d40847eed9da04bebb47ef22 | 3cb6cca32eab27cf507bd3937a903676b3cea6c5 | /third_party/java/htmlparser/src/nu/validator/saxtree/CharBufferNode.java | ff51b1b8ca90e9a4a1d59c1f1fcfd52052a9465f | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | rahulchaurasiazx55/caja | 1982461b68924915c09564370459d7784ff92c10 | 719b0d03c68461bc6bde8e7ad370e36471209846 | refs/heads/master | 2022-12-23T02:34:31.261651 | 2020-09-30T18:31:49 | 2020-09-30T18:31:49 | 300,015,001 | 1 | 0 | Apache-2.0 | 2020-09-30T18:25:42 | 2020-09-30T18:25:41 | null | UTF-8 | Java | false | false | 2,085 | java | /*
* Copyright (c) 2007 Henri Sivonen
* Copyright (c) 2008 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.saxtree;
import org.xml.sax.Locator;
/**
* A common superclass for character buffer node classes.
* @version $Id: CharBufferNode.java 367 2008-07-02 18:27:22Z hsivonen $
* @author hsivonen
*/
public abstract class CharBufferNode extends Node {

    /**
     * The wrapped character data. This is a private copy of the slice the
     * constructor was given, so later changes to the caller's array cannot
     * affect this node.
     */
    protected final char[] buffer;

    /**
     * Constructs the node by taking a defensive copy of a slice of the
     * caller's buffer.
     *
     * @param locator the locator
     * @param buf the buffer to copy characters from
     * @param start the offset of the first character of the slice
     * @param length the number of characters in the slice
     */
    CharBufferNode(Locator locator, char[] buf, int start, int length) {
        super(locator);
        char[] copy = new char[length];
        System.arraycopy(buf, start, copy, 0, length);
        this.buffer = copy;
    }

    /**
     * Returns the wrapped buffer as a string.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return String.valueOf(buffer);
    }
}
| [
"mikesamuel@gmail.com"
] | mikesamuel@gmail.com |
81914bc810609f820b21b068917b6a64e44c2796 | eff279c4c1cd2a730107e7b8e5fbd313918b8a20 | /app/src/main/java/liophan/keystoredemo/KeyStoreActivity.java | 414e58727d96b97c0dbfb5b0d9dbb124fb17b81f | [] | no_license | loipn1804/KeyStoreDemo | f2885db7964e04dd5546456b3304477f8fb15447 | 95e28951e22b01609af26b684e9d3edfcdec1c8b | refs/heads/master | 2021-01-25T12:31:23.683644 | 2017-02-20T16:09:17 | 2017-02-20T16:09:17 | 82,516,248 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 7,471 | java | package liophan.keystoredemo;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.security.KeyPairGeneratorSpec;
import android.security.keystore.KeyGenParameterSpec;
import android.security.keystore.KeyProperties;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.util.Base64;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.math.BigInteger;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.CipherOutputStream;
import javax.security.auth.x500.X500Principal;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Copyright (c) 2017, Stacck Pte Ltd. All rights reserved.
*
* @author Lio <lphan@stacck.com>
* @version 1.0
* @since February 20, 2017
*/
public class KeyStoreActivity extends AppCompatActivity implements View.OnClickListener {

    // True on Android M (API 23) and newer, where KeyGenParameterSpec replaces
    // the pre-M KeyPairGeneratorSpec used in the else-branch below.
    private static final boolean IS_M = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
    // Provider name of the platform key store.
    private static final String ANDROID_KEY_STORE = "AndroidKeyStore";
    // Alias under which this demo's RSA key pair is created and looked up.
    private static final String ALIAS = "lio";

    @BindView(R.id.btnStart)
    Button btnStart;
    @BindView(R.id.txtLog)
    TextView txtLog;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_demo);
        ButterKnife.bind(this);
        initData();
    }

    // One-time listener wiring; called from onCreate().
    private void initData() {
        btnStart.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.btnStart:
                initKeyStore();
                break;
        }
    }

    /**
     * Entry point of the demo flow. Opens AndroidKeyStore, logs all existing
     * aliases, then either (a) generates a new RSA key pair under {@link #ALIAS}
     * and encrypts a sample string, or (b) if the alias already exists, decrypts
     * the ciphertext previously stored in SharedPreferences.
     */
    @SuppressWarnings("NewApi")
    private void initKeyStore() {
        try {
            KeyStore keyStore = KeyStore.getInstance(ANDROID_KEY_STORE);
            // AndroidKeyStore requires load(null) before any other call.
            keyStore.load(null);
            List<String> keyAliases = new ArrayList<>();
            Enumeration<String> aliases = keyStore.aliases();
            while (aliases.hasMoreElements()) {
                keyAliases.add(aliases.nextElement());
            }
            Log.e("LIO", "keyAliases " + TextUtils.join(",", keyAliases));
            if (!keyStore.containsAlias(ALIAS)) {
                // Create new key and save to KeyStore
                KeyPairGenerator kpg = KeyPairGenerator.getInstance(KeyProperties.KEY_ALGORITHM_RSA, ANDROID_KEY_STORE);
                if (IS_M) {
                    // NOTE(review): BLOCK_MODE_GCM is declared here for an RSA key,
                    // but encryptString()/decryptString() use "RSA/ECB/PKCS1Padding".
                    // Confirm the key authorizations actually permit that transformation.
                    KeyGenParameterSpec spec = new KeyGenParameterSpec.Builder(ALIAS,
                            KeyProperties.PURPOSE_ENCRYPT | KeyProperties.PURPOSE_DECRYPT)
                            .setBlockModes(KeyProperties.BLOCK_MODE_GCM)
                            .setEncryptionPaddings(KeyProperties.ENCRYPTION_PADDING_RSA_PKCS1)
                            .setRandomizedEncryptionRequired(false)
                            .build();
                    kpg.initialize(spec);
                } else {
                    // Generate a key pair for encryption (pre-M API), valid for 30 years.
                    Calendar start = Calendar.getInstance();
                    Calendar end = Calendar.getInstance();
                    end.add(Calendar.YEAR, 30);
                    KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getApplicationContext())
                            .setAlias(ALIAS)
                            .setSubject(new X500Principal("CN=" + ALIAS))
                            .setSerialNumber(BigInteger.TEN)
                            .setStartDate(start.getTime())
                            .setEndDate(end.getTime())
                            .build();
                    kpg.initialize(spec);
                }
                kpg.generateKeyPair();
                // First run: encrypt a sample string and persist the ciphertext.
                encryptString(ALIAS, "lio phan");
            } else {
                // Key already exists: decrypt whatever was stored previously.
                loadEncodeStringFromPreference();
            }
        } catch (Exception e) {
            e.printStackTrace();
            // NOTE(review): e.getMessage() may be null, which makes Log.e throw
            // a NullPointerException on some platforms — consider Log.e(tag, "msg", e).
            Log.e("LIO", e.getMessage());
        }
    }

    /**
     * Encrypts {@code str} with the public key stored under {@code alias}
     * ("RSA/ECB/PKCS1Padding"), Base64-encodes the ciphertext and saves it
     * to the "MyPref" SharedPreferences under key "code".
     *
     * @param alias key-store alias whose certificate/public key is used
     * @param str   plaintext to encrypt (UTF-8)
     */
    public void encryptString(String alias, String str) {
        try {
            KeyStore keyStore = KeyStore.getInstance(ANDROID_KEY_STORE);
            keyStore.load(null);
            KeyStore.PrivateKeyEntry privateKeyEntry = (KeyStore.PrivateKeyEntry) keyStore.getEntry(alias, null);
            Cipher input = Cipher.getInstance("RSA/ECB/PKCS1Padding");
            // Public key encrypts; the matching private key (kept in the key store)
            // decrypts in decryptString().
            input.init(Cipher.ENCRYPT_MODE, privateKeyEntry.getCertificate().getPublicKey());
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            CipherOutputStream cipherOutputStream = new CipherOutputStream(outputStream, input);
            cipherOutputStream.write(str.getBytes("UTF-8"));
            cipherOutputStream.close();
            byte[] vals = outputStream.toByteArray();
            saveDecodedStringToPreference(Base64.encodeToString(vals, Base64.DEFAULT));
        } catch (Exception e) {
            // NOTE(review): e.getMessage() may be null here as well.
            Log.e("LIO", e.getMessage());
        }
    }

    /**
     * Decrypts a Base64 ciphertext with the private key stored under
     * {@code alias} and shows the recovered UTF-8 text in {@link #txtLog}.
     *
     * @param alias key-store alias whose private key is used
     * @param code  Base64-encoded ciphertext produced by {@link #encryptString}
     */
    public void decryptString(String alias, String code) {
        try {
            KeyStore keyStore = KeyStore.getInstance(ANDROID_KEY_STORE);
            keyStore.load(null);
            KeyStore.PrivateKeyEntry privateKeyEntry = (KeyStore.PrivateKeyEntry) keyStore.getEntry(alias, null);
            Cipher output = Cipher.getInstance("RSA/ECB/PKCS1Padding");
            output.init(Cipher.DECRYPT_MODE, privateKeyEntry.getPrivateKey());
            CipherInputStream cipherInputStream = new CipherInputStream(new ByteArrayInputStream(Base64.decode(code, Base64.DEFAULT)), output);
            // Read the decrypted stream one byte at a time until EOF.
            ArrayList<Byte> values = new ArrayList<>();
            int nextByte;
            while ((nextByte = cipherInputStream.read()) != -1) {
                values.add((byte) nextByte);
            }
            byte[] bytes = new byte[values.size()];
            for (int i = 0; i < bytes.length; i++) {
                bytes[i] = values.get(i).byteValue();
            }
            String finalText = new String(bytes, 0, bytes.length, "UTF-8");
            txtLog.setText(finalText);
        } catch (Exception e) {
            Log.e("LIO", e.getMessage());
        }
    }

    // Persists the Base64 ciphertext under key "code" in the "MyPref" preferences.
    // NOTE(review): despite the name, the argument is the ENCODED (encrypted)
    // string, not a decoded one — naming is swapped with the loader below.
    private void saveDecodedStringToPreference(String decodedString) {
        SharedPreferences preferences = getSharedPreferences("MyPref", MODE_PRIVATE);
        SharedPreferences.Editor editor = preferences.edit();
        editor.putString("code", decodedString);
        editor.apply();
    }

    // Loads the stored ciphertext and decrypts it; if nothing is stored yet,
    // shows a toast and (re-)encrypts the sample string instead.
    private void loadEncodeStringFromPreference() {
        SharedPreferences preferences = getSharedPreferences("MyPref", MODE_PRIVATE);
        String code = preferences.getString("code", null);
        if (!TextUtils.isEmpty(code)) {
            decryptString(ALIAS, code);
        } else {
            Toast.makeText(this, "No Code", Toast.LENGTH_SHORT).show();
            encryptString(ALIAS, "lio phan");
        }
    }
}
| [
"lphan@stacck.com"
] | lphan@stacck.com |
ba2a98cd3d20f92455015c3d6bc707c998b19fff | a8ff71d6c40929c753b191d8326456f2ef5732bd | /app/src/main/java/com/szemingcheng/amemo/db/MemoHelper.java | 0bd5ef56f54878e428a519d092cad18cff9b1be4 | [] | no_license | szemingcheng/AMemo | c2b804a49b771448fdb3b23f9ab5953f5f6cc723 | 1ec21f2aac7c2dcfbf95db613b0bc625d0e37890 | refs/heads/master | 2020-12-30T13:19:38.242289 | 2017-08-02T10:21:53 | 2017-08-02T10:21:53 | 91,340,775 | 1 | 1 | null | 2017-05-30T10:14:40 | 2017-05-15T13:25:23 | Java | UTF-8 | Java | false | false | 302 | java | package com.szemingcheng.amemo.db;
import com.szemingcheng.amemo.entity.Memo;
import org.greenrobot.greendao.AbstractDao;
/**
 * Database helper for {@code Memo} entities keyed by their {@code Long} id.
 * All CRUD behaviour is inherited from {@code BaseDBHelper}; this class only
 * binds the generic parameters and forwards the DAO.
 *
 * Created by szemingcheng on 2017/5/16.
 */
public class MemoHelper extends BaseDBHelper<Memo,Long> {
    // NOTE(review): the parameter uses the raw AbstractDao type — presumably it
    // should be the MemoDao / AbstractDao<Memo, Long>; confirm against callers.
    public MemoHelper(AbstractDao dao) {
        super(dao);
    }
}
| [
"zhengszeming@gmail.com"
] | zhengszeming@gmail.com |
3cc26585e92ba55e3c60c657080eb70a41835ac4 | b6378a8e54dca7d3b9f470a70d2196bdfa7cdf5d | /Open Hospital/src/org/isf/medtype/gui/MedicalTypeBrowserEdit.java | c0bef3a875382544bd228c691ef1ab4900edf39d | [] | no_license | acimasa/ModuloHIVOpenHospital | 9e840f3b40db615c48e089511269421c25eeecd4 | f130d386ca8e2848a1633fe1950e751835b38447 | refs/heads/master | 2021-01-22T05:10:22.259370 | 2013-07-22T15:20:45 | 2013-07-22T15:20:45 | 10,950,407 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 10,876 | java | package org.isf.medtype.gui;
import java.awt.AWTEvent;
import java.awt.BorderLayout;
import java.awt.event.KeyEvent;
import java.util.EventListener;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.event.EventListenerList;
import org.isf.utils.jobjects.*;
import org.isf.medtype.manager.MedicalTypeBrowserManager;
import org.isf.medtype.model.MedicalType;
import org.isf.generaldata.MessageBundle;
public class MedicalTypeBrowserEdit extends JDialog{

    /**
     *
     */
    private static final long serialVersionUID = 1L;
    // Listeners interested in insert/update events fired by this dialog.
    private EventListenerList medicalTypeListeners = new EventListenerList();

    /**
     * Callback interface notified when a medical type record is inserted
     * or updated through this dialog.
     */
    public interface MedicalTypeListener extends EventListener {
        public void medicalTypeUpdated(AWTEvent e);
        public void medicalTypeInserted(AWTEvent e);
    }

    // Registers a listener for insert/update notifications.
    public void addMedicalTypeListener(MedicalTypeListener l) {
        medicalTypeListeners.add(MedicalTypeListener.class, l);
    }

    // Unregisters a previously added listener.
    public void removeMedicalTypeListener(MedicalTypeListener listener) {
        medicalTypeListeners.remove(MedicalTypeListener.class, listener);
    }

    // Notifies all registered listeners that a record was inserted.
    private void fireMedicalInserted() {
        AWTEvent event = new AWTEvent(new Object(), AWTEvent.RESERVED_ID_MAX + 1) {
            /**
             *
             */
            private static final long serialVersionUID = 1L;};
        EventListener[] listeners = medicalTypeListeners.getListeners(MedicalTypeListener.class);
        for (int i = 0; i < listeners.length; i++)
            ((MedicalTypeListener)listeners[i]).medicalTypeInserted(event);
    }

    // Notifies all registered listeners that a record was updated.
    private void fireMedicalUpdated() {
        AWTEvent event = new AWTEvent(new Object(), AWTEvent.RESERVED_ID_MAX + 1) {
            /**
             *
             */
            private static final long serialVersionUID = 1L;};
        EventListener[] listeners = medicalTypeListeners.getListeners(MedicalTypeListener.class);
        for (int i = 0; i < listeners.length; i++)
            ((MedicalTypeListener)listeners[i]).medicalTypeUpdated(event);
    }

    // Lazily-built Swing components (see the get*() factory methods below).
    private JPanel jContentPane = null;
    private JPanel dataPanel = null;
    private JPanel buttonPanel = null;
    private JButton cancelButton = null;
    private JButton okButton = null;
    private JTextField descriptionTextField = null;
    private VoLimitedTextField codeTextField = null;
    // Description as it was when the dialog opened; used to detect "no change".
    private String lastdescription;
    // The record being edited (or filled in, when inserting).
    private MedicalType medicalType = null;
    // true = creating a new record, false = editing an existing one.
    private boolean insert;
    private JPanel jDataPanel = null;
    private JLabel jCodeLabel = null;
    private JPanel jCodeLabelPanel = null;
    private JPanel jDescriptionLabelPanel = null;
    private JLabel jDescripitonLabel = null;

    /**
     * Builds the modal dialog.
     *
     * @param owner the parent frame
     * @param old the medical type to edit (or a fresh instance when inserting);
     *            this same object is mutated and persisted on OK
     * @param inserting true to create a new record, false to edit {@code old}
     */
    public MedicalTypeBrowserEdit(JFrame owner,MedicalType old,boolean inserting) {
        super(owner,true);
        insert = inserting;
        medicalType = old;  // this instance is reused for every operation
        lastdescription= medicalType.getDescription();
        initialize();
    }

    /**
     * This method initializes this
     *
     * @return void
     */
    private void initialize() {
        this.setBounds(300,300,350,180);
        this.setContentPane(getJContentPane());
        if (insert) {
            this.setTitle(MessageBundle.getMessage("angal.medtype.newmedicaltyperecord"));
        } else {
            this.setTitle(MessageBundle.getMessage("angal.medtype.editingmedicaltyperecord"));
        }
        this.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
    }

    /**
     * This method initializes jContentPane
     *
     * @return javax.swing.JPanel
     */
    private JPanel getJContentPane() {
        if (jContentPane == null) {
            jContentPane = new JPanel();
            jContentPane.setLayout(new BorderLayout());
            jContentPane.add(getDataPanel(), java.awt.BorderLayout.NORTH);  // Generated
            jContentPane.add(getButtonPanel(), java.awt.BorderLayout.SOUTH);  // Generated
        }
        return jContentPane;
    }

    /**
     * This method initializes dataPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getDataPanel() {
        if (dataPanel == null) {
            dataPanel = new JPanel();
            //dataPanel.setLayout(new BoxLayout(getDataPanel(), BoxLayout.Y_AXIS)); // Generated
            dataPanel.add(getJDataPanel(), null);
        }
        return dataPanel;
    }

    /**
     * This method initializes buttonPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getButtonPanel() {
        if (buttonPanel == null) {
            buttonPanel = new JPanel();
            buttonPanel.add(getOkButton(), null);  // Generated
            buttonPanel.add(getCancelButton(), null);  // Generated
        }
        return buttonPanel;
    }

    /**
     * This method initializes cancelButton; it simply closes the dialog.
     *
     * @return javax.swing.JButton
     */
    private JButton getCancelButton() {
        if (cancelButton == null) {
            cancelButton = new JButton();
            cancelButton.setText(MessageBundle.getMessage("angal.medtype.cancel"));  // Generated
            cancelButton.setMnemonic(KeyEvent.VK_C);
            cancelButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    dispose();
                }
            });
        }
        return cancelButton;
    }

    /**
     * This method initializes okButton. Its listener validates the code and
     * description fields, then either inserts a new record or updates the
     * existing one, firing the matching listener event on success.
     *
     * @return javax.swing.JButton
     */
    private JButton getOkButton() {
        if (okButton == null) {
            okButton = new JButton();
            okButton.setText(MessageBundle.getMessage("angal.medtype.ok"));  // Generated
            okButton.setMnemonic(KeyEvent.VK_O);
            okButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    String key = codeTextField.getText();
                    MedicalTypeBrowserManager manager = new MedicalTypeBrowserManager();
                    // Validation: code must be non-empty and at most one character.
                    if (key.equals("")){
                        JOptionPane.showMessageDialog(
                                null,
                                MessageBundle.getMessage("angal.medtype.pleaseinsertacode"),
                                MessageBundle.getMessage("angal.hospital"),
                                JOptionPane.PLAIN_MESSAGE);
                        return;
                    }
                    //System.out.print(key.length());
                    if (key.length()>1){
                        JOptionPane.showMessageDialog(
                                null,
                                MessageBundle.getMessage("angal.medtype.codetoolongmaxchar"),
                                MessageBundle.getMessage("angal.hospital"),
                                JOptionPane.PLAIN_MESSAGE);
                        return;
                    }
                    // When inserting, the code must not already exist.
                    if(insert){
                        if (manager.codeControl(key)){
                            JOptionPane.showMessageDialog(
                                    null,
                                    MessageBundle.getMessage("angal.medtype.codealreadyinuse"),
                                    MessageBundle.getMessage("angal.hospital"),
                                    JOptionPane.PLAIN_MESSAGE);
                            codeTextField.setText("");
                            return;
                        }};
                    if (descriptionTextField.getText().equals("")){
                        JOptionPane.showMessageDialog(
                                null,
                                MessageBundle.getMessage("angal.medtype.pleaseinsertavaliddescription"),
                                MessageBundle.getMessage("angal.hospital"),
                                JOptionPane.PLAIN_MESSAGE);
                        return;
                    }
                    // NOTE(review): dispose() does not end this method — execution
                    // falls through and still attempts to save even when the
                    // description is unchanged. A "return;" after dispose() looks
                    // intended here; confirm before changing behavior.
                    if (descriptionTextField.getText().equals(lastdescription)){
                        dispose();
                    }
                    medicalType.setDescription(descriptionTextField.getText());
                    medicalType.setCode(codeTextField.getText());
                    boolean result = false;
                    if (insert) {  // inserting
                        result = manager.newMedicalType(medicalType);
                        if (result) {
                            fireMedicalInserted();
                        }
                        if (!result) JOptionPane.showMessageDialog(null, MessageBundle.getMessage("angal.medtype.thedatacouldnotbesaved"));
                        else dispose();
                    }
                    else {  // updating: skip the save entirely if nothing changed
                        if (descriptionTextField.getText().equals(lastdescription)){
                            dispose();
                        }else{
                            result = manager.updateMedicalType(medicalType);
                            if (result) {
                                fireMedicalUpdated();
                            }
                            if (!result) JOptionPane.showMessageDialog(null, MessageBundle.getMessage("angal.medtype.thedatacouldnotbesaved"));
                            else dispose();
                        }
                    }
                }
            });
        }
        return okButton;
    }

    /**
     * This method initializes descriptionTextField, pre-filled with the
     * current description when editing.
     *
     * @return javax.swing.JTextField
     */
    private JTextField getDescriptionTextField() {
        if (descriptionTextField == null) {
            descriptionTextField = new JTextField(20);
            if (!insert) {
                descriptionTextField.setText(medicalType.getDescription());
                lastdescription=medicalType.getDescription();
            }
        }
        return descriptionTextField;
    }

    /**
     * This method initializes codeTextField (length-limited); the code is
     * read-only when editing an existing record.
     *
     * @return javax.swing.JTextField
     */
    private JTextField getCodeTextField() {
        if (codeTextField == null) {
            codeTextField = new VoLimitedTextField(2);
            if (!insert) {
                codeTextField.setText(medicalType.getCode());
                codeTextField.setEnabled(false);
            }
        }
        return codeTextField;
    }

    /**
     * This method initializes jDataPanel, stacking the code and description
     * labels and fields vertically.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getJDataPanel() {
        if (jDataPanel == null) {
            jDataPanel = new JPanel();
            jDataPanel.setLayout(new BoxLayout(getJDataPanel(),BoxLayout.Y_AXIS));
            jDataPanel.add(getJCodeLabelPanel(), null);
            jDataPanel.add(getCodeTextField(), null);
            jDataPanel.add(getJDescriptionLabelPanel(), null);
            jDataPanel.add(getDescriptionTextField(), null);
        }
        return jDataPanel;
    }

    /**
     * This method initializes jCodeLabel
     *
     * @return javax.swing.JLabel
     */
    private JLabel getJCodeLabel() {
        if (jCodeLabel == null) {
            jCodeLabel = new JLabel();
            jCodeLabel.setText(MessageBundle.getMessage("angal.medtype.codemaxchar"));
        }
        return jCodeLabel;
    }

    /**
     * This method initializes jCodeLabelPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getJCodeLabelPanel() {
        if (jCodeLabelPanel == null) {
            jCodeLabelPanel = new JPanel();
            //jCodeLabelPanel.setLayout(new BorderLayout());
            jCodeLabelPanel.add(getJCodeLabel(), BorderLayout.CENTER);
        }
        return jCodeLabelPanel;
    }

    /**
     * This method initializes jDescriptionLabelPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getJDescriptionLabelPanel() {
        if (jDescriptionLabelPanel == null) {
            jDescripitonLabel = new JLabel();
            jDescripitonLabel.setText(MessageBundle.getMessage("angal.medtype.description"));
            jDescriptionLabelPanel = new JPanel();
            jDescriptionLabelPanel.add(jDescripitonLabel, null);
        }
        return jDescriptionLabelPanel;
    }

}  // @jve:decl-index=0:visual-constraint="146,61"
| [
"Alfonso@Alfonso-PC"
] | Alfonso@Alfonso-PC |
c4f3129567c194e1e279780081be406f9de27273 | af7b8bbe77461e59f32ba746f4bb055620a5c110 | /base/src/main/java/com/hz/yk/ssh/jcabi/Simple.java | 98b35d45d85a8a4c5d5d27fbbd94577516d24c2c | [] | no_license | ykdsg/MyJavaProject | 3e51564a3fb57ab4ae043c9112e1936ccc179dd5 | a7d88aee2f58698aed7d497c2cf6e23a605ebb59 | refs/heads/master | 2023-06-26T02:23:33.812330 | 2023-06-12T11:28:23 | 2023-06-12T11:28:23 | 1,435,034 | 4 | 6 | null | 2022-12-01T15:21:01 | 2011-03-03T13:30:03 | Java | UTF-8 | Java | false | false | 1,104 | java | package com.hz.yk.ssh.jcabi;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.logging.Level;
import com.jcabi.log.Logger;
import com.jcabi.ssh.SSH;
import com.jcabi.ssh.SSHByPassword;
import com.jcabi.ssh.Shell;
import org.apache.commons.io.input.NullInputStream;
/**
* 对JSch 的包装 ,目前来看使用最简单,但是太简单了
* https://github.com/jcabi/jcabi-ssh
* Created by wuzheng.yk on 15/11/3.
*/
public class Simple {

    public static void main(String[] args) throws IOException {
        // SECURITY NOTE(review): a real host, username and password are
        // hard-coded and committed here — these credentials should be revoked
        // and the values moved to configuration/environment before reuse.
        SSHByPassword sshByPassword = new SSHByPassword(
                "112.124.123.117", 22,
                "admin", "Yangtuojia001"
        );
        // Shell.Plain: runs a command and returns its stdout as a String.
        Shell.Plain plain = new Shell.Plain(
                sshByPassword
        );
        String hello = plain.exec("pwd");
        System.out.println(hello);
        // Lower-level API: exec(cmd, stdin, stdout, stderr) returns the exit
        // code; Shell.Verbose additionally logs the command being executed.
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        Shell ssh = new Shell.Verbose(sshByPassword);
        System.out.println(ssh.exec("pwd",new NullInputStream(0L),output, Logger.stream(Level.WARNING, true)));
    }
}
| [
"17173as@163.com"
] | 17173as@163.com |
b01eab922468d47371f74bf7a8f73d5136bc33ce | a9e78f785fbdd7b4b2bd3b16ac5c703ef0af5a29 | /INetworkManager.java | 76af17cdad0487275e4c8f329cb6c0305ed2af50 | [] | no_license | btilm305/mc-dev | 0aa1a8aad5b8fe7e0bc7be64fbd7e7f87162fdaa | c0ad4fec170c89b8e1534635b614b50a55235573 | refs/heads/master | 2016-09-05T20:07:45.144546 | 2013-06-05T02:12:55 | 2013-06-05T02:12:55 | 9,861,501 | 2 | 2 | null | null | null | null | UTF-8 | Java | false | false | 1,142 | java | package net.minecraft.src;
import java.net.SocketAddress;
/**
 * Abstraction over one network connection: queues outbound packets,
 * processes inbound ones, and exposes lifecycle/shutdown control.
 * Some operations are server-side only, as noted per method.
 */
public interface INetworkManager
{
    /**
     * Sets the NetHandler for this NetworkManager. Server-only.
     */
    void setNetHandler(NetHandler var1);

    /**
     * Adds the packet to the correct send queue (chunk data packets go to a separate queue).
     */
    void addToSendQueue(Packet var1);

    /**
     * Wakes reader and writer threads
     */
    void wakeThreads();

    /**
     * Checks timeouts and processes all pending read packets.
     */
    void processReadPackets();

    /**
     * Returns the socket address of the remote side. Server-only.
     */
    SocketAddress getRemoteAddress();

    /**
     * Shuts down the server. (Only actually used on the server)
     */
    void serverShutdown();

    /**
     * Returns the number of chunk data packets waiting to be sent.
     */
    int getNumChunkDataPackets();

    /**
     * Shuts down the network with the specified reason. Closes all streams and sockets, spawns NetworkMasterThread to
     * stop reading and writing threads.
     *
     * @param var1 reason for the shutdown
     * @param var2 additional context values accompanying the reason
     */
    void networkShutdown(String var1, Object ... var2);
}
| [
"btilm305@gmail.com"
] | btilm305@gmail.com |
a3134e8634cddc8a850c0f4f3a98480ea31dc722 | 88a2ba39520b21148f8464c94451b0d5b63b33c8 | /lib/skyapi-feign/src/main/java/org/openapitools/client/model/InlineResponse2002.java | c702eb2f7aaef99c6c0c593d43c5cbcb029ac95c | [] | no_license | fibercrypto/libjava-skyfiber | 396f52fe310d762d2a81b63411f089462b5eaabc | 77e111da1d03a12e9410624dfc394d8dc5847c11 | refs/heads/develop | 2022-12-22T04:45:47.365615 | 2019-12-26T21:27:25 | 2019-12-26T21:27:25 | 130,543,908 | 0 | 0 | null | 2022-12-10T04:45:06 | 2018-04-22T07:09:51 | Java | UTF-8 | Java | false | false | 8,532 | java | /*
* Skycoin REST API.
* Skycoin is a next-generation cryptocurrency.
*
* The version of the OpenAPI document: 0.27.0
* Contact: contact@skycoin.net
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model;
import java.util.Objects;
import java.util.Arrays;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
/**
 * InlineResponse2002 — coin-supply information model (current/total/max coin
 * and coin-hour supplies plus locked/unlocked distribution address lists).
 *
 * This class is produced by the OpenAPI Generator (see the {@code @Generated}
 * annotation below): do not hand-edit; regenerate from the API spec instead.
 * Setters are fluent ({@code with}-style) and return {@code this}.
 */
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2019-08-05T23:30:16.371-04:00[America/Havana]")
public class InlineResponse2002 {
  @JsonProperty("current_coinhour_supply")
  private String currentCoinhourSupply;

  @JsonProperty("current_supply")
  private String currentSupply;

  @JsonProperty("locked_distribution_addresses")
  private List<String> lockedDistributionAddresses = new ArrayList<String>();

  @JsonProperty("max_supply")
  private String maxSupply;

  @JsonProperty("total_coinhour_supply")
  private String totalCoinhourSupply;

  @JsonProperty("total_supply")
  private String totalSupply;

  @JsonProperty("unlocked_distribution_addresses")
  private List<String> unlockedDistributionAddresses = new ArrayList<String>();

  public InlineResponse2002 currentCoinhourSupply(String currentCoinhourSupply) {
    this.currentCoinhourSupply = currentCoinhourSupply;
    return this;
  }

  /**
   * CurrentCoinHourSupply is coins hours in non distribution addresses.
   * @return currentCoinhourSupply
   **/
  @ApiModelProperty(value = "CurrentCoinHourSupply is coins hours in non distribution addresses.")
  public String getCurrentCoinhourSupply() {
    return currentCoinhourSupply;
  }

  public void setCurrentCoinhourSupply(String currentCoinhourSupply) {
    this.currentCoinhourSupply = currentCoinhourSupply;
  }

  public InlineResponse2002 currentSupply(String currentSupply) {
    this.currentSupply = currentSupply;
    return this;
  }

  /**
   * Coins distributed beyond the project.
   * @return currentSupply
   **/
  @ApiModelProperty(value = "Coins distributed beyond the project.")
  public String getCurrentSupply() {
    return currentSupply;
  }

  public void setCurrentSupply(String currentSupply) {
    this.currentSupply = currentSupply;
  }

  public InlineResponse2002 lockedDistributionAddresses(List<String> lockedDistributionAddresses) {
    this.lockedDistributionAddresses = lockedDistributionAddresses;
    return this;
  }

  public InlineResponse2002 addLockedDistributionAddressesItem(String lockedDistributionAddressesItem) {
    if (this.lockedDistributionAddresses == null) {
      this.lockedDistributionAddresses = new ArrayList<String>();
    }
    this.lockedDistributionAddresses.add(lockedDistributionAddressesItem);
    return this;
  }

  /**
   * Distribution addresses which are locked and do not count towards total supply.
   * @return lockedDistributionAddresses
   **/
  @ApiModelProperty(value = "Distribution addresses which are locked and do not count towards total supply.")
  public List<String> getLockedDistributionAddresses() {
    return lockedDistributionAddresses;
  }

  public void setLockedDistributionAddresses(List<String> lockedDistributionAddresses) {
    this.lockedDistributionAddresses = lockedDistributionAddresses;
  }

  public InlineResponse2002 maxSupply(String maxSupply) {
    this.maxSupply = maxSupply;
    return this;
  }

  /**
   * MaxSupply is the maximum number of coins to be distributed ever.
   * @return maxSupply
   **/
  @ApiModelProperty(value = "MaxSupply is the maximum number of coins to be distributed ever.")
  public String getMaxSupply() {
    return maxSupply;
  }

  public void setMaxSupply(String maxSupply) {
    this.maxSupply = maxSupply;
  }

  public InlineResponse2002 totalCoinhourSupply(String totalCoinhourSupply) {
    this.totalCoinhourSupply = totalCoinhourSupply;
    return this;
  }

  /**
   * TotalCoinHourSupply is coin hours in all addresses including unlocked distribution addresses.
   * @return totalCoinhourSupply
   **/
  @ApiModelProperty(value = "TotalCoinHourSupply is coin hours in all addresses including unlocked distribution addresses.")
  public String getTotalCoinhourSupply() {
    return totalCoinhourSupply;
  }

  public void setTotalCoinhourSupply(String totalCoinhourSupply) {
    this.totalCoinhourSupply = totalCoinhourSupply;
  }

  public InlineResponse2002 totalSupply(String totalSupply) {
    this.totalSupply = totalSupply;
    return this;
  }

  /**
   * TotalSupply is CurrentSupply plus coins held by the distribution addresses that are spendable.
   * @return totalSupply
   **/
  @ApiModelProperty(value = "TotalSupply is CurrentSupply plus coins held by the distribution addresses that are spendable.")
  public String getTotalSupply() {
    return totalSupply;
  }

  public void setTotalSupply(String totalSupply) {
    this.totalSupply = totalSupply;
  }

  public InlineResponse2002 unlockedDistributionAddresses(List<String> unlockedDistributionAddresses) {
    this.unlockedDistributionAddresses = unlockedDistributionAddresses;
    return this;
  }

  public InlineResponse2002 addUnlockedDistributionAddressesItem(String unlockedDistributionAddressesItem) {
    if (this.unlockedDistributionAddresses == null) {
      this.unlockedDistributionAddresses = new ArrayList<String>();
    }
    this.unlockedDistributionAddresses.add(unlockedDistributionAddressesItem);
    return this;
  }

  /**
   * Distribution addresses which count towards total supply.
   * @return unlockedDistributionAddresses
   **/
  @ApiModelProperty(value = "Distribution addresses which count towards total supply.")
  public List<String> getUnlockedDistributionAddresses() {
    return unlockedDistributionAddresses;
  }

  public void setUnlockedDistributionAddresses(List<String> unlockedDistributionAddresses) {
    this.unlockedDistributionAddresses = unlockedDistributionAddresses;
  }

  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    InlineResponse2002 inlineResponse2002 = (InlineResponse2002) o;
    return Objects.equals(this.currentCoinhourSupply, inlineResponse2002.currentCoinhourSupply) &&
        Objects.equals(this.currentSupply, inlineResponse2002.currentSupply) &&
        Objects.equals(this.lockedDistributionAddresses, inlineResponse2002.lockedDistributionAddresses) &&
        Objects.equals(this.maxSupply, inlineResponse2002.maxSupply) &&
        Objects.equals(this.totalCoinhourSupply, inlineResponse2002.totalCoinhourSupply) &&
        Objects.equals(this.totalSupply, inlineResponse2002.totalSupply) &&
        Objects.equals(this.unlockedDistributionAddresses, inlineResponse2002.unlockedDistributionAddresses);
  }

  @Override
  public int hashCode() {
    return Objects.hash(currentCoinhourSupply, currentSupply, lockedDistributionAddresses, maxSupply, totalCoinhourSupply, totalSupply, unlockedDistributionAddresses);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class InlineResponse2002 {\n");
    sb.append("    currentCoinhourSupply: ").append(toIndentedString(currentCoinhourSupply)).append("\n");
    sb.append("    currentSupply: ").append(toIndentedString(currentSupply)).append("\n");
    sb.append("    lockedDistributionAddresses: ").append(toIndentedString(lockedDistributionAddresses)).append("\n");
    sb.append("    maxSupply: ").append(toIndentedString(maxSupply)).append("\n");
    sb.append("    totalCoinhourSupply: ").append(toIndentedString(totalCoinhourSupply)).append("\n");
    sb.append("    totalSupply: ").append(toIndentedString(totalSupply)).append("\n");
    sb.append("    unlockedDistributionAddresses: ").append(toIndentedString(unlockedDistributionAddresses)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| [
"dev0006@simelo.tech"
] | dev0006@simelo.tech |
dbec8e2a9737592a1a592e3cfb3c3a43985bfba5 | a5242695d6ee9702738a2d32cf8c28b291ec7029 | /TeamCode/src/main/java/org/xbot/ftc/operatingcode/teleop/XbotTeleOp.java | f0fe05ae622cda32053d1c9a32e061aa36e3fa55 | [
"BSD-3-Clause"
] | permissive | Team488/ftc-2018 | f98250687b730409daabf6fbddf36df6c4de5829 | 78968a3be6c47bbaa134da35a24c0b9a0a4b9b1b | refs/heads/master | 2020-03-30T06:57:44.626720 | 2018-12-08T21:49:28 | 2018-12-08T21:49:28 | 150,903,301 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,004 | java | package org.xbot.ftc.operatingcode.teleop;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.xbot.ftc.operatingcode.BaseRobot;
import org.xbot.ftc.robotcore.utils.GameClock;
import org.xbot.ftc.robotcore.utils.XbotTelemetry;
import org.xbot.ftc.robotcore.subsystems.RobotSubsystemManager;
import java.util.ArrayList;
import java.util.List;
@TeleOp(name="Main: TeleOp", group="Main")
public class XbotTeleOp extends LinearOpMode {

    /** Sub-handlers that each service one part of the robot during teleop. */
    private final List<XbotOperatorSubHandler> handlers = new ArrayList<>();

    /**
     * Registers a sub-handler so it receives start/handle/stop callbacks
     * for the lifetime of this op mode.
     *
     * @param listener handler to register
     */
    public void registerHandler(XbotOperatorSubHandler listener) {
        handlers.add(listener);
    }

    @Override
    public void runOpMode() {
        telemetry.addData("Listeners:", "Registering");
        telemetry.update();

        // Initialise shared robot state, then let the registry attach every handler.
        BaseRobot.initOpMode(this, hardwareMap, telemetry);
        new XbotTeleOpSubHandlerRegister().registerHandlers(this);

        telemetry.addData("Listeners:", "Registered");
        telemetry.update();

        waitForStart();

        GameClock clock = RobotSubsystemManager.getInstance().getGameClock();
        clock.resetClock();

        for (XbotOperatorSubHandler handler : handlers) {
            handler.start();
        }

        // Main teleop loop: pump every handler with the gamepad state and
        // flush any telemetry each handler queued (telemetry is updated
        // once per handler, matching the original behavior).
        while (opModeIsActive()) {
            for (XbotOperatorSubHandler handler : handlers) {
                handler.handle(gamepad1, gamepad2);
                handler.updateTelemetry();
                for (XbotTelemetry entry : XbotTelemetry.getDataToAddToTelemetry()) {
                    telemetry.addData(entry.getCaption(), entry.getValue());
                }
                telemetry.addData("Game Clock: ", clock.getTimeElapsed());
                XbotTelemetry.clearData();
                telemetry.update();
            }
        }

        for (XbotOperatorSubHandler handler : handlers) {
            handler.stop();
        }
        RobotSubsystemManager.getInstance().stop();
    }
}
| [
"34357579+ClevelandRobotics@users.noreply.github.com"
] | 34357579+ClevelandRobotics@users.noreply.github.com |
2fcfcc1e4c945604b7e13277dabdea22abc2119a | 0504729695e330f7ee7adbe6f54471e29512f670 | /app/src/main/java/com/example/a3f/fragment/SearchFragment.java | d02fef472364257fa33940eba75a0ffc83e5ccde | [] | no_license | vsnail/3F | 62d527aadbbc98c7dbb8425c96fccfd48edc9460 | b5b891f276be6ebbf5dcd089fa8f1c3bad023a19 | refs/heads/master | 2023-08-14T15:09:23.632812 | 2021-09-24T18:26:56 | 2021-09-24T18:26:56 | 410,039,065 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,860 | java | package com.example.a3f.fragment;
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.StrictMode;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;

import androidx.core.app.ActivityCompat;
import androidx.fragment.app.Fragment;

import com.example.a3f.R;
import com.example.a3f.adapter.EventAdapter;
import com.example.a3f.adapter.SearchAdapter;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SearchFragment extends Fragment implements AdapterView.OnItemClickListener {

    // NOTE(review): credentials are hard-coded in source; move them to secure
    // configuration. Kept here only to preserve existing connectivity.
    private static final String DB_URL = "jdbc:jtds:sqlserver://140.137.61.130:1433/TravelJsonDB";
    private static final String DB_USER = "sa";
    private static final String DB_PASSWORD = "student@109";

    // Parameterized query: the search term is bound as a JDBC parameter
    // instead of being concatenated into the SQL, preventing SQL injection.
    private static final String SEARCH_QUERY =
            "Select top 10 Id,Name,Description,region,town,Picture1 from tbTrip "
                    + "where Picture1 IS NOT NULL AND Picture1 !='' AND Name LIKE ?";

    private Connection connection = null;
    private Button search_btn;
    private EditText et_search;

    public SearchFragment() {
        // Required empty public constructor.
    }

    public static SearchFragment newInstance() {
        SearchFragment fragment = new SearchFragment();
        Bundle args = new Bundle();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fragment_search, container, false);
        final ListView search_lv = v.findViewById(R.id.search_lv);
        et_search = v.findViewById(R.id.et_search);
        search_btn = v.findViewById(R.id.search_btn);
        search_btn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // NOTE(review): JDBC runs on the UI thread here (as in the
                // original); consider moving to a background thread.
                String search = et_search.getText().toString().trim();
                List<Map<String, Object>> shlist = fetchTrips(search);
                if (shlist != null) {
                    SearchAdapter adapter = new SearchAdapter(getActivity());
                    adapter.setList(shlist);
                    search_lv.setAdapter(adapter);
                }
            }
        });
        return v;
    }

    /**
     * Queries the remote database for trips whose name contains the search term.
     * The statement and result set are closed via try-with-resources.
     *
     * @param search user-entered term, matched as an infix of the trip name
     * @return rows keyed for {@link SearchAdapter} ("shId", "shtitle", ...),
     *         or null when the driver is missing or the query fails
     */
    private List<Map<String, Object>> fetchTrips(String search) {
        try {
            Class.forName("net.sourceforge.jtds.jdbc.Driver");
            connection = DriverManager.getConnection(DB_URL, DB_USER, DB_PASSWORD);
            try (PreparedStatement statement = connection.prepareStatement(SEARCH_QUERY)) {
                statement.setString(1, "%" + search + "%");
                try (ResultSet resultSet = statement.executeQuery()) {
                    List<Map<String, Object>> shlist = new ArrayList<>();
                    while (resultSet.next()) {
                        Map<String, Object> shmap = new HashMap<>();
                        shmap.put("shId", resultSet.getString(1));
                        shmap.put("shtitle", resultSet.getString(2));
                        shmap.put("shcontent", resultSet.getString(3));
                        shmap.put("shregion", resultSet.getString(4));
                        shmap.put("shtown", resultSet.getString(5));
                        shmap.put("shpic", resultSet.getString(6));
                        shlist.add(shmap);
                    }
                    return shlist;
                }
            }
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        // Intentionally empty: row clicks are not handled yet (see commented
        // setOnItemClickListener in the original).
    }
}
| [
"A6252861@gmail.com"
] | A6252861@gmail.com |
1e97920fae7c995e5408ec6d4ec63deb24cc3918 | 5c704ed2f9fade1636b8dd0e1adf17c1b6ebd35e | /src/main/java/org/shaong/javajsonplaceholder/models/Company.java | 8d6dfa6be4e24d8e7350499a5a4f29237e68d440 | [] | no_license | cshong0618/JavaConcurrentRequest | 483d58fb4b85e42983a03fd3278a7ee8d6b4cca6 | f933e831f4b0c6b808f85094cf6cf1ffacbd1da8 | refs/heads/master | 2020-04-07T04:39:30.912926 | 2018-11-18T09:04:17 | 2018-11-18T09:04:17 | 158,066,104 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 156 | java | package org.shaong.javajsonplaceholder.models;
import lombok.Data;
// Plain data model for a company record; Lombok's @Data generates the
// getters/setters, equals/hashCode and toString for the fields below.
@Data
public class Company {
    String name;        // company name
    String catchPhrase; // marketing catch phrase
    String bs;          // "business speak" blurb — presumably mirrors the JSONPlaceholder payload; TODO confirm
}
| [
"cshong.gcc@hotmail.com"
] | cshong.gcc@hotmail.com |
44c0cfe353f02ad20dd397e3c119def7df0f46d9 | 9d556971d98434f813f9cdd4afaa3b0363de4dd5 | /app/src/main/java/com/pucit/hostelhubupdated/AddHostel.java | 7dbf5a41b2837434e97ae26bfefdd650872de657 | [] | no_license | abdulwajid7347/MC_Final_Project | 6685f173170b95ecc105b258dff2e58aaf56c9be | 2eab63055b34a75a2b7ff2bc571b9b3561150136 | refs/heads/master | 2023-06-10T00:19:29.435448 | 2021-07-01T19:34:25 | 2021-07-01T19:34:25 | 381,694,136 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 12,445 | java | package com.pucit.hostelhubupdated;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.ValueEventListener;
import com.pucit.hostelhubupdated.Database.Database;
import com.pucit.hostelhubupdated.Models.HostelModel;
/**
 * Activity for adding a new hostel or editing an existing one.
 * When launched for result by HostelDetail it pre-fills the form from the
 * passed {@link HostelModel} (edit flow); otherwise it collects a new
 * hostel's data (add flow). Either way, a valid form is forwarded to
 * {@link AddressActivity}.
 */
public class AddHostel extends AppCompatActivity {

    /** Request code used when launching {@link AddressActivity}. */
    private static final int ADDRESS_REQUEST_CODE = 100;

    private Button btn_addHostel;
    private EditText hostelName, totalNoOfRooms, availableRooms, s_bed, d_bed, dorm;
    private CheckBox wifi, gas, electricity, mess, baths, ac, male, female;
    private HostelModel hostel;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_add_hostel);
        findViews();
        if (getCallingActivity() != null) {
            // Launched for result: only HostelDetail triggers the edit flow.
            // Any other caller gets no click handler (original behavior preserved).
            if (getCallingActivity().getClassName()
                    .equalsIgnoreCase("com.pucit.hostelhubupdated.HostelDetail")) {
                startUpdateFlow();
            }
        } else {
            startAddFlow();
        }
    }

    /** Edit flow: load the existing hostel, pre-fill the form, then forward edits. */
    private void startUpdateFlow() {
        final ProgressDialog progressDialog = new ProgressDialog(AddHostel.this);
        progressDialog.setMessage("Loading information!");
        progressDialog.show();
        final String[] key = new String[1];
        Bundle b = getIntent().getExtras();
        hostel = (HostelModel) b.getSerializable("hostel");
        Database.isAlreadyAHostel(hostel.city_locality_name)
                .addListenerForSingleValueEvent(new ValueEventListener() {
                    @Override
                    public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                        // Remember the database key of the existing hostel record
                        // (the last child wins, as in the original code).
                        for (DataSnapshot ds : dataSnapshot.getChildren()) {
                            key[0] = ds.getKey();
                        }
                        populateFormFromHostel();
                        progressDialog.dismiss();
                        btn_addHostel.setOnClickListener(new View.OnClickListener() {
                            @Override
                            public void onClick(View v) {
                                if (formIsValid()) {
                                    applyFormToHostel();
                                    Intent i = new Intent(AddHostel.this, AddressActivity.class);
                                    i.putExtra("hostel", hostel);
                                    i.putExtra("key", key[0]);
                                    startActivityForResult(i, ADDRESS_REQUEST_CODE);
                                }
                            }
                        });
                    }

                    @Override
                    public void onCancelled(@NonNull DatabaseError databaseError) {
                        // Fix: the original left the progress dialog spinning forever
                        // when the load was cancelled.
                        progressDialog.dismiss();
                    }
                });
    }

    /** Add flow: collect a brand-new hostel and forward it to AddressActivity. */
    private void startAddFlow() {
        Toast.makeText(getApplicationContext(),
                "Please make sure to enter 0 if particular field in irrelevant for you!",
                Toast.LENGTH_LONG).show();
        btn_addHostel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (formIsValid()) {
                    startActivityForResult(buildNewHostelIntent(), ADDRESS_REQUEST_CODE);
                    finish();
                }
            }
        });
    }

    /** Copies the loaded hostel's values into the form widgets (edit flow). */
    private void populateFormFromHostel() {
        hostelName.setText(hostel.name + "");
        totalNoOfRooms.setText(hostel.total_rooms + "");
        availableRooms.setText(hostel.available_rooms + "");
        s_bed.setText(hostel.single_bed_rent + "");
        d_bed.setText(hostel.double_bed_rent + "");
        dorm.setText(hostel.dormitory_rent + "");
        electricity.setChecked(hostel.electricity);
        gas.setChecked(hostel.gas);
        wifi.setChecked(hostel.wifi);
        ac.setChecked(hostel.ac);
        baths.setChecked(hostel.attached_baths);
        mess.setChecked(hostel.mess);
        male.setChecked(hostel.type.equalsIgnoreCase("male"));
        female.setChecked(hostel.type.equalsIgnoreCase("female"));
    }

    /** Writes the validated form values back into the {@link #hostel} model (edit flow). */
    private void applyFormToHostel() {
        hostel.name = hostelName.getText().toString();
        hostel.total_rooms = Integer.parseInt(totalNoOfRooms.getText().toString());
        hostel.available_rooms = Integer.parseInt(availableRooms.getText().toString());
        hostel.single_bed_rent = Integer.parseInt(s_bed.getText().toString());
        hostel.double_bed_rent = Integer.parseInt(d_bed.getText().toString());
        hostel.dormitory_rent = Integer.parseInt(dorm.getText().toString());
        hostel.gas = gas.isChecked();
        hostel.wifi = wifi.isChecked();
        hostel.electricity = electricity.isChecked();
        hostel.mess = mess.isChecked();
        hostel.ac = ac.isChecked();
        hostel.attached_baths = baths.isChecked();
        // formIsValid() guarantees exactly one of the two boxes is checked.
        hostel.type = male.isChecked() ? "male" : "female";
    }

    /** Builds the intent carrying a new hostel's raw form values (add flow). */
    private Intent buildNewHostelIntent() {
        Intent i = new Intent(AddHostel.this, AddressActivity.class);
        i.putExtra("name", hostelName.getText().toString());
        i.putExtra("t_rooms", Integer.parseInt(totalNoOfRooms.getText().toString()));
        i.putExtra("a_rooms", Integer.parseInt(availableRooms.getText().toString()));
        i.putExtra("s_bed", Integer.parseInt(s_bed.getText().toString()));
        i.putExtra("d_bed", Integer.parseInt(d_bed.getText().toString()));
        i.putExtra("dorm", Integer.parseInt(dorm.getText().toString()));
        i.putExtra("wifi", wifi.isChecked());
        i.putExtra("gas", gas.isChecked());
        i.putExtra("electricity", electricity.isChecked());
        i.putExtra("a_baths", baths.isChecked());
        i.putExtra("ac", ac.isChecked());
        i.putExtra("mess", mess.isChecked());
        i.putExtra("type", male.isChecked() ? "male" : "female");
        return i;
    }

    /** Binds all widget fields to their layout views. */
    private void findViews() {
        hostelName = findViewById(R.id.et_hostel_name);
        totalNoOfRooms = findViewById(R.id.et_total_no_of_rooms);
        availableRooms = findViewById(R.id.et_available_rooms);
        btn_addHostel = findViewById(R.id.btn_add_Hostel_rl_addHOstel_activity);
        s_bed = (EditText) findViewById(R.id.et_single_bed_beds_activity);
        d_bed = (EditText) findViewById(R.id.et_double_bed_beds_activity);
        dorm = (EditText) findViewById(R.id.et_dormitory_beds_activity);
        wifi = findViewById(R.id.cb_wifi);
        gas = findViewById(R.id.cb_gas);
        electricity = findViewById(R.id.cb_electricity);
        mess = findViewById(R.id.cb_mess);
        baths = findViewById(R.id.cb_attached_baths);
        ac = findViewById(R.id.cb_ac);
        male = findViewById(R.id.cb_male_add_hostel_activity);
        female = findViewById(R.id.cb_female_add_hostel_activity);
    }

    /**
     * Runs the full validation chain in the same short-circuit order as the
     * original per-field methods: required fields, then hostel type, then
     * the room-count relationship.
     */
    private boolean formIsValid() {
        return requireValue(hostelName)
                && requireValue(totalNoOfRooms)
                && requireValue(availableRooms)
                && requireValue(s_bed)
                && requireValue(d_bed)
                && requireValue(dorm)
                && validateHostelType()
                && validateRoomCounts();
    }

    /**
     * Flags the field with an error when empty. Replaces the six duplicated
     * validate_* methods from the original, which differed only in the field.
     */
    private boolean requireValue(EditText field) {
        if (field.getText().toString().equals("")) {
            field.setError("Please enter value!");
            return false;
        }
        return true;
    }

    /** Exactly one of male/female must be selected (both or neither is invalid). */
    private boolean validateHostelType() {
        if (male.isChecked() == female.isChecked()) {
            male.setError("Please select one option!");
            female.setError("Please select one option!");
            return false;
        }
        return true;
    }

    /** Available rooms may not exceed the total number of rooms. */
    private boolean validateRoomCounts() {
        if (Integer.parseInt(totalNoOfRooms.getText().toString())
                < Integer.parseInt(availableRooms.getText().toString())) {
            availableRooms.setError("Total no of rooms should be less than available no of rooms!");
            return false;
        }
        return true;
    }
}
| [
"mcsf19a035@pucit.edu.pk"
] | mcsf19a035@pucit.edu.pk |
007f49e901300d7b2122f86457287ae69901d915 | 4b0e81faed7ea9ded31b543b9a4eb7e19c7554e9 | /app/src/main/java/com/example/shareus/Session.java | 9ba4bbfecaf5fa0c1b9dd8da9ba1ac94ce253130 | [] | no_license | celllarod/ShareUS | d37dd6f4f7ed6add10df27e1601955c4a352575d | 165ed90158f5338194697d3ac8aedea5d2ebb94e | refs/heads/master | 2023-05-30T02:03:10.629374 | 2021-05-24T11:40:45 | 2021-05-24T11:40:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,723 | java | package com.example.shareus;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.util.Log;
/**
 * Immutable holder for the signed-in user's id and name, persisted in the
 * "shareus" SharedPreferences file.
 */
public class Session {

    private final static String USER_ID = "userId";
    private final static String USER_NAME = "userName";

    private final int userId;
    private final String username;

    public Session(int userId, String username) {
        this.userId = userId;
        this.username = username;
    }

    /** Persists the given session so it survives process restarts. */
    public static void save(Session session, Context cxt) {
        Log.d("LOGIN", "Se guarda la sesión en SharedPreferences para el usuario (" + session.getUserId() + "," + session.getUsername() + ")");
        prefs(cxt).edit()
                .putInt(USER_ID, session.getUserId())
                .putString(USER_NAME, session.getUsername())
                .apply();
    }

    /**
     * Restores the stored session. When no user id has been saved (sentinel -1),
     * any leftover data is cleared and null is returned.
     */
    public static Session get(Context cxt) {
        SharedPreferences stored = prefs(cxt);
        Session session = new Session(stored.getInt(USER_ID, -1), stored.getString(USER_NAME, "null"));
        if (session.getUserId() != -1) {
            return session;
        }
        destroy(cxt);
        return null;
    }

    /** Removes every stored session value. */
    public static void destroy(Context cxt) {
        prefs(cxt).edit().clear().apply();
    }

    /** Shared accessor for the app's preference file. */
    private static SharedPreferences prefs(Context cxt) {
        return cxt.getSharedPreferences("shareus", Context.MODE_PRIVATE);
    }

    public String getUsername() {
        return username;
    }

    public int getUserId() {
        return userId;
    }
}
| [
"angrodboh@alum.us.es"
] | angrodboh@alum.us.es |
b3c2360952f60dc0d9e60549b1b3fde36c756500 | fa91450deb625cda070e82d5c31770be5ca1dec6 | /Diff-Raw-Data/9/9_7df8035dfc3e4519080da0f6bd904ec8cbdd859b/DateFilterPopup/9_7df8035dfc3e4519080da0f6bd904ec8cbdd859b_DateFilterPopup_s.java | b235ed6545a67a1c81d3de174705fbc3a5eb4725 | [] | no_license | zhongxingyu/Seer | 48e7e5197624d7afa94d23f849f8ea2075bcaec0 | c11a3109fdfca9be337e509ecb2c085b60076213 | refs/heads/master | 2023-07-06T12:48:55.516692 | 2023-06-22T07:55:56 | 2023-06-22T07:55:56 | 259,613,157 | 6 | 2 | null | 2023-06-22T07:55:57 | 2020-04-28T11:07:49 | null | UTF-8 | Java | false | false | 10,545 | java | package org.tepi.filtertable.datefilter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import org.tepi.filtertable.FilterDecorator;
import org.vaadin.hene.popupbutton.PopupButton;
import org.vaadin.hene.popupbutton.PopupButton.PopupVisibilityEvent;
import org.vaadin.hene.popupbutton.PopupButton.PopupVisibilityListener;
import com.vaadin.data.util.converter.Converter.ConversionException;
import com.vaadin.shared.ui.datefield.Resolution;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.Component;
import com.vaadin.ui.CustomField;
import com.vaadin.ui.DateField;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.InlineDateField;
import com.vaadin.ui.VerticalLayout;
/**
* Extension of PopupButton used to implement filter UI for Date properties.
* Users can select either start date, end date or both. The filter can also be
* set or cleared via a button in the filter pop-up.
*
* @author Teppo Kurki
*
*/
public class DateFilterPopup extends CustomField<DateInterval> {
    // Popup button that hosts the whole filter UI; its caption mirrors the current interval.
    private PopupButton content;
    // Inline calendars for the start and end of the interval.
    private DateField fromField, toField;
    // Last committed values, restored when the popup closes without "Set".
    private Date fromValue, toValue;
    // Set when "Set"/"Clear" was pressed, so the close handler skips the revert.
    private boolean cancelReset;
    private FilterDecorator decorator;
    private Button set, clear;
    private final Object propertyId;
    private String dateFormatPattern;
    private static final String DEFAULT_FROM_CAPTION = "From";
    private static final String DEFAULT_TO_CAPTION = "To";
    private static final String DEFAULT_SET_CAPTION = "Set";
    private static final String DEFAULT_CLEAR_CAPTION = "Clear";
    private static final Resolution DEFAULT_RESOLUTION = Resolution.DAY;

    /**
     * @param decorator optional source of captions/locale/resolution; may be null
     * @param propertyId container property this filter applies to
     */
    public DateFilterPopup(FilterDecorator decorator, Object propertyId) {
        this.decorator = decorator;
        this.propertyId = propertyId;
    }

    @Override
    public void attach() {
        super.attach();
        // Re-apply decorator settings once the component is attached,
        // so locale and formats reflect the live UI context.
        setFilterDecorator(decorator);
    }

    /** Pushes a new interval into both date fields and refreshes the button caption. */
    @Override
    public void setValue(DateInterval newFieldValue)
            throws com.vaadin.data.Property.ReadOnlyException,
            ConversionException {
        if (newFieldValue == null) {
            newFieldValue = new DateInterval(null, null);
        }
        fromField.setValue(newFieldValue.getFrom());
        toField.setValue(newFieldValue.getTo());
        super.setValue(newFieldValue);
        updateCaption(newFieldValue.isNull());
    }

    /** Builds the popup layout: from/to calendars side by side plus Set/Clear buttons. */
    private void buildPopup() {
        VerticalLayout content = new VerticalLayout();
        content.setStyleName("datefilterpopupcontent");
        content.setSpacing(true);
        content.setMargin(true);
        content.setSizeUndefined();
        fromField = new InlineDateField();
        toField = new InlineDateField();
        fromField.setImmediate(true);
        toField.setImmediate(true);
        set = new Button();
        clear = new Button();
        // One handler for both buttons: Clear nulls the fields, Set keeps them.
        ClickListener buttonClickHandler = new ClickListener() {
            public void buttonClick(ClickEvent event) {
                updateValue(clear.equals(event.getButton()));
            }
        };
        set.addClickListener(buttonClickHandler);
        clear.addClickListener(buttonClickHandler);
        HorizontalLayout buttonBar = new HorizontalLayout();
        buttonBar.setSizeUndefined();
        buttonBar.setSpacing(true);
        buttonBar.addComponent(set);
        buttonBar.addComponent(clear);
        HorizontalLayout row = new HorizontalLayout();
        row.setSizeUndefined();
        row.setSpacing(true);
        row.addComponent(fromField);
        row.addComponent(toField);
        content.addComponent(row);
        content.addComponent(buttonBar);
        content.setComponentAlignment(buttonBar, Alignment.BOTTOM_RIGHT);
        this.content.setContent(content);
    }

    /**
     * Applies captions, locale, date resolution and format pattern, preferring
     * decorator-provided values and falling back to the defaults above.
     */
    public void setFilterDecorator(FilterDecorator decorator) {
        this.decorator = decorator;
        /* Set DateField Locale */
        fromField.setLocale(getLocaleFailsafe());
        toField.setLocale(getLocaleFailsafe());
        String fromCaption = DEFAULT_FROM_CAPTION;
        String toCaption = DEFAULT_TO_CAPTION;
        String setCaption = DEFAULT_SET_CAPTION;
        String clearCaption = DEFAULT_CLEAR_CAPTION;
        Resolution resolution = DEFAULT_RESOLUTION;
        // Default pattern comes from the locale's short date-time format.
        dateFormatPattern = ((SimpleDateFormat) DateFormat.getDateTimeInstance(
                DateFormat.SHORT, DateFormat.SHORT, getLocaleFailsafe()))
                .toPattern();
        if (decorator != null) {
            if (decorator.getFromCaption() != null) {
                fromCaption = decorator.getFromCaption();
            }
            if (decorator.getToCaption() != null) {
                toCaption = decorator.getToCaption();
            }
            if (decorator.getSetCaption() != null) {
                setCaption = decorator.getSetCaption();
            }
            if (decorator.getClearCaption() != null) {
                clearCaption = decorator.getClearCaption();
            }
            if (decorator.getDateFieldResolution(propertyId) != null) {
                resolution = decorator.getDateFieldResolution(propertyId);
            }
            String dateFormatPattern = decorator
                    .getDateFormatPattern(propertyId);
            if (dateFormatPattern != null) {
                this.dateFormatPattern = dateFormatPattern;
            }
        }
        /* Set captions */
        fromField.setCaption(fromCaption);
        toField.setCaption(toCaption);
        set.setCaption(setCaption);
        clear.setCaption(clearCaption);
        /* Set resolutions and date formats */
        fromField.setResolution(resolution);
        toField.setResolution(resolution);
        fromField.setDateFormat(dateFormatPattern);
        toField.setDateFormat(dateFormatPattern);
    }

    /**
     * Refreshes the popup button caption: either the "all items" text (filter
     * cleared) or a "from - to" summary of the selected interval.
     */
    private void updateCaption(boolean nullTheCaption) {
        if (nullTheCaption) {
            if (decorator != null
                    && decorator.getAllItemsVisibleString() != null) {
                content.setCaption(decorator.getAllItemsVisibleString());
            } else {
                content.setCaption(null);
            }
        } else {
            SimpleDateFormat sdf = new SimpleDateFormat(dateFormatPattern);
            content.setCaption((fromField.getValue() == null ? "" : sdf
                    .format(fromField.getValue()))
                    + " - "
                    + (toField.getValue() == null ? "" : sdf.format(toField
                            .getValue())));
        }
    }

    /**
     * Commits the popup state as the field value. Clear nulls both dates;
     * Set keeps them and flags cancelReset so the popup-close handler does
     * not revert the fields. Dates are truncated to the configured resolution.
     */
    private void updateValue(boolean nullTheValue) {
        if (nullTheValue) {
            fromField.setValue(null);
            toField.setValue(null);
        } else {
            cancelReset = true;
        }
        /* Truncate the from and to dates */
        Resolution res = decorator != null ? decorator
                .getDateFieldResolution(propertyId) : DEFAULT_RESOLUTION;
        fromValue = truncateDate(fromField.getValue(), res, true);
        toValue = truncateDate(toField.getValue(), res, false);
        setValue(new DateInterval(fromValue, toValue));
        DateFilterPopup.this.content.setPopupVisible(false);
    }

    /**
     * Snaps a date to the start (or end) of the period implied by the
     * resolution, e.g. DAY start -> 00:00:00.000, DAY end -> 23:59:59.999,
     * so interval comparisons are inclusive at both edges.
     */
    private Date truncateDate(Date date, Resolution resolution, boolean start) {
        if (date == null) {
            return null;
        }
        Calendar cal = Calendar.getInstance(getLocaleFailsafe());
        cal.setTime(date);
        cal.set(Calendar.MILLISECOND, start ? 0 : 999);
        for (Resolution res : Resolution.getResolutionsLowerThan(resolution)) {
            if (res == Resolution.SECOND) {
                cal.set(Calendar.SECOND, start ? 0 : 59);
            } else if (res == Resolution.MINUTE) {
                cal.set(Calendar.MINUTE, start ? 0 : 59);
            } else if (res == Resolution.HOUR) {
                cal.set(Calendar.HOUR_OF_DAY, start ? 0 : 23);
            } else if (res == Resolution.DAY) {
                cal.set(Calendar.DAY_OF_MONTH,
                        start ? 1 : cal.getActualMaximum(Calendar.DAY_OF_MONTH));
            } else if (res == Resolution.MONTH) {
                cal.set(Calendar.MONTH,
                        start ? 0 : cal.getActualMaximum(Calendar.MONTH));
            }
        }
        return cal.getTime();
    }

    /** Resolves a usable Locale: decorator first, then the field's own, then system default. */
    private Locale getLocaleFailsafe() {
        /* First try the locale provided by the decorator */
        if (decorator != null && decorator.getLocale() != null) {
            return decorator.getLocale();
        }
        /* Then try application locale */
        if (super.getLocale() != null) {
            return super.getLocale();
        }
        /* Finally revert to system default locale */
        return Locale.getDefault();
    }

    /** Lazily builds the popup button; reverts uncommitted edits when the popup closes. */
    @Override
    protected Component initContent() {
        if (content == null) {
            content = new PopupButton(null);
            content.setWidth(100, Unit.PERCENTAGE);
            setImmediate(true);
            buildPopup();
            setStyleName("datefilterpopup");
            setFilterDecorator(decorator);
            updateCaption(true);
            content.addPopupVisibilityListener(new PopupVisibilityListener() {
                public void popupVisibilityChange(PopupVisibilityEvent event) {
                    // On open (or after Set/Clear), snapshot the field values;
                    // on a plain close, restore that snapshot to discard edits.
                    if (cancelReset || event.getPopupButton().isPopupVisible()) {
                        fromValue = fromField.getValue();
                        toValue = toField.getValue();
                        cancelReset = false;
                        return;
                    }
                    fromField.setValue(fromValue);
                    toField.setValue(toValue);
                    cancelReset = false;
                }
            });
        }
        return content;
    }

    @Override
    public Class<? extends DateInterval> getType() {
        return DateInterval.class;
    }
}
| [
"yuzhongxing88@gmail.com"
] | yuzhongxing88@gmail.com |
c89f96d2f179c8cad2894c037f06ff17490c708f | 0e94d2f66e1a8b4ec10fba56707dc3a95fcf6b75 | /src/test/java/com/crud/tasks/service/SimpleEmailServiceTest.java | 3a5977da54a0ebbfb3745293c9c6df4734118ae4 | [] | no_license | sebastiansobieraj/TasksManager | 04691da3bb5da02d9d8b563f49fce2f3aa9be945 | 1770735ce22b7328aa9928f9b262001c3159b306 | refs/heads/master | 2021-09-12T21:46:43.425015 | 2018-04-21T07:47:11 | 2018-04-21T07:47:11 | 119,746,125 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,443 | java | package com.crud.tasks.service;
import com.crud.tasks.domain.Mail;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.mail.SimpleMailMessage;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.mail.javamail.MimeMessagePreparator;
import javax.mail.MessagingException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class SimpleEmailServiceTest {
@InjectMocks
private SimpleEmailService simpleEmailService;
@Mock
private JavaMailSender javaMailSender;
@Mock
private MailCreatorService mailCreatorService;
@Test
public void shouldSendEmail() throws MessagingException {
//Given
Mail mail = new Mail("test@test.com", "Test","Test Message");
SimpleMailMessage mailMessage = new SimpleMailMessage();
mailMessage.setTo(mail.getMailTo());
mailMessage.setSubject(mail.getSubject());
mailMessage.setText(mail.getMessage());
//When
simpleEmailService.simpleSend(mail);
//Then
verify(javaMailSender, times(1)).send(mailMessage);
}
} | [
"“sebastian.sobieraj91@gmail.com"
] | “sebastian.sobieraj91@gmail.com |
908a306baaf869bff85940adbc355c3491bfe48e | 7bcaf9b6fc4bc40943664f0d049959fb7d8ad449 | /jobserver-server/src/main/java/com/weimob/jobserver/server/init/JobCache.java | 02409102611603163699b33fbad7719cc6ee7d1b | [] | no_license | ggj2010/job-server | ba181b6864659532e11e9afa462c466fe6e252e4 | d237c44f27cef9c37cd0b840221167a5ad335904 | refs/heads/master | 2021-01-17T22:56:26.275697 | 2016-08-31T07:59:39 | 2016-08-31T07:59:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 489 | java | package com.weimob.jobserver.server.init;
import com.weimob.jobserver.core.job.BaseAbstractJob;
import java.util.ArrayList;
import java.util.List;
/**
* @author: kevin
* @date 2016/08/19.
*/
public class JobCache {
private static List<BaseAbstractJob> jobList = new ArrayList<>();
public static void addJob(BaseAbstractJob baseAbstractJob) {
jobList.add(baseAbstractJob);
}
public static List<BaseAbstractJob> getJobList() {
return jobList;
}
}
| [
"xiaokang.yang@weimob.com"
] | xiaokang.yang@weimob.com |
5db0955d038cb2be2547e1d9c13ac8fd9892d476 | fff8f77f810bbd5fb6b4e5f7a654568fd9d3098d | /src/main/java/com/askgps/personaltrackercore/ui/loader/LoaderFragment.java | 2696dc25f8536147662981a36b15177c47b8bde8 | [] | no_license | TL148/gorkiy | b6ac8772587e9e643d939ea399bf5e7a42e89f46 | da8fbd017277cf72020c8c800326954bb1a0cee3 | refs/heads/master | 2021-05-21T08:24:39.286900 | 2020-04-03T02:57:49 | 2020-04-03T02:57:49 | 252,618,229 | 0 | 0 | null | 2020-04-03T02:54:39 | 2020-04-03T02:54:39 | null | UTF-8 | Java | false | false | 5,355 | java | package com.askgps.personaltrackercore.ui.loader;
import android.app.Dialog;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import androidx.fragment.app.DialogFragment;
import androidx.vectordrawable.graphics.drawable.AnimatedVectorDrawableCompat;
import com.askgps.personaltrackercore.R;
import java.util.HashMap;
import kotlin.Lazy;
import kotlin.LazyKt;
import kotlin.Metadata;
import kotlin.jvm.internal.Intrinsics;
import kotlin.jvm.internal.PropertyReference1Impl;
import kotlin.jvm.internal.Reflection;
import kotlin.reflect.KProperty;
@Metadata(bv = {1, 0, 3}, d1 = {"\u0000:\n\u0002\u0018\u0002\n\u0002\u0018\u0002\n\u0002\b\u0002\n\u0002\u0018\u0002\n\u0002\b\u0005\n\u0002\u0018\u0002\n\u0000\n\u0002\u0018\u0002\n\u0000\n\u0002\u0018\u0002\n\u0000\n\u0002\u0018\u0002\n\u0000\n\u0002\u0018\u0002\n\u0000\n\u0002\u0010\u0002\n\u0002\b\u0004\u0018\u00002\u00020\u0001B\u0005¢\u0006\u0002\u0010\u0002J\u0012\u0010\t\u001a\u00020\n2\b\u0010\u000b\u001a\u0004\u0018\u00010\fH\u0016J&\u0010\r\u001a\u0004\u0018\u00010\u000e2\u0006\u0010\u000f\u001a\u00020\u00102\b\u0010\u0011\u001a\u0004\u0018\u00010\u00122\b\u0010\u000b\u001a\u0004\u0018\u00010\fH\u0016J\b\u0010\u0013\u001a\u00020\u0014H\u0016J\b\u0010\u0015\u001a\u00020\u0014H\u0016J\u001a\u0010\u0016\u001a\u00020\u00142\u0006\u0010\u0017\u001a\u00020\u000e2\b\u0010\u000b\u001a\u0004\u0018\u00010\fH\u0016R\u001b\u0010\u0003\u001a\u00020\u00048BX\u0002¢\u0006\f\n\u0004\b\u0007\u0010\b\u001a\u0004\b\u0005\u0010\u0006¨\u0006\u0018"}, d2 = {"Lcom/askgps/personaltrackercore/ui/loader/LoaderFragment;", "Landroidx/fragment/app/DialogFragment;", "()V", "avd", "Landroidx/vectordrawable/graphics/drawable/AnimatedVectorDrawableCompat;", "getAvd", "()Landroidx/vectordrawable/graphics/drawable/AnimatedVectorDrawableCompat;", "avd$delegate", "Lkotlin/Lazy;", "onCreateDialog", "Landroid/app/Dialog;", "savedInstanceState", "Landroid/os/Bundle;", "onCreateView", "Landroid/view/View;", "inflater", "Landroid/view/LayoutInflater;", "container", "Landroid/view/ViewGroup;", "onPause", "", "onResume", "onViewCreated", "view", "personaltrackercore_release"}, k = 1, mv = {1, 1, 16})
/* compiled from: LoaderFragment.kt */
/**
 * Full-screen, non-cancelable loading dialog that shows an animated vector
 * drawable while work is in progress. Decompiled from Kotlin (LoaderFragment.kt);
 * the {@code $$delegatedProperties} / {@code avd$delegate} members are the
 * compiler's encoding of a Kotlin {@code by lazy} property.
 */
public final class LoaderFragment extends DialogFragment {
    // Kotlin reflection metadata for the lazy "avd" property delegate.
    static final /* synthetic */ KProperty[] $$delegatedProperties = {Reflection.property1(new PropertyReference1Impl(Reflection.getOrCreateKotlinClass(LoaderFragment.class), "avd", "getAvd()Landroidx/vectordrawable/graphics/drawable/AnimatedVectorDrawableCompat;"))};
    // Cache backing _$_findCachedViewById (Kotlin synthetic view binding).
    private HashMap _$_findViewCache;
    // Lazily created drawable; LoaderFragment$avd$2 is the generated lazy
    // initializer — presumably it inflates the AVD from resources (not visible here).
    private final Lazy avd$delegate = LazyKt.lazy(new LoaderFragment$avd$2(this));
    /** Resolves the lazily initialized animated drawable. */
    private final AnimatedVectorDrawableCompat getAvd() {
        Lazy lazy = this.avd$delegate;
        KProperty kProperty = $$delegatedProperties[0];
        return (AnimatedVectorDrawableCompat) lazy.getValue();
    }
    /** Clears the synthetic view-lookup cache (called from onDestroyView). */
    public void _$_clearFindViewByIdCache() {
        HashMap hashMap = this._$_findViewCache;
        if (hashMap != null) {
            hashMap.clear();
        }
    }
    /** Cached findViewById keyed by view id; returns null when there is no root view. */
    public View _$_findCachedViewById(int i) {
        if (this._$_findViewCache == null) {
            this._$_findViewCache = new HashMap();
        }
        View view = (View) this._$_findViewCache.get(Integer.valueOf(i));
        if (view != null) {
            return view;
        }
        View view2 = getView();
        if (view2 == null) {
            return null;
        }
        View findViewById = view2.findViewById(i);
        this._$_findViewCache.put(Integer.valueOf(i), findViewById);
        return findViewById;
    }
    public /* synthetic */ void onDestroyView() {
        super.onDestroyView();
        _$_clearFindViewByIdCache();
    }
    /* JADX DEBUG: Failed to find minimal casts for resolve overloaded methods, cast all args instead
     method: ClspMth{android.view.LayoutInflater.inflate(int, android.view.ViewGroup, boolean):android.view.View}
     arg types: [int, android.view.ViewGroup, int]
     candidates:
      ClspMth{android.view.LayoutInflater.inflate(org.xmlpull.v1.XmlPullParser, android.view.ViewGroup, boolean):android.view.View}
      ClspMth{android.view.LayoutInflater.inflate(int, android.view.ViewGroup, boolean):android.view.View} */
    /** Inflates the loader layout without attaching it to the container. */
    public View onCreateView(LayoutInflater layoutInflater, ViewGroup viewGroup, Bundle bundle) {
        Intrinsics.checkParameterIsNotNull(layoutInflater, "inflater");
        return layoutInflater.inflate(R.layout.fragment_loader, viewGroup, false);
    }
    /** Binds the animated drawable to the loader ImageView once the view exists. */
    public void onViewCreated(View view, Bundle bundle) {
        Intrinsics.checkParameterIsNotNull(view, "view");
        super.onViewCreated(view, bundle);
        ((ImageView) _$_findCachedViewById(R.id.loader_icon)).setImageDrawable(getAvd());
    }
    // Animation runs only while the fragment is resumed.
    public void onResume() {
        super.onResume();
        getAvd().start();
    }
    public void onPause() {
        super.onPause();
        getAvd().stop();
    }
    /** Creates a full-screen, non-cancelable dialog for the loader overlay. */
    public Dialog onCreateDialog(Bundle bundle) {
        Context context = getContext();
        if (context == null) {
            Intrinsics.throwNpe();
        }
        Dialog dialog = new Dialog(context, R.style.FullScreenDialog);
        dialog.setCancelable(false);
        return dialog;
    }
}
| [
"itaysontesterlab@gmail.com"
] | itaysontesterlab@gmail.com |
ed9350599be0b4948311fd4f2bcd7bd3a0d9da5c | 0933ede497c119cf99ddc730422143dacfbe3d98 | /src/main/java/com/dent_in/nlp/lexicon/exception/UnsupportedLexiconException.java | a1988e3b0994ac38072b4674eefb450b15739d8c | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mkabadjov/pnws | 961cc43321aebe115e915fc0436906a5493986eb | f589b5ccd27735581b5426a0f2a5b04aa08393d2 | refs/heads/master | 2020-12-08T00:08:29.634094 | 2020-01-13T14:54:34 | 2020-01-13T14:54:34 | 232,832,724 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,322 | java | /*
* Licensed to Dent-in Tech Solutions under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Dent-in Tech Solutions licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dent_in.nlp.lexicon.exception;
/**
 * Thrown when a requested lexicon (or lexicon operation) is not supported
 * by the current configuration. Mirrors the standard constructor set of
 * {@link UnsupportedOperationException}.
 */
public class UnsupportedLexiconException extends UnsupportedOperationException {

    // Exceptions are Serializable; declare an explicit serialVersionUID so the
    // serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    /** Creates an exception with no detail message. */
    public UnsupportedLexiconException() {
        super();
    }

    /**
     * Creates an exception with a detail message.
     *
     * @param message human-readable description of the unsupported lexicon
     */
    public UnsupportedLexiconException(String message) {
        super(message);
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message human-readable description of the unsupported lexicon
     * @param cause   the underlying failure, retrievable via {@link #getCause()}
     */
    public UnsupportedLexiconException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param cause the underlying failure, retrievable via {@link #getCause()}
     */
    public UnsupportedLexiconException(Throwable cause) {
        super(cause);
    }
}
| [
"mkabadjov@yahoo.co.uk"
] | mkabadjov@yahoo.co.uk |
4dff9421327d288086ac77f49bff9e81cf987504 | 2be98d3c9b3ddae9b25d2773e0e9384b1771bece | /src/main/java/com/jojoldu/book/springboot/web/dto/PostsListResponsDto/PostsListResponseDto.java | be3c9e498f7a8f69ef6b9cd435dc1ca3e539e6db | [] | no_license | chws/web-service-example | dbaeadba4edfec00df10adad204b389490fb4041 | b2d1ab595fc56b102cc5b81246073bce6fbffcb0 | refs/heads/master | 2022-12-30T07:05:28.330190 | 2020-10-18T02:42:44 | 2020-10-18T02:42:44 | 277,469,677 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 561 | java | package com.jojoldu.book.springboot.web.dto.PostsListResponsDto;
import com.jojoldu.book.springboot.domain.posts.Posts;
import java.time.LocalDateTime;
import lombok.Getter;
@Getter
/**
 * Read-only DTO for one row of the posts list view, populated from a
 * {@link Posts} entity. Getters are generated by Lombok's {@code @Getter}.
 */
public class PostsListResponseDto {
    // Entity primary key.
    private Long id;
    private String title;
    private String author;
    // Last-modified timestamp, copied from the entity's audit field.
    private LocalDateTime modifiedDate;

    /**
     * Copies the list-view fields out of the given entity.
     *
     * @param entity source entity; must be non-null
     */
    public PostsListResponseDto(Posts entity) {
        this.id = entity.getId();
        this.title = entity.getTitle();
        this.author = entity.getAuthor();
        this.modifiedDate = entity.getModifiedDate();
    }
}
| [
"rebeccachoosir@gmail.com"
] | rebeccachoosir@gmail.com |
c9714b363cc832a0d145252595040352046f717c | 99b64725a621b5a9bbe90046566024a94ff61cdc | /Axis15/src/axis2-1.5/modules/kernel/src/org/apache/axis2/deployment/repository/util/ArchiveReader.java | ea20c1ca4360011dc60a60c132ef67efc2b4a89b | [
"Apache-2.0"
] | permissive | cyril23/testone | f38832016329bf655fdc120cadae7159b9be05fb | 41797a2bfd00102f39b7e908a792e51523890d00 | refs/heads/master | 2020-08-31T19:13:33.378927 | 2015-02-11T01:24:04 | 2015-02-11T01:24:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 27,662 | java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.deployment.repository.util;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMNamespace;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.deployment.DeploymentConstants;
import org.apache.axis2.deployment.DeploymentErrorMsgs;
import org.apache.axis2.deployment.DeploymentException;
import org.apache.axis2.deployment.DescriptionBuilder;
import org.apache.axis2.deployment.ModuleBuilder;
import org.apache.axis2.deployment.ServiceBuilder;
import org.apache.axis2.deployment.ServiceGroupBuilder;
import org.apache.axis2.deployment.resolver.AARBasedWSDLLocator;
import org.apache.axis2.deployment.resolver.AARFileBasedURIResolver;
import org.apache.axis2.deployment.resolver.WarBasedWSDLLocator;
import org.apache.axis2.deployment.resolver.WarFileBasedURIResolver;
import org.apache.axis2.description.AxisModule;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.AxisServiceGroup;
import org.apache.axis2.description.WSDL11ToAllAxisServicesBuilder;
import org.apache.axis2.description.WSDL11ToAxisServiceBuilder;
import org.apache.axis2.description.WSDL20ToAllAxisServicesBuilder;
import org.apache.axis2.description.WSDL2Constants;
import org.apache.axis2.description.WSDLToAxisServiceBuilder;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.axis2.i18n.Messages;
import org.apache.axis2.namespace.Constants;
import org.apache.axis2.util.Utils;
import org.apache.axis2.util.XMLUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.xml.stream.XMLStreamException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
 * Reads Axis2 deployment artifacts — service archives (.aar), exploded service
 * directories and module archives (.mar) — locating services.xml / module.xml
 * and any bundled WSDL files, and building the corresponding description
 * objects ({@link AxisService}, {@link AxisServiceGroup}, {@link AxisModule}).
 */
public class ArchiveReader implements DeploymentConstants {
    private static final Log log = LogFactory.getLog(ArchiveReader.class);

    /**
     * Builds the service group (or single service) described by a services.xml
     * stream. A root {@code <service>} element yields a one-element list; a root
     * {@code <serviceGroup>} element yields all contained services.
     *
     * @param zin          stream positioned at the services.xml content
     * @param currentFile  the archive being deployed (used for naming/classloader)
     * @param axisServiceGroup group to attach the services to
     * @param wsdlServices services already built from bundled WSDLs, keyed by name
     * @param configCtx    current configuration context
     * @return the list of services built from the descriptor
     * @throws AxisFault if the root element is neither service nor serviceGroup
     */
    public ArrayList<AxisService> buildServiceGroup(InputStream zin, DeploymentFileData currentFile,
                                       AxisServiceGroup axisServiceGroup, HashMap<String, AxisService> wsdlServices,
                                       ConfigurationContext configCtx)
            throws XMLStreamException, AxisFault {

        DescriptionBuilder builder = new DescriptionBuilder(zin, configCtx);
        OMElement rootElement = builder.buildOM();
        String elementName = rootElement.getLocalName();

        if (TAG_SERVICE.equals(elementName)) {
            AxisService axisService = null;
            String serviceName = DescriptionBuilder.getShortFileName(currentFile.getName());
            // Prefer a service already built from a bundled WSDL so the
            // descriptor only overlays configuration on it.
            if (serviceName != null) {
                axisService = (AxisService) wsdlServices.get(serviceName);
            }
            if (axisService == null) {
                axisService = (AxisService) wsdlServices.get(
                        DescriptionBuilder.getShortFileName(currentFile.getName()));
            }
            if (axisService == null) {
                axisService = new AxisService(serviceName);
            } else {
                axisService.setWsdlFound(true);
                axisService.setCustomWsdl(true);
            }

            axisService.setParent(axisServiceGroup);
            axisService.setClassLoader(currentFile.getClassLoader());

            ServiceBuilder serviceBuilder = new ServiceBuilder(configCtx, axisService);
            serviceBuilder.setWsdlServiceMap(wsdlServices);
            AxisService service = serviceBuilder.populateService(rootElement);

            ArrayList<AxisService> serviceList = new ArrayList<AxisService>();
            serviceList.add(service);
            return serviceList;
        } else if (TAG_SERVICE_GROUP.equals(elementName)) {
            ServiceGroupBuilder groupBuilder = new ServiceGroupBuilder(rootElement, wsdlServices,
                                                                       configCtx);
            return groupBuilder.populateServiceGroup(axisServiceGroup);
        }
        throw new AxisFault("Invalid services.xml found");
    }

    /**
     * Extracts Service XML files and builds the service groups.
     *
     * @param filename
     * @param axisServiceGroup
     * @param extractService   false when reading straight out of a zip archive,
     *                         true when the archive is already exploded on disk
     * @param wsdlServices
     * @param configCtx
     * @return Returns ArrayList.
     * @throws DeploymentException if services.xml cannot be found or parsed
     */
    public ArrayList<AxisService> processServiceGroup(String filename, DeploymentFileData currentFile,
                                         AxisServiceGroup axisServiceGroup,
                                         boolean extractService,
                                         HashMap<String, AxisService> wsdlServices,
                                         ConfigurationContext configCtx)
            throws AxisFault {
        // get attribute values
        if (!extractService) {
            // Archive case: scan zip entries for META-INF/services.xml.
            ZipInputStream zin = null;
            FileInputStream fin = null;
            try {
                fin = new FileInputStream(filename);
                zin = new ZipInputStream(fin);
                ZipEntry entry;
                while ((entry = zin.getNextEntry()) != null) {
                    if (entry.getName().equalsIgnoreCase(SERVICES_XML)) {
                        axisServiceGroup.setServiceGroupName(
                                DescriptionBuilder.getShortFileName(currentFile.getName()));
                        return buildServiceGroup(zin, currentFile, axisServiceGroup, wsdlServices,
                                                 configCtx);
                    }
                }
                throw new DeploymentException(
                        Messages.getMessage(DeploymentErrorMsgs.SERVICE_XML_NOT_FOUND, filename));
            } catch (Exception e) {
                throw new DeploymentException(e);
            } finally {
                if (zin != null) {
                    try {
                        zin.close();
                    } catch (IOException e) {
                        log.info(Messages.getMessage("errorininputstreamclose"));
                    }
                }
                if (fin != null) {
                    try {
                        fin.close();
                    } catch (IOException e) {
                        log.info(Messages.getMessage("errorininputstreamclose"));
                    }
                }
            }
        } else {
            // Exploded-directory case: look for META-INF/services.xml (either case).
            File file = new File(filename, SERVICES_XML);
            if (!file.exists()) {
                // try for meta-inf
                file = new File(filename, SERVICES_XML.toLowerCase());
            }
            if (file.exists()) {
                InputStream in = null;
                try {
                    in = new FileInputStream(file);
                    axisServiceGroup.setServiceGroupName(currentFile.getName());
                    return buildServiceGroup(in, currentFile, axisServiceGroup, wsdlServices, configCtx);
                } catch (FileNotFoundException e) {
                    throw new DeploymentException(
                            Messages.getMessage(DeploymentErrorMsgs.FILE_NOT_FOUND,
                                                e.getMessage()));
                } catch (XMLStreamException e) {
                    throw new DeploymentException(
                            Messages.getMessage(DeploymentErrorMsgs.XML_STREAM_EXCEPTION,
                                                e.getMessage()));
                } finally {
                    if (in != null) {
                        try {
                            in.close();
                        } catch (IOException e) {
                            log.info(Messages.getMessage("errorininputstreamclose"));
                        }
                    }
                }
            } else {
                throw new DeploymentException(
                        Messages.getMessage(DeploymentErrorMsgs.SERVICE_XML_NOT_FOUND));
            }
        }
    }

    /**
     * Creats AxisService.
     *
     * Runs the given WSDL builder (1.1 or 2.0) with resolvers appropriate to
     * the source (inside an archive vs. on the filesystem). Returns null and
     * only logs on failure — callers treat a bad WSDL as "no services".
     *
     * @param in stream over the WSDL content (used by the custom WSDL locator)
     * @return Returns the services built from the WSDL, or null on failure.
     * @throws DeploymentException
     */
    private List<AxisService> processWSDLFile(WSDLToAxisServiceBuilder axisServiceBuilder,
                                              File serviceArchiveFile,
                                              boolean isArchive, InputStream in, String baseURI)
            throws DeploymentException {
        try {

            if (serviceArchiveFile != null && isArchive) {
                axisServiceBuilder.setCustomResolver(
                        new AARFileBasedURIResolver(serviceArchiveFile));
                if (axisServiceBuilder instanceof WSDL11ToAllAxisServicesBuilder) {

                    ((WSDL11ToAllAxisServicesBuilder) axisServiceBuilder).setCustomWSDLResolver(
                            new AARBasedWSDLLocator(baseURI, serviceArchiveFile, in));
                    ((WSDL11ToAllAxisServicesBuilder) axisServiceBuilder).setDocumentBaseUri(
                            serviceArchiveFile.getCanonicalFile().toURI().toString());

                } else if (axisServiceBuilder instanceof WSDL20ToAllAxisServicesBuilder) {
                    ((WSDL20ToAllAxisServicesBuilder) axisServiceBuilder).setCustomWSDLResolver(
                            new AARBasedWSDLLocator(baseURI, serviceArchiveFile, in));
                    // trying to use the jar scheme as the base URI. I think this can be used to handle
                    // wsdl 1.1 as well without using a custom URI resolver. Need to look at it later.
                    axisServiceBuilder.setBaseUri(
                            "jar:file://" + serviceArchiveFile.toURI() + "!/" + baseURI);
                }
            } else {
                if (serviceArchiveFile != null) {
                    axisServiceBuilder.setBaseUri(
                            serviceArchiveFile.getParentFile().toURI().toString());
                    if (axisServiceBuilder instanceof WSDL11ToAllAxisServicesBuilder) {
                        ((WSDL11ToAllAxisServicesBuilder) axisServiceBuilder).setDocumentBaseUri(
                                serviceArchiveFile.getCanonicalFile().toURI().toString());
                    }
                }
            }
            if (axisServiceBuilder instanceof WSDL11ToAllAxisServicesBuilder) {
                return ((WSDL11ToAllAxisServicesBuilder) axisServiceBuilder).populateAllServices();
            } else if (axisServiceBuilder instanceof WSDL20ToAllAxisServicesBuilder) {
                return ((WSDL20ToAllAxisServicesBuilder) axisServiceBuilder).populateAllServices();
            }
        } catch (AxisFault axisFault) {
            log.info("Trouble processing wsdl file :" + axisFault.getMessage());
            if (log.isDebugEnabled()) {
                log.debug(axisFault);
            }
        } catch (IOException ioex) {
            log.info("Trouble processing wsdl file :" + ioex.getMessage());
            if (log.isDebugEnabled()) {
                log.debug(ioex);
            }
        }
        return null;
    }

    /**
     * Creates service objects from wsdl file inside a service archive file.
     *
     * For an exploded directory this scans META-INF; for a zip archive it scans
     * entries, skipping imported/nested WSDLs, and dispatches on the root
     * element namespace to the WSDL 1.1 or 2.0 builder.
     *
     * @param file <code>ArchiveFileData</code>
     * @return map of service name to service, for every top-level bundled WSDL
     * @throws DeploymentException <code>DeploymentException</code>
     */
    public HashMap<String, AxisService> processWSDLs(DeploymentFileData file)
            throws DeploymentException {
        File serviceFile = file.getFile();
        // to store service come from wsdl files
        HashMap<String, AxisService> servicesMap = new HashMap<String, AxisService>();
        boolean isDirectory = serviceFile.isDirectory();
        if (isDirectory) {
            try {
                File metaInfFolder = new File(serviceFile, META_INF);

                if (!metaInfFolder.exists()) {
                    metaInfFolder = new File(serviceFile, META_INF.toLowerCase());
                    if (!metaInfFolder.exists()) {
                        throw new DeploymentException(
                                Messages.getMessage(
                                        DeploymentErrorMsgs.META_INF_MISSING,
                                        serviceFile.getName()));
                    }
                }

                processFilesInFolder(metaInfFolder, servicesMap);

            } catch (FileNotFoundException e) {
                throw new DeploymentException(e);
            } catch (IOException e) {
                throw new DeploymentException(e);
            } catch (XMLStreamException e) {
                throw new DeploymentException(e);
            }
        } else {
            // NOTE(review): zin/fin are closed only on the success path; an
            // exception thrown mid-loop leaks both streams — consider a finally.
            ZipInputStream zin;
            FileInputStream fin;
            try {
                fin = new FileInputStream(serviceFile);
                zin = new ZipInputStream(fin);

                //TODO Check whether this WSDL is empty

                ZipEntry entry;
                byte[] buf = new byte[1024];
                int read;
                ByteArrayOutputStream out;
                while ((entry = zin.getNextEntry()) != null) {
                    String entryName = entry.getName().toLowerCase();
                    if (entryName.startsWith(META_INF.toLowerCase())
                            && entryName.endsWith(SUFFIX_WSDL)) {
                        out = new ByteArrayOutputStream();

                        // we do not want to generate the services for the
                        // imported wsdl of one file.
                        if ((entryName.indexOf("/") != entryName.lastIndexOf("/"))
                                || (entryName.indexOf("wsdl_") != -1)) {
                            //only care abt the toplevel wsdl
                            continue;
                        }

                        while ((read = zin.read(buf)) > 0) {
                            out.write(buf, 0, read);
                        }

                        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());

                        // now the question is which version of WSDL file this archive contains.
                        // lets check the namespace of the root element and decide. But since we are
                        // using axiom (dude, you are becoming handy here :)), we will not build the
                        // whole thing.
                        OMNamespace documentElementNS =
                                ((OMElement) XMLUtils.toOM(in)).getNamespace();
                        if (documentElementNS != null) {
                            WSDLToAxisServiceBuilder wsdlToAxisServiceBuilder;
                            if (WSDL2Constants.WSDL_NAMESPACE
                                    .equals(documentElementNS.getNamespaceURI())) {
                                // we have a WSDL 2.0 document here.
                                wsdlToAxisServiceBuilder = new WSDL20ToAllAxisServicesBuilder(
                                        new ByteArrayInputStream(out.toByteArray()));
                                wsdlToAxisServiceBuilder.setBaseUri(entryName);
                            } else if (Constants.NS_URI_WSDL11.
                                    equals(documentElementNS.getNamespaceURI())) {
                                wsdlToAxisServiceBuilder = new WSDL11ToAllAxisServicesBuilder(
                                        new ByteArrayInputStream(out.toByteArray()));
                                ((WSDL11ToAxisServiceBuilder) wsdlToAxisServiceBuilder).setDocumentBaseUri(entryName);
                            } else {
                                throw new DeploymentException(Messages.getMessage("invalidWSDLFound"));
                            }
                            List<AxisService> services = processWSDLFile(wsdlToAxisServiceBuilder,
                                                                         serviceFile, true,
                                                                         new ByteArrayInputStream(
                                                                                 out.toByteArray()),
                                                                         entry.getName());
                            if (services != null) {
                                for (int i = 0; i < services.size(); i++) {
                                    AxisService axisService = (AxisService) services.get(i);
                                    if (axisService != null) {
                                        servicesMap.put(axisService.getName(), axisService);
                                    }
                                }
                            }

                        }
                    }
                }
                try {
                    zin.close();
                } catch (IOException e) {
                    log.info(e);
                }
                try {
                    fin.close();
                } catch (IOException e) {
                    log.info(e);
                }
            } catch (FileNotFoundException e) {
                throw new DeploymentException(e);
            } catch (IOException e) {
                throw new DeploymentException(e);
            } catch (XMLStreamException e) {
                throw new DeploymentException(e);
            }
        }
        return servicesMap;
    }

    /**
     * Builds services from a single WSDL reachable through the given
     * classloader (WAR-style deployment), dispatching on the root element
     * namespace to the WSDL 1.1 or 2.0 builder. Returns null when the root
     * element has no namespace.
     *
     * @param in      stream over the WSDL document
     * @param loader  classloader used to resolve WSDL/XSD imports
     * @param wsdlUrl base URL of the WSDL, used by the locator for imports
     */
    public List<AxisService> getAxisServiceFromWsdl(InputStream in,
                                       ClassLoader loader, String wsdlUrl) throws Exception {
//         ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());

        // now the question is which version of WSDL file this archive contains.
        // lets check the namespace of the root element and decide. But since we are
        // using axiom (dude, you are becoming handy here :)), we will not build the
        // whole thing.
        OMElement element = (OMElement) XMLUtils.toOM(in);
        OMNamespace documentElementNS = element.getNamespace();
        if (documentElementNS != null) {
            WSDLToAxisServiceBuilder wsdlToAxisServiceBuilder;
            // Re-serialize so the builder and the locator each get an
            // independent stream over the same bytes.
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            element.serialize(out);
            if (Constants.NS_URI_WSDL11.
                    equals(documentElementNS.getNamespaceURI())) {
                wsdlToAxisServiceBuilder = new WSDL11ToAllAxisServicesBuilder(
                        new ByteArrayInputStream(out.toByteArray()));
                ((WSDL11ToAllAxisServicesBuilder)wsdlToAxisServiceBuilder).setCustomWSDLResolver(new WarBasedWSDLLocator(wsdlUrl,
                                                                                                                        loader,
                                                                                                                        new ByteArrayInputStream(
                                                                                                                                out.toByteArray())));
                wsdlToAxisServiceBuilder.setCustomResolver(
                        new WarFileBasedURIResolver(loader));
                return ((WSDL11ToAllAxisServicesBuilder)wsdlToAxisServiceBuilder).populateAllServices();
            } else if (WSDL2Constants.WSDL_NAMESPACE.
                    equals(documentElementNS.getNamespaceURI())){
                wsdlToAxisServiceBuilder = new WSDL20ToAllAxisServicesBuilder(
                        new ByteArrayInputStream(out.toByteArray()));
                ((WSDL20ToAllAxisServicesBuilder)wsdlToAxisServiceBuilder).setCustomWSDLResolver(new WarBasedWSDLLocator(wsdlUrl,
                                                                                                                        loader,
                                                                                                                        new ByteArrayInputStream(
                                                                                                                                out.toByteArray())));
                wsdlToAxisServiceBuilder.setCustomResolver(
                        new WarFileBasedURIResolver(loader));
                return ((WSDL20ToAllAxisServicesBuilder)wsdlToAxisServiceBuilder).populateAllServices();
            }
            else {
                throw new DeploymentException(Messages.getMessage("invalidWSDLFound"));
            }
        }
        return null;
    }

    /**
     * Scans a META-INF folder for *.wsdl files and adds every service built
     * from them to {@code servicesMap} (keyed by service name).
     */
    public void processFilesInFolder(File folder, HashMap<String, AxisService> servicesMap)
            throws FileNotFoundException, XMLStreamException, DeploymentException {
        File files[] = folder.listFiles();
        for (int i = 0; i < files.length; i++) {
            File file1 = files[i];
            if (file1.getName().toLowerCase().endsWith(SUFFIX_WSDL)) {
                // 'in' is consumed to sniff the root namespace; 'in2' is a
                // fresh stream handed to the builder.
                InputStream in = new FileInputStream(file1);
                FileInputStream in2;

                // now the question is which version of WSDL file this archive contains.
                // lets check the namespace of the root element and decide. But since we are
                // using axiom (dude, you are becoming handy here :)), we will not build the
                // whole thing.
                OMNamespace documentElementNS = ((OMElement) XMLUtils.toOM(in)).getNamespace();
                if (documentElementNS != null) {
                    WSDLToAxisServiceBuilder wsdlToAxisServiceBuilder;
                    if (WSDL2Constants.WSDL_NAMESPACE
                            .equals(documentElementNS.getNamespaceURI())) {
                        // we have a WSDL 2.0 document here.
                        in2 = new FileInputStream(file1);
                        wsdlToAxisServiceBuilder = new WSDL20ToAllAxisServicesBuilder(in2);
                    } else if (Constants.NS_URI_WSDL11.
                            equals(documentElementNS.getNamespaceURI())) {
                        in2 = new FileInputStream(file1);
                        wsdlToAxisServiceBuilder = new WSDL11ToAllAxisServicesBuilder(in2);
                        ((WSDL11ToAxisServiceBuilder) wsdlToAxisServiceBuilder).setDocumentBaseUri(file1.toURI()
                                .toString());
                    } else {
                        throw new DeploymentException(Messages.getMessage("invalidWSDLFound"));
                    }
                    // NOTE(review): in3 is opened but never read before being
                    // closed — looks like dead code left over from a refactor.
                    FileInputStream in3 = new FileInputStream(file1);
                    List<AxisService> services = processWSDLFile(wsdlToAxisServiceBuilder, file1, false,
                                                    in2, file1.toURI().toString());

                    if (services != null) {
                        for (int j = 0; j < services.size(); j++) {
                            AxisService axisService = (AxisService) services.get(j);
                            if (axisService != null) {
                                servicesMap.put(axisService.getName(), axisService);
                            }
                        }
                    }
                    try {
                        in2.close();
                        in3.close();
                    } catch (IOException e) {
                        log.info(e);
                    }
                }
                try {
                    in.close();
                } catch (IOException e) {
                    log.info(e);
                }
            }
        }
    }

    /**
     * Reads module.xml from a module archive (or exploded module directory),
     * sets the module's name and version from the file name, and populates the
     * given {@link AxisModule}.
     *
     * @param explodedDir true when the module is an exploded directory rather
     *                    than a .mar archive
     * @throws DeploymentException if module.xml is missing or cannot be parsed
     */
    public void readModuleArchive(DeploymentFileData deploymentFile,
                                  AxisModule module, boolean explodedDir,
                                  AxisConfiguration axisConfig)
            throws DeploymentException {
        // get attribute values
        boolean moduleXMLFound = false;
        String shortFileName = DescriptionBuilder.getShortFileName(deploymentFile.getName());
        if (!explodedDir) {
            ZipInputStream zin;
            FileInputStream fin;
            try {
                fin = new FileInputStream(deploymentFile.getAbsolutePath());
                zin = new ZipInputStream(fin);
                ZipEntry entry;
                while ((entry = zin.getNextEntry()) != null) {
                    if (entry.getName().equalsIgnoreCase(MODULE_XML)) {
                        moduleXMLFound = true;
                        ModuleBuilder builder = new ModuleBuilder(zin, module, axisConfig);
                        // setting module name
                        module.setName(Utils.getModuleName(shortFileName));
                        module.setVersion(Utils.getModuleVersion(shortFileName));
                        builder.populateModule();
                        break;
                    }
                }
                zin.close();
                fin.close();
                if (!moduleXMLFound) {
                    throw new DeploymentException(
                            Messages.getMessage(
                                    DeploymentErrorMsgs.MODULE_XML_MISSING,
                                    deploymentFile.getAbsolutePath()));
                }
            } catch (Exception e) {
                throw new DeploymentException(e);
            }
        } else {
            File file = new File(deploymentFile.getAbsolutePath(), MODULE_XML);

            if (file.exists() ||
                    (file = new File(deploymentFile.getAbsolutePath(), MODULE_XML.toLowerCase()))
                            .exists()) {
                InputStream in = null;
                try {
                    in = new FileInputStream(file);
                    ModuleBuilder builder = new ModuleBuilder(in, module, axisConfig);
                    // setting module name
                    module.setName(Utils.getModuleName(shortFileName));
                    module.setVersion(Utils.getModuleVersion(shortFileName));
                    builder.populateModule();
                } catch (FileNotFoundException e) {
                    throw new DeploymentException(
                            Messages.getMessage(DeploymentErrorMsgs.FILE_NOT_FOUND,
                                                e.getMessage()));
                } finally {
                    if (in != null) {
                        try {
                            in.close();
                        } catch (IOException e) {
                            log.info(Messages.getMessage("errorininputstreamclose"));
                        }
                    }
                }
            } else {
                throw new DeploymentException(
                        Messages.getMessage(
                                DeploymentErrorMsgs.MODULE_XML_MISSING,
                                deploymentFile.getAbsolutePath()));
            }
        }
    }
}
| [
""
] | |
a77b0970f21ecd80cbd367731dea4a2f5932a969 | 39e36b0518f6b67aa3168b184fab9a19aee53c5e | /app/src/main/java/com/example/gagan/proj1/widgets/SeparatorDecoration.java | 144bdbd930c003abfba18cda87c0d03b5b8d36ac | [] | no_license | gagan1994/SomeProj | 5264f84be23a3fe8286201d5259e9314d41be329 | fa59e83b61a5c11334b92ecc170355e5f723817b | refs/heads/master | 2020-03-24T03:51:31.629127 | 2018-07-26T12:01:32 | 2018-07-26T12:01:32 | 142,434,886 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,835 | java | package com.example.gagan.proj1.widgets;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.support.annotation.ColorInt;
import android.support.annotation.FloatRange;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.TypedValue;
import android.view.View;
/**
* Created by Gagan on 4/18/2018.
*/
/**
 * RecyclerView item decoration that draws a horizontal separator line of a
 * given color and thickness below every item except the last one.
 */
public class SeparatorDecoration extends RecyclerView.ItemDecoration {

    private final Paint mPaint;

    /**
     * Create a decoration that draws a line in the given color and width between the items in the view.
     *
     * @param context  a context to access the resources.
     * @param color    the color of the separator to draw.
     * @param heightDp the height of the separator in dp.
     */
    public SeparatorDecoration(@NonNull Context context, @ColorInt int color,
                               @FloatRange(from = 0, fromInclusive = false) float heightDp) {
        mPaint = new Paint();
        mPaint.setColor(color);
        final float thickness = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                heightDp, context.getResources().getDisplayMetrics());
        mPaint.setStrokeWidth(thickness);
    }

    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
        final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) view.getLayoutParams();
        // we want to retrieve the position in the list
        final int position = params.getViewAdapterPosition();
        // BUG FIX: the previous check was `position < state.getItemCount()`,
        // which holds for every valid position, so the last item also got a
        // separator offset. Skip the last item (and detached views reporting
        // NO_POSITION).
        if (position != RecyclerView.NO_POSITION && position < state.getItemCount() - 1) {
            outRect.set(0, 0, 0, (int) mPaint.getStrokeWidth()); // left, top, right, bottom
        } else {
            outRect.setEmpty(); // 0, 0, 0, 0
        }
    }

    @Override
    public void onDraw(Canvas c, RecyclerView parent, RecyclerView.State state) {
        // we set the stroke width before, so as to correctly draw the line we have to offset by width / 2
        final int offset = (int) (mPaint.getStrokeWidth() / 2);

        // this will iterate over every visible view
        for (int i = 0; i < parent.getChildCount(); i++) {
            final View view = parent.getChildAt(i);
            final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) view.getLayoutParams();
            final int position = params.getViewAdapterPosition();

            // draw a separator below every item except the last one
            // (BUG FIX: same off-by-one as in getItemOffsets)
            if (position != RecyclerView.NO_POSITION && position < state.getItemCount() - 1) {
                c.drawLine(view.getLeft(), view.getBottom() + offset, view.getRight(), view.getBottom() + offset, mPaint);
            }
        }
    }
}
| [
"gagan.s.patil18@gmail.com"
] | gagan.s.patil18@gmail.com |
b3de2bdc5b0eae6f4ae1fefb4f231f12a4963bb0 | 701b49a5d4a24490c78af5322e0fab6d134a285a | /Core/src/main/jaxb/org/openestate/is24/restapi/xml/offerlistelement/Adapter1.java | c34799cc803d2748f7b74bae85cadf7563a3ca39 | [
"Apache-2.0"
] | permissive | ArbenMaloku/OpenEstate-IS24-REST | 8bcab3356f0400629ddb718bf0bf86c08a560d25 | 1fd999591ebc635fdccfb0e661c6bfde74315319 | refs/heads/master | 2021-10-08T06:12:29.435737 | 2018-12-09T00:11:09 | 2018-12-09T00:11:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 507 | java |
package org.openestate.is24.restapi.xml.offerlistelement;
import java.math.BigDecimal;
import javax.xml.bind.annotation.adapters.XmlAdapter;
/**
 * JAXB {@link XmlAdapter} (generated code) that converts between the XML
 * string form of a room count for a base house and {@link BigDecimal},
 * delegating both directions to the project's XmlUtils helpers.
 */
public class Adapter1
    extends XmlAdapter<String, BigDecimal>
{

    // XML -> Java: parse the room-number string.
    public BigDecimal unmarshal(String value) {
        return (org.openestate.is24.restapi.utils.XmlUtils.parseRoomNrForBaseHouse(value));
    }

    // Java -> XML: print the room number back to its string form.
    public String marshal(BigDecimal value) {
        return (org.openestate.is24.restapi.utils.XmlUtils.printRoomNrForBaseHouse(value));
    }

}
| [
"andy@openindex.de"
] | andy@openindex.de |
49fe822f3c3b404b0920b960dda2b846f210d5be | 4abd603f82fdfa5f5503c212605f35979b77c406 | /html/Programs/hw4/47c64d3d7966efed70bd077d001a8c3e/Deques.java | e688518415f5f57f9a30ed1b3ee6374c7e773a30 | [] | no_license | dn070017/1042-PDSA | b23070f51946c8ac708d3ab9f447ab8185bd2a34 | 5e7d7b1b2c9d751a93de9725316aa3b8f59652e6 | refs/heads/master | 2020-03-20T12:13:43.229042 | 2018-06-15T01:00:48 | 2018-06-15T01:00:48 | 137,424,305 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,653 | java |
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.lang.NullPointerException;
/*
.
* To change this template file, choose Tools | Templates
.
*/
/**
*
* @author user
*/
/**
 * A double-ended queue backed by a doubly linked list. Supports adding and
 * removing items at both ends in constant time, plus front-to-back iteration.
 *
 * <p>Fixes over the previous revision: removing the only element now resets
 * both {@code first} and {@code last} to null (previously the deque never
 * became empty again and later operations saw stale nodes), and the corrupted
 * string literals ({@code ""..""}) are restored.
 *
 * @param <Item> element type; null elements are rejected
 */
public class Deque<Item> implements Iterable<Item> {

    private Node<Item> first; // first node, or null when empty
    private Node<Item> last;  // last node, or null when empty
    private int N;            // number of items

    /** Creates an empty deque. */
    public Deque() {
        first = null;
        last = null;
        N = 0;
    }

    /** Doubly linked list node. */
    private static class Node<Item> {
        private Item item;
        private Node<Item> next;
        private Node<Item> pre;
    }

    /** Returns true when the deque holds no items. */
    public boolean isEmpty() {
        return first == null;
    }

    /** Returns the number of items in the deque. */
    public int size() {
        return N;
    }

    /**
     * Adds an item at the front.
     *
     * @throws NullPointerException if item is null
     */
    public void addFirst(Item item) {
        if (item == null) {
            throw new NullPointerException();
        }
        Node<Item> node = new Node<Item>();
        node.item = item;
        node.next = first;
        if (first == null) {
            last = node;       // first element: both ends point at it
        } else {
            first.pre = node;
        }
        first = node;
        N++;
    }

    /**
     * Adds an item at the back.
     *
     * @throws NullPointerException if item is null
     */
    public void addLast(Item item) {
        if (item == null) {
            throw new NullPointerException();
        }
        Node<Item> node = new Node<Item>();
        node.item = item;
        node.pre = last;
        if (last == null) {
            first = node;      // first element: both ends point at it
        } else {
            last.next = node;
        }
        last = node;
        N++;
    }

    /**
     * Removes and returns the item at the front.
     *
     * @throws NoSuchElementException if the deque is empty
     */
    public Item removeFirst() {
        if (isEmpty()) {
            throw new NoSuchElementException("Deque underflow");
        }
        Item item = first.item;
        first = first.next;
        if (first == null) {
            last = null;       // BUG FIX: deque is now empty; drop both ends
        } else {
            first.pre = null;
        }
        N--;
        return item;
    }

    /**
     * Removes and returns the item at the back.
     *
     * @throws NoSuchElementException if the deque is empty
     */
    public Item removeLast() {
        if (isEmpty()) {
            throw new NoSuchElementException("Deque underflow");
        }
        Item item = last.item;
        last = last.pre;
        if (last == null) {
            first = null;      // BUG FIX: deque is now empty; drop both ends
        } else {
            last.next = null;
        }
        N--;
        return item;
    }

    /** Returns an iterator over the items from front to back. */
    public Iterator<Item> iterator() {
        return new ListIterator<Item>(first);
    }

    /** Front-to-back iterator; remove() is not supported. */
    private static class ListIterator<Item> implements Iterator<Item> {
        private Node<Item> current;

        public ListIterator(Node<Item> first) {
            current = first;
        }

        public boolean hasNext() {
            return current != null;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }

        public Item next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            Item item = current.item;
            current = current.next;
            return item;
        }
    }

    /**
     * Small self-demonstration.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        Deque<String> d = new Deque<String>();
        d.addFirst("hello");
        d.addLast("world");
        System.out.println(d.removeFirst() + " " + d.removeLast());
    }
}
"dn070017@gmail.com"
] | dn070017@gmail.com |
e96abac4fcd992e855b7f10594f4f0db44646083 | a322414dbb93c4d8991c2bbad02d539969bebb5a | /wheel-lib/src/main/java/com/github/wzq/wheel/models/CityModel.java | bbe3e192c1da9960fffa6350c467fd54149fbb8a | [] | no_license | wzq/WheelAddress | 992ac4cbba5634b521798122f3197abbf7031a05 | e2361f4b8be3f5be03dfac685c29a43498d870f0 | refs/heads/master | 2021-01-10T18:30:10.444494 | 2016-09-22T13:21:53 | 2016-09-22T13:21:53 | 38,424,978 | 3 | 0 | null | null | null | null | UTF-8 | Java | false | false | 780 | java | package com.github.wzq.wheel.models;
import java.util.List;
/**
* Created by wzq on 15/4/23.
*/
public class CityModel {

    /** Display name of the city. */
    private String name;

    /** Districts belonging to this city. */
    private List<DistrictModel> districtList;

    /** No-arg constructor for frameworks and deserializers. */
    public CityModel() {
    }

    /** Convenience constructor initialising both fields. */
    public CityModel(String name, List<DistrictModel> districtList) {
        this.name = name;
        this.districtList = districtList;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<DistrictModel> getDistrictList() {
        return districtList;
    }

    public void setDistrictList(List<DistrictModel> districtList) {
        this.districtList = districtList;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("CityModel [name=");
        sb.append(name).append(", districtList=").append(districtList).append("]");
        return sb.toString();
    }
}
| [
"297001985@qq.com"
] | 297001985@qq.com |
da0d356888d6e190eef492e860f279203ecd7ccb | 7e613bc9d0196d833880362cc36d0ff811488b39 | /src/day_12/Person.java | 8ba517de0f1211e019c91df3f04cc36bf8682fc7 | [] | no_license | ksmn1133/shangGuiGu | def479155b7bdddbe7beb2517cf138217f4a5c83 | a705603d2b60cff7e50e07b853f5bf26fabe1d20 | refs/heads/master | 2023-02-23T06:25:08.622821 | 2021-01-24T15:18:40 | 2021-01-24T15:18:40 | 332,470,763 | 1 | 0 | null | 2021-01-24T15:18:41 | 2021-01-24T14:30:44 | Java | UTF-8 | Java | false | false | 204 | java | package day_12;
/**
* @author xiaoxia
* @create 2020/11/27 1:52 下午
*/
public class Person {

    /** Prints a fixed greeting identifying this as the superclass implementation. */
    public void method() throws Exception {
        final String message = "super class's method!";
        System.out.println(message);
    }
}
| [
"48642805+ksmn1133@users.noreply.github.com"
] | 48642805+ksmn1133@users.noreply.github.com |
57ba15eaa43904fb4200e55c446d2afe7005bb40 | bb3951fcb9fa7522b1160a00a8313a9658fbc054 | /app/src/main/java/com/example/data/SplashActivity.java | ab93700129a4a42b1ba417ea55a13c991e4a35f7 | [] | no_license | ibnu-wildan/membaca | 226e23c73dc8abee5fb4da19029171088e1d2937 | 0630e9c35689af088ad06460157628e1232179d3 | refs/heads/master | 2023-01-21T10:53:45.892633 | 2020-08-17T03:13:44 | 2020-08-17T03:13:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 533 | java | package com.example.data;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
public class SplashActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// langsung pindah ke MainActivity atau activity lain
// begitu memasuki splash screen ini
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}
} | [
"pamanhentai@gmail.com"
] | pamanhentai@gmail.com |
a7ed39821738f4cdd52d8e9732178e1eb2ca69a3 | 076eb24fb028fe15e2cf7e5f806e1b31b15f98b8 | /1945비행기 프로그램/1945-SwingGame-master/1945-SwingGame-master/src/frame/GameFrame.java | 4a26396837d29e90bb9c1d7d30ffa62579108c09 | [] | no_license | plomqawz/Java-Swing-Project | d5a70c6047a53ed97aa29416887213fabaee0091 | 09327243cb3bf62ced4f80e83a1b6fbd3a37f1c9 | refs/heads/master | 2023-08-22T12:16:12.080581 | 2021-10-21T05:18:14 | 2021-10-21T05:18:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,847 | java | package frame;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import javax.swing.JFrame;
import objects.PlayerPlane;
public class GameFrame extends JFrame implements screenSize {

    // Self-reference handed to child panels so they can call back into this frame.
    private GameFrame gameFrame = this;

    public boolean isgame; // whether a game is currently running

    public GamePanel gamePanel; // in-game panel (original note: watch this closely — error-prone!)
    public GameTitle gameTitle; // title/intro panel
    public SelectAPI selectAPI; // plane-selection panel

    public PlayerPlane player; // the player's plane
    // NOTE(review): 'player' is never assigned in this class — presumably set by
    // GamePanel; key presses before that assignment would NPE. TODO confirm.

    public GameFrame() {
        init();
        setting();
        listener();
        setVisible(true);
    }

    /** Shows the title screen and marks the game as not yet running. */
    public void init() {
        change("gameTitle"); // initial title screen
        isgame = false; // not currently in a game
    }

    /** Basic window setup: title, size, close behaviour, centering. */
    public void setting() {
        setTitle("Strikers 1945");
        setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setLocationRelativeTo(null);
    }

    /**
     * Swaps the frame's content pane by panel name ("gameTitle", "selectAPL",
     * "gameMap"); any other name tears everything down (used as a reset).
     */
    public void change(String panelName) {
        if (panelName.equals("gameTitle")) {
            gameTitle = new GameTitle(gameFrame);
            getContentPane().removeAll();
            getContentPane().add(gameTitle);
            revalidate();
            repaint();
        } else if (panelName.equals("selectAPL")) {
            selectAPI = new SelectAPI(gameFrame);
            getContentPane().removeAll();
            getContentPane().add(selectAPI);
            revalidate();
            repaint();
        } else if (panelName.equals("gameMap")) {
            gamePanel = new GamePanel(gameFrame);
            getContentPane().removeAll();
            getContentPane().add(gamePanel);
            revalidate();
            repaint();
        } else {
            // Unknown name: drop every panel reference and blank the frame.
            gameTitle = null;
            selectAPI = null;
            gamePanel = null;
            isgame = false;
            getContentPane().removeAll();
            revalidate();
            repaint();
        }
    }

    /**
     * Installs the keyboard handler: ENTER advances to plane selection,
     * arrows/space/1 are forwarded to the player plane as movement,
     * fire and weapon-upgrade flags (set on press, cleared on release).
     */
    public void listener() {
        addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                switch (e.getKeyCode()) {
                case KeyEvent.VK_1:
                    player.setWepponLevelUp(true);
                    break;
                case KeyEvent.VK_ENTER:
                    change("selectAPL");
                    break;
                case KeyEvent.VK_SPACE:
                    player.setAttack(true);
                    break;
                case KeyEvent.VK_UP:
                    player.setUp(true);
                    break;
                case KeyEvent.VK_DOWN:
                    player.setDown(true);
                    break;
                case KeyEvent.VK_LEFT:
                    player.setLeft(true);
                    break;
                case KeyEvent.VK_RIGHT:
                    player.setRight(true);
                    break;
                }
            }

            @Override
            public void keyReleased(KeyEvent e) {
                switch (e.getKeyCode()) {
                case KeyEvent.VK_1:
                    player.setWepponLevelUp(false);
                    break;
                case KeyEvent.VK_SPACE:
                    player.setAttack(false);
                    break;
                case KeyEvent.VK_UP:
                    player.setUp(false);
                    break;
                case KeyEvent.VK_DOWN:
                    player.setDown(false);
                    break;
                case KeyEvent.VK_LEFT:
                    player.setLeft(false);
                    break;
                case KeyEvent.VK_RIGHT:
                    player.setRight(false);
                    break;
                }
            }
        });
    }
}
| [
"ssar@nate.com"
] | ssar@nate.com |
c76f5c870189ac5a4925bc4663df8c3d7444ba4b | ee98570c681aeabe1546c03da6f7f5da2dd9217d | /Backup/PerfectNo.java.bak | bcabae419f110ce868c02bcd9ea4ad461a02d3a2 | [] | no_license | superpaulza/kmitl.programming | 0bf5cedcbfd3e95560b5c156cb2fd2917969921d | 7e8a42aaaa0ef6bb4d51116e39a034f494a2fdd3 | refs/heads/master | 2022-12-15T06:24:56.225974 | 2020-09-07T16:44:34 | 2020-09-07T16:44:34 | 293,473,615 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,023 | bak | import java.util.*;
class PerfectNo
{
    /**
     * Reads three integers from stdin and classifies each as perfect,
     * deficient, or abundant by comparing it with the sum of its proper
     * divisors. Fixed: the original contained malformed doubled-quote
     * string literals (compile errors).
     */
    public static void main(String[] args)
    {
        Scanner scan = new Scanner(System.in);
        System.out.print("Enter number x ==> ");
        int x = scan.nextInt();
        System.out.print("Enter number y ==> ");
        int y = scan.nextInt();
        System.out.print("Enter number z ==> ");
        int z = scan.nextInt();

        int result_x = result(x);
        int result_y = result(y);
        int result_z = result(z);

        check_number(x, result_x);
        check_number(y, result_y);
        check_number(z, result_z);
    }

    ////////////////////////////////////////////////////////////////
    /**
     * @return the sum of the proper divisors of A (all divisors smaller than A)
     */
    static int result(int A)
    {
        int result = 0;
        for (int i = 1; i < A; i++) {
            if (A % i == 0) {
                result = result + i;
            }
        }
        return result;
    }

    /////////////////////////////////////////////////////////////////
    /**
     * Prints the classification of a given its proper-divisor sum b.
     * Bug fix: the original swapped the labels — a number whose divisor sum
     * EXCEEDS it (a < b) is abundant, and one whose divisor sum falls short
     * (a > b) is deficient.
     */
    static void check_number(int a, int b)
    {
        if (a == b) {
            System.out.println(a + " is perfect number");
        } else if (a < b) {
            // divisor sum exceeds the number -> abundant
            System.out.println(a + " is abundant number");
        } else {
            // divisor sum falls short of the number -> deficient
            System.out.println(a + " is deficient number");
        }
    }
}
| [
"166_35049@st.bcc1852.com"
] | 166_35049@st.bcc1852.com |
136628df11706a346b7d61243376343051313758 | 7cc0e4d9eb6ee3d4f0206e570bdf56641537a8bf | /dispatchAPI/src/main/java/com/tuplestores/api/model/general/DriverModel.java | 25d819f5ca749866dc0faf80512aee81d34aa3ad | [] | no_license | tuplestores/Api | 69aa6cb5f0b99f70cbb0bf8d189d68a0048f344a | fc02625f8b15d6f44fcd7671f8c3811e491551c4 | refs/heads/master | 2022-12-29T10:50:57.677693 | 2019-07-16T13:56:35 | 2019-07-16T13:56:35 | 187,573,955 | 0 | 0 | null | 2022-12-16T10:51:39 | 2019-05-20T05:36:40 | Java | UTF-8 | Java | false | false | 2,649 | java | package com.tuplestores.api.model.general;
/**
 * Plain data carrier for a driver record plus API response status fields.
 * All properties are simple strings with conventional accessors.
 */
public class DriverModel {

    // Tenant / driver identity.
    private String tenant_id;
    private String driver_id;

    // Contact and profile details.
    private String email;
    private String first_name;
    private String last_name;
    private String isd_code;
    private String mobile;

    // Runtime state.
    private String driver_online;
    private String checked_in_vehicle_id;
    private String verified;
    private String invite_code;

    // API response envelope.
    private String status;
    private String msg;

    // Denormalised display fields.
    private String driver_name;
    private String driver_mobile;
    private String driver_email;

    public String getStatus() { return status; }
    public void setStatus(String status) { this.status = status; }

    public String getMsg() { return msg; }
    public void setMsg(String msg) { this.msg = msg; }

    public String getTenant_id() { return tenant_id; }
    public void setTenant_id(String tenant_id) { this.tenant_id = tenant_id; }

    public String getDriver_id() { return driver_id; }
    public void setDriver_id(String driver_id) { this.driver_id = driver_id; }

    public String getEmail() { return email; }
    public void setEmail(String email) { this.email = email; }

    public String getFirst_name() { return first_name; }
    public void setFirst_name(String first_name) { this.first_name = first_name; }

    public String getLast_name() { return last_name; }
    public void setLast_name(String last_name) { this.last_name = last_name; }

    public String getIsd_code() { return isd_code; }
    public void setIsd_code(String isd_code) { this.isd_code = isd_code; }

    public String getMobile() { return mobile; }
    public void setMobile(String mobile) { this.mobile = mobile; }

    public String getDriver_online() { return driver_online; }
    public void setDriver_online(String driver_online) { this.driver_online = driver_online; }

    public String getChecked_in_vehicle_id() { return checked_in_vehicle_id; }
    public void setChecked_in_vehicle_id(String checked_in_vehicle_id) { this.checked_in_vehicle_id = checked_in_vehicle_id; }

    public String getVerified() { return verified; }
    public void setVerified(String verified) { this.verified = verified; }

    public String getInvite_code() { return invite_code; }
    public void setInvite_code(String invite_code) { this.invite_code = invite_code; }

    public String getDriver_name() { return driver_name; }
    public void setDriver_name(String driver_name) { this.driver_name = driver_name; }

    public String getDriver_mobile() { return driver_mobile; }
    public void setDriver_mobile(String driver_mobile) { this.driver_mobile = driver_mobile; }

    public String getDriver_email() { return driver_email; }
    public void setDriver_email(String driver_email) { this.driver_email = driver_email; }
}
| [
"ajishd.tuplestores@gmail.com"
] | ajishd.tuplestores@gmail.com |
7c46736ac909df2adeab3d9e6ca6822d92c7c461 | f0918d0706e167420646eb140f23235ee409415d | /src/com/upes/connectfour/Controller.java | e0a1bdb28e76b412533cbf5223d489c850e8b89e | [] | no_license | AnuragPant01/ConnectFour | e71aab2b02dc2f59f924d95bc125bf65c6fea329 | 05e29565fd14ae6b9d3d158d695274c9af9d3891 | refs/heads/master | 2022-12-10T16:21:28.371812 | 2020-08-18T15:13:28 | 2020-08-18T15:13:28 | 288,489,868 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 7,644 | java | package com.upes.connectfour;
import javafx.animation.TranslateTransition;
import javafx.application.Platform;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.geometry.Point2D;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Label;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Rectangle;
import javafx.scene.shape.Shape;
import javafx.util.Duration;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class Controller implements Initializable {

    // Board geometry (7 columns x 6 rows of 80px holes).
    private static final int COLUMNS=7;
    private static final int ROWS=6;
    private static final int CIRCLE_DIAMETER=80;
    private static final String discColor1="#24303E"; // player one's disc colour
    private static final String discColor2="#4CAA88"; // player two's disc colour

    // NOTE(review): mutable statics named like constants — player names.
    private static String PLAYER_ONE="Player One";
    private static String PLAYER_TWO="Player Two";

    private boolean isPlayerOneTurn=true;

    private Disc[][] insertedDiscArray=new Disc[ROWS][COLUMNS]; //For Structural Changes

    @FXML
    public GridPane rootGridPane;

    @FXML
    public Pane insertedDiscsPane;

    @FXML
    public Label playerNameLabel;

    private boolean isAllowedToInsert=true; //flag to avoid same color disc being added

    /** Builds the board: the white mask with holes plus clickable column overlays. */
    public void createPlayground(){
        Shape rectangleWithHoles=createGameStructuralGrid();
        rootGridPane.add(rectangleWithHoles,0,1);

        List<Rectangle> rectangleList=createClickableColumns();
        for(Rectangle rectangle:rectangleList){
            rootGridPane.add(rectangle,0,1);
        }
    }

    /** Returns a white rectangle with one circular hole punched per board cell. */
    private Shape createGameStructuralGrid(){
        Shape rectangleWithHoles=new Rectangle((COLUMNS+1)*CIRCLE_DIAMETER,(ROWS+1)*CIRCLE_DIAMETER);

        for(int row=0;row<ROWS;row++){
            for(int col=0;col<COLUMNS;col++){
                Circle circle=new Circle();
                circle.setRadius(CIRCLE_DIAMETER/2);
                circle.setCenterX(CIRCLE_DIAMETER/2);
                circle.setCenterY(CIRCLE_DIAMETER/2);
                circle.setSmooth(true);

                // Position each hole; the +5 is per-cell spacing, /4 the outer margin.
                circle.setTranslateX(col*(CIRCLE_DIAMETER+5) +CIRCLE_DIAMETER/4);
                circle.setTranslateY(row*(CIRCLE_DIAMETER+5) +CIRCLE_DIAMETER/4);

                rectangleWithHoles=Shape.subtract(rectangleWithHoles,circle);
            }
        }

        rectangleWithHoles.setFill(Color.WHITE);

        return rectangleWithHoles;
    }

    /** One transparent full-height rectangle per column; click drops a disc there. */
    public List<Rectangle> createClickableColumns(){
        List<Rectangle> rectangleList=new ArrayList<>();
        for(int col=0;col<COLUMNS;col++){
            Rectangle rectangle=new Rectangle(CIRCLE_DIAMETER,(ROWS+1)*CIRCLE_DIAMETER);
            rectangle.setFill(Color.TRANSPARENT);
            rectangle.setTranslateX(col*(CIRCLE_DIAMETER+5) +CIRCLE_DIAMETER/4);

            // Hover highlight.
            rectangle.setOnMouseEntered(event -> rectangle.setFill(Color.valueOf("#eeeeee26")));
            rectangle.setOnMouseExited(event -> rectangle.setFill(Color.TRANSPARENT));

            final int column=col;
            rectangle.setOnMouseClicked(event -> {
                if(isAllowedToInsert) {
                    isAllowedToInsert=false; //when disc is being dropped then no more disc will be inserted
                    insertDisc(new Disc(isPlayerOneTurn), column);
                }
            });

            rectangleList.add(rectangle);
        }
        return rectangleList;
    }

    /**
     * Drops a disc into the given column: finds the lowest free row, records it,
     * animates the fall, then checks for a win and flips the turn.
     */
    private void insertDisc(Disc disc,int column){
        int row=ROWS-1;
        // Scan upward from the bottom for the first free cell.
        while (row>=0){
            if(getDiscIfPresent(row,column)==null)
                break;
            row--;
        }
        if(row<0) //if row is completely full ,we cannot fill more
            return;

        insertedDiscArray[row][column]=disc; //for structural changes
        insertedDiscsPane.getChildren().add(disc); //for visual changes for users discspane-->second pane
        disc.setTranslateX(column*(CIRCLE_DIAMETER+5) +CIRCLE_DIAMETER/4);

        int currentRow=row;
        TranslateTransition translateTransition=new TranslateTransition(Duration.seconds(0.5),disc);
        translateTransition.setToY(row *(CIRCLE_DIAMETER+5) +CIRCLE_DIAMETER/4);
        translateTransition.setOnFinished(event -> {

            isAllowedToInsert=true; //Finally when disc is dropped then second player can insert disc
            if(gameEnded(currentRow,column)){
                gameOver();
                return;
            }

            isPlayerOneTurn=!isPlayerOneTurn;
            playerNameLabel.setText(isPlayerOneTurn?PLAYER_ONE:PLAYER_TWO);
        });

        translateTransition.play();
    }

    /**
     * True if the disc just placed at (row, column) completes a line of four
     * vertically, horizontally, or on either diagonal. Each candidate line is a
     * 7-cell window centred on the new disc; out-of-range cells are filtered out
     * later by getDiscIfPresent().
     */
    private boolean gameEnded(int row,int column){
        List<Point2D> verticalPoints=IntStream.rangeClosed(row-3,row+3) // window of rows through (row, column)
                                     .mapToObj(r->new Point2D(r,column)) // Point2D.x = row, Point2D.y = column
                                     .collect(Collectors.toList());

        List<Point2D> horizontalPoints=IntStream.rangeClosed(column-3,column+3)
                                        .mapToObj(col->new Point2D(row,col))
                                        .collect(Collectors.toList());

        // "/" diagonal: from upper-right toward lower-left through the new disc.
        Point2D startPoint1=new Point2D(row-3,column+3);
        List<Point2D> diagonal1Points=IntStream.rangeClosed(0,6)
                                        .mapToObj(i->startPoint1.add(i,-i))
                                        .collect(Collectors.toList());

        // "\" diagonal: from upper-left toward lower-right through the new disc.
        Point2D startPoint2=new Point2D(row-3,column-3);
        List<Point2D> diagonal2Points=IntStream.rangeClosed(0,6)
                                        .mapToObj(i->startPoint2.add(i,i))
                                        .collect(Collectors.toList());

        boolean isEnded=checkCombinations(verticalPoints) || checkCombinations(horizontalPoints)
                        || checkCombinations(diagonal1Points) || checkCombinations(diagonal2Points);

        return isEnded;
    }

    /** True if the given cells contain 4 consecutive discs of the current player. */
    private boolean checkCombinations(List<Point2D> points) {
        int chain=0;
        for(Point2D point:points){

            int rowIndexForArray= (int) point.getX();
            int columnIndexForArray= (int) point.getY();

            Disc disc=getDiscIfPresent(rowIndexForArray,columnIndexForArray);
            if(disc!=null && disc.isPlayerOneMove==isPlayerOneTurn){
                chain++;
                if(chain==4){
                    return true;
                }
            }else{
                chain=0; // gap or opponent disc breaks the run
            }
        }
        return false;
    }

    private Disc getDiscIfPresent(int row,int column){   //To prevent ArrayIndexOutOfBoundException
        if(row>=ROWS || row<0 || column>=COLUMNS || column<0)
            return null;
        return insertedDiscArray[row][column];
    }

    /** Announces the winner and offers a restart; runs the dialog on the FX thread. */
    private void gameOver(){
        String winner=isPlayerOneTurn?PLAYER_ONE:PLAYER_TWO;
        System.out.println("Winner is " +winner);

        Alert alert=new Alert(Alert.AlertType.INFORMATION);
        alert.setTitle("Connect four");
        alert.setHeaderText("The winner is " + winner);
        alert.setContentText("Want to play again? ");

        ButtonType yesBtn=new ButtonType("Yes");
        ButtonType noBtn=new ButtonType("No, Exit");
        alert.getButtonTypes().setAll(yesBtn,noBtn);

        Platform.runLater(()->{

            Optional<ButtonType> btnClicked=alert.showAndWait();
            if(btnClicked.isPresent() && btnClicked.get()==yesBtn){
                //user has chosen yes so reset the game
                resetGame();
            }else {
                //user has chosen no and exit the game
                Platform.exit();
                System.exit(0);
            }
        });
    }

    /** Clears board state and UI and starts a fresh game with player one. */
    public void resetGame() {

        insertedDiscsPane.getChildren().clear();  //Remove all the inserted disc from the pane

        for(int row=0;row<insertedDiscArray.length;row++){   //structurally make insertedDiscArray back to null
            for(int col=0;col<insertedDiscArray[row].length;col++){
                insertedDiscArray[row][col]=null;
            }
        }

        isPlayerOneTurn=true;  //let player one start the game
        playerNameLabel.setText(PLAYER_ONE);

        createPlayground();    //create playgroud again (freshly)
    }

    /** A coloured disc that remembers which player dropped it. */
    private static class Disc extends Circle{
        private final boolean isPlayerOneMove;

        public Disc(boolean isPlayerOneMove){
            this.isPlayerOneMove=isPlayerOneMove;
            setRadius(CIRCLE_DIAMETER/2);
            setFill(isPlayerOneMove?Color.valueOf(discColor1):Color.valueOf(discColor2));
            setCenterX(CIRCLE_DIAMETER/2);
            setCenterY(CIRCLE_DIAMETER/2);
        }
    }

    @Override
    public void initialize(URL location, ResourceBundle resources) {
        // No FXML initialisation needed; the playground is built on demand.
    }
}
| [
"66867605+AnuragPant01@users.noreply.github.com"
] | 66867605+AnuragPant01@users.noreply.github.com |
80270cbac2fa62848c1eba7cdaf0b473c938efda | f52981eb9dd91030872b2b99c694ca73fb2b46a8 | /Platform/Plugins/com.tle.platform.common/src/com/dytech/common/text/AbstractTopDownParser.java | f96d4b8dd97fed9f115462fb81a15c9cf07b0509 | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"LGPL-2.1-only",
"LicenseRef-scancode-jdom",
"GPL-1.0-or-later",
"ICU",
"CDDL-1.0",
"LGPL-3.0-only",
"LicenseRef-scancode-other-permissive",
"CPL-1.0",
"MIT",
"GPL-2.0-only",
"Apache-2.0",
"NetCDF",
"Apache-1.1",
"EPL-1.0",
"Classpath-exception-2.... | permissive | phette23/Equella | baa41291b91d666bf169bf888ad7e9f0b0db9fdb | 56c0d63cc1701a8a53434858a79d258605834e07 | refs/heads/master | 2020-04-19T20:55:13.609264 | 2019-01-29T03:27:40 | 2019-01-29T22:31:24 | 168,427,559 | 0 | 0 | Apache-2.0 | 2019-01-30T22:49:08 | 2019-01-30T22:49:08 | null | UTF-8 | Java | false | false | 5,372 | java | /*
* Copyright 2017 Apereo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dytech.common.text;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.Reader;
import java.text.ParseException;
/**
 * Implements the basics for a simple, fast, BNF-based, top-down parser.
 * Examples of usage can be be found in The Learning Edge where the parsers are
 * used for basic XOQL and Javascript parsing.
 *
 * @author Nicholas Read
 */
@SuppressWarnings("nls")
public abstract class AbstractTopDownParser
{
	private static final int PUSHBACK_BUFFER_SIZE = 32;

	private PushbackReader in;
	private int offset = 0;
	protected char look;

	/**
	 * Constructs a new AbstractTopDownParser.
	 */
	public AbstractTopDownParser(Reader in)
	{
		this.in = new PushbackReader(in, PUSHBACK_BUFFER_SIZE);
	}

	// // COMMON HELPER METHODS ///////////////////////////////////////////////

	/**
	 * Retrieves an integer value from the stream.
	 */
	protected int getIntegerValue() throws ParseException
	{
		if( !Character.isDigit(look) )
		{
			throw new ParseException("Digit value expected", getCurrentOffset());
		}

		// Read all the digits available
		StringBuilder result = new StringBuilder();
		do
		{
			result.append(look);
			getChar();
		}
		while( Character.isDigit(look) );

		// If the digit is not followed by white space, it's screwed
		// NOTE(review): an integer immediately followed by EOF also fails this
		// check, since the EOF sentinel is not whitespace — TODO confirm intent.
		if( !Character.isWhitespace(look) )
		{
			throw new ParseException("Integer is followed by non-digits", getCurrentOffset());
		}

		// Because we didn't match anything, we have to skip whitespace.
		skipWhiteSpace();

		return Integer.parseInt(result.toString());
	}

	/**
	 * Retrieves a boolean value from the stream.
	 */
	protected boolean getBooleanValue() throws ParseException
	{
		if( isLookAhead('t') )
		{
			match("true"); //$NON-NLS-1$
			return true;
		}
		else if( isLookAhead('f') )
		{
			match("false"); //$NON-NLS-1$
			return false;
		}
		else
		{
			throw new ParseException("Boolean value could not be read", getCurrentOffset());
		}
	}

	// // PARSER FUNCTIONS ////////////////////////////////////////////////////

	/**
	 * Matches the stream in the future, but maintains the existing parser
	 * state.
	 *
	 * Bug fix: the EOF checks previously compared the char field 'look' against
	 * the int -1, which is always false ((char) -1 promotes to 65535), so at
	 * end-of-stream this appended '\uFFFF' sentinels and could overflow the
	 * pushback buffer for long match strings. Both checks now use isEOF().
	 */
	protected boolean matchTheFuture(String s) throws ParseException
	{
		// Save our current state
		char originalLook = look;
		StringBuilder fromStream = new StringBuilder();

		// Read the length of the string to match, or until the end of the
		// stream.
		final int count = s.length() - 1;
		for( int i = 0; i < count && !isEOF(); i++ )
		{
			getChar();
			if( !isEOF() )
			{
				fromStream.append(look);
			}
		}

		boolean match = s.equalsIgnoreCase(originalLook + fromStream.toString());

		// Reset state
		look = originalLook;
		for( int i = fromStream.length() - 1; i >= 0; i-- )
		{
			try
			{
				in.unread(fromStream.charAt(i));
			}
			catch( IOException ex )
			{
				ex.printStackTrace();
				throw new ParseException("Parser died while trying to see the future!", getCurrentOffset());
			}
		}

		return match;
	}

	/**
	 * Matches the given stream, moving us to the next token.
	 */
	protected void match(String s) throws ParseException
	{
		int count = s.length();
		for( int i = 0; i < count; i++ )
		{
			try
			{
				match(s.charAt(i));
			}
			catch( ParseException e )
			{
				throw new ParseException("Match Failed: Expected '" + s + "'", getCurrentOffset());
			}
		}
	}

	/**
	 * Matches the given character against the look-ahead, and moves to the next
	 * token.
	 */
	protected void match(char c) throws ParseException
	{
		if( isLookAhead(c) )
		{
			skipWhiteSpace();
		}
		else
		{
			throw new ParseException("Expected '" + c + "' but found '" + look + "'", getCurrentOffset());
		}
	}

	/**
	 * Case insensitive check against the look-ahead character.
	 */
	protected boolean isLookAhead(char c)
	{
		return Character.toLowerCase(look) == Character.toLowerCase(c);
	}

	/**
	 * The current single character look-ahead.
	 */
	protected char look()
	{
		return look;
	}

	protected int getCurrentOffset()
	{
		return offset;
	}

	/**
	 * @return true if at the end of the stream.
	 */
	protected boolean isEOF()
	{
		return look == (char) -1;
	}

	/**
	 * Skips any whitespace.
	 */
	protected void skipWhiteSpace() throws ParseException
	{
		skipWhiteSpace(true);
	}

	/**
	 * Skips any whitespace.
	 *
	 * @param skipCurrentRegardless if true, always consume the current
	 *            look-ahead character first, even if it is not whitespace.
	 */
	protected void skipWhiteSpace(boolean skipCurrentRegardless) throws ParseException
	{
		if( skipCurrentRegardless )
		{
			getChar();
		}

		while( Character.isWhitespace(look) )
		{
			getChar();
		}
	}

	/**
	 * Moves on to the next character in the stream.
	 */
	protected void getChar() throws ParseException
	{
		try
		{
			look = (char) in.read();
			if( !isEOF() )
			{
				offset++;
			}
		}
		catch( IOException e )
		{
			throw new ParseException("Could not read stream", getCurrentOffset());
		}
	}
}
| [
"doolse@gmail.com"
] | doolse@gmail.com |
51467210fecfec874d2787ad60b71d00ce9e9828 | 6a5fa0baed4cc0482b34c94a794283f0c9167ac7 | /helloMybatis/src/main/java/com/spring/book/controller/BookController.java | 3898a23896b43a43775a8e14be3a1da0f81a5a0d | [] | no_license | Limky/HelloSpring | d329d92ca3d6ef1855e0232d39e8911f259a7198 | dd03c02d1fd6c12b6308c26193798aeb1e34ecfb | refs/heads/master | 2021-01-17T17:33:27.649684 | 2016-08-09T01:08:44 | 2016-08-09T01:08:44 | 65,249,147 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 693 | java | package com.spring.book.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.spring.book.service.BookService;
@Controller
public class BookController {

    // Injected service supplying the book list from the persistence layer.
    @Autowired
    BookService bookService;

    /**
     * Handles GET "/" by exposing the full book list to the view under the
     * model attribute "books" and rendering the book list page.
     */
    @RequestMapping(value = "/", method = RequestMethod.GET)
    public String home(Model model) {
        model.addAttribute("books", bookService.getBookList());
        return "/book/book";
    }
}
| [
"limky@sqisoft.com"
] | limky@sqisoft.com |
561752bf91c7947a1834cab33d0ea036b5e58448 | bf2966abae57885c29e70852243a22abc8ba8eb0 | /aws-java-sdk-kinesisvideo/src/main/java/com/amazonaws/services/kinesisvideo/model/transform/ListStreamsResultJsonUnmarshaller.java | 3ebf44da76f34705964c7f4d90ab2a779a9860c4 | [
"Apache-2.0"
] | permissive | kmbotts/aws-sdk-java | ae20b3244131d52b9687eb026b9c620da8b49935 | 388f6427e00fb1c2f211abda5bad3a75d29eef62 | refs/heads/master | 2021-12-23T14:39:26.369661 | 2021-07-26T20:09:07 | 2021-07-26T20:09:07 | 246,296,939 | 0 | 0 | Apache-2.0 | 2020-03-10T12:37:34 | 2020-03-10T12:37:33 | null | UTF-8 | Java | false | false | 3,096 | java | /*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisvideo.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.kinesisvideo.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* ListStreamsResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListStreamsResultJsonUnmarshaller implements Unmarshaller<ListStreamsResult, JsonUnmarshallerContext> {

    /**
     * Walks the JSON token stream and populates a ListStreamsResult from the
     * "StreamInfoList" and "NextToken" members found at the immediate child
     * depth. (Generated by the AWS SDK code generator; avoid hand-editing.)
     */
    public ListStreamsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        ListStreamsResult listStreamsResult = new ListStreamsResult();

        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null payload yields an empty result object.
        if (token == VALUE_NULL) {
            return listStreamsResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("StreamInfoList", targetDepth)) {
                    context.nextToken();
                    listStreamsResult.setStreamInfoList(new ListUnmarshaller<StreamInfo>(StreamInfoJsonUnmarshaller.getInstance())
                            .unmarshall(context));
                }
                if (context.testExpression("NextToken", targetDepth)) {
                    context.nextToken();
                    listStreamsResult.setNextToken(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out to the depth we started at.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return listStreamsResult;
    }

    private static ListStreamsResultJsonUnmarshaller instance;

    /** Lazily-created shared instance (matches the other generated unmarshallers). */
    public static ListStreamsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ListStreamsResultJsonUnmarshaller();
        return instance;
    }
}
| [
""
] | |
6f3b73ff4166bb85ef8199745b822b8ee3a689eb | 97d8e2ea32ae486298614d6910362e3004430110 | /AndroidStudioProjects/Browser/app/src/test/java/com/example/ashu/browser/ExampleUnitTest.java | 923677e763e2567c4ed870bd31d48783559805e0 | [] | no_license | ak8527/TestApp | b58a3e59659a013312f9ca37a4bede080e355899 | 4c265acfb05d826c00960a652b06a77a2007b2ff | refs/heads/master | 2020-03-28T11:45:06.115704 | 2018-09-11T01:57:30 | 2018-09-11T01:57:30 | 148,243,909 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 401 | java | package com.example.ashu.browser;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() {
        // Template sanity test generated by Android Studio: 2 + 2 == 4.
        assertEquals(4, 2 + 2);
    }
}
"ashutoshkumar1320@gmail.com"
] | ashutoshkumar1320@gmail.com |
74b80a17e01d16dcebf4de418e94635ad32578bf | a6b8d26d270ceff09a216a6ec94d77c82ec0bf1f | /MyApplication/app/src/test/java/com/example/kirill/lab14/ExampleUnitTest.java | f135be423adf0d6a12514f6bb553dedc4acc629e | [] | no_license | Kirusha251/Android_Java | 957b91d198dc7f0d5ec243c640759afc50b060b7 | a7b7ebf2ba35179462817b4493c10e773f056ce8 | refs/heads/master | 2021-01-17T16:16:51.711586 | 2016-12-12T20:06:47 | 2016-12-12T20:06:47 | 70,148,121 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 317 | java | package com.example.kirill.lab14;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() throws Exception {
        // Template sanity test generated by Android Studio: 2 + 2 == 4.
        assertEquals(4, 2 + 2);
    }
}
"mtanl8957@gmail.com"
] | mtanl8957@gmail.com |
38b83050bb7cbfb23b456f1e234469cff71ac31b | ed3cb95dcc590e98d09117ea0b4768df18e8f99e | /project_1_1/src/i/d/j/Calc_1_1_8392.java | 8634ab2618c5f764e7a55b7dba8857d54e3873ce | [] | no_license | chalstrick/bigRepo1 | ac7fd5785d475b3c38f1328e370ba9a85a751cff | dad1852eef66fcec200df10083959c674fdcc55d | refs/heads/master | 2016-08-11T17:59:16.079541 | 2015-12-18T14:26:49 | 2015-12-18T14:26:49 | 48,244,030 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 131 | java | package i.d.j;
public class Calc_1_1_8392 {

    /**
     * Adds two integers.
     *
     * @param a the first addend
     * @param b the second addend
     * @return the sum of a and b
     */
    public int add(int a, int b) {
        int sum = a + b;
        return sum;
    }
}
| [
"christian.halstrick@sap.com"
] | christian.halstrick@sap.com |
c9a6533052ccb2ce5e3a8f01440c2f45b7a851de | 092716906eec743335bd0b5506ead815ac22d83b | /src/main/java/libcore/net/spdy/Ping.java | 8eb5ebe259ae99c3dba2ddda876ad683c4fa6029 | [
"Apache-2.0"
] | permissive | MeetMe/okhttp | 47f4558929dadaeabc9c68823d88a730d72d272f | 8ab9438c7007f3f5e2281db1052dcde7dc3065dc | refs/heads/master | 2021-01-20T19:49:08.040446 | 2012-11-14T08:31:39 | 2012-11-14T08:31:39 | 6,732,927 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,844 | java | /*
* Copyright (C) 2012 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package libcore.net.spdy;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* A locally-originated ping.
*/
public final class Ping {
    /** Released exactly once, when the reply arrives. */
    private final CountDownLatch latch = new CountDownLatch(1);

    // Nanotime stamps; -1 marks "has not happened yet".
    private long sent = -1;
    private long received = -1;

    Ping() {
    }

    /** Records the send time; may only be called once. */
    void send() {
        if (sent != -1) {
            throw new IllegalStateException();
        }
        sent = System.nanoTime();
    }

    /** Records the reply time and unblocks waiters; requires a prior send(). */
    void receive() {
        if (received != -1 || sent == -1) {
            throw new IllegalStateException();
        }
        received = System.nanoTime();
        latch.countDown();
    }

    /**
     * Returns the round trip time for this ping in nanoseconds, waiting for the
     * response to arrive if necessary.
     */
    public long roundTripTime() throws InterruptedException {
        latch.await();
        return received - sent;
    }

    /**
     * Returns the round trip time for this ping in nanoseconds, or -1 if the
     * timeout elapsed before the round trip completed.
     */
    public long roundTripTime(long timeout, TimeUnit unit) throws InterruptedException {
        boolean answered = latch.await(timeout, unit);
        return answered ? received - sent : -1;
    }
}
| [
"jwilson@squareup.com"
] | jwilson@squareup.com |
b3930ae8d347ef710aa89ac2674490cf537d6e75 | 96ee36c19cf03aacc6c788e2bb408b1a893ba2c2 | /Seminar 8/src/ro/ase/cts/decorator/clase/DecoratorNotaLMA.java | 090514b6449bff09fea6dfb756dc730230c5578d | [] | no_license | Gabriel2704/CTS | 764ff047039e42079c40ab7fb7415aa14d5a8101 | 18a2e56ec50294edabd6d7f63f3942552bcf3c71 | refs/heads/main | 2023-05-30T19:27:50.968563 | 2021-06-03T14:03:34 | 2021-06-03T14:03:34 | 344,463,862 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 359 | java | package ro.ase.cts.decorator.clase;
/**
 * Concrete decorator that attaches a birthday greeting ("La multi ani!" -
 * Romanian for "Happy birthday!") to a wrapped {@code NotaAbstracta}.
 */
public class DecoratorNotaLMA extends DecoratorAbstract{

	/**
	 * @param nota the note component to decorate with a birthday greeting
	 */
	public DecoratorNotaLMA(NotaAbstracta nota) {
		super(nota);
	}

	/** Prints only the greeting card text, without the wrapped note. */
	@Override
	public void printeazaFelicitare() {
		System.out.println("Felicitare - La multi ani!");
	}

	/** Prints the wrapped note first, then appends the birthday greeting. */
	public void printeaza() {
		super.printeaza();
		System.out.println("La multi ani!");
	}
}
| [
"p_gabriel1999@yahoo.com"
] | p_gabriel1999@yahoo.com |
e1c19ecc4d0c77a3719ff83efb8f7b988f8bf791 | eade6f9d61e4fa991ef25cfb242e8d21045c96fc | /src/com/burrow/MoveToFront.java | f3f446901494546bca586bc65e212edd164ec9c8 | [] | no_license | acharkq/javaDataStructure | 4c6f2b48b3de76566e16c82d821639a1d1fcdac3 | a387737d3946eea345b9afd6617ce082edc19764 | refs/heads/master | 2021-07-10T11:16:26.529768 | 2019-09-26T05:37:26 | 2019-09-26T05:37:26 | 123,426,304 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,566 | java | package com.burrow;
import edu.princeton.cs.algs4.BinaryStdIn;
import edu.princeton.cs.algs4.BinaryStdOut;
import java.util.LinkedList;
public class MoveToFront {
private static int R = 256;
// apply move-to-front encoding, reading from standard input and writing to standard output
public static void encode() {
LinkedList<Character> list = new LinkedList<>();
for (int i = 0; i < R; i++)
list.add((char) i);
while (!BinaryStdIn.isEmpty()) {
char c = BinaryStdIn.readChar();
int index = list.indexOf(c);
list.remove(index);
list.addFirst(c);
BinaryStdOut.write((char) index);
}
BinaryStdOut.close();
}
// apply move-to-front decoding, reading from standard input and writing to standard output
public static void decode() {
LinkedList<Character> list = new LinkedList<>();
for (int i = 0; i < R; i++)
list.add((char) i);
while (!BinaryStdIn.isEmpty()) {
int index = BinaryStdIn.readChar();
char c = list.get(index);
list.remove(index);
list.addFirst(c);
BinaryStdOut.write(c);
}
BinaryStdOut.close();
}
// if args[0] is "-", apply move-to-front encoding
// if args[0] is "+", apply move-to-front decoding
public static void main(String[] args) {
if (args[0].equals("-"))
MoveToFront.encode();
else if (args[0].equals("+"))
MoveToFront.decode();
}
} | [
"acharkq@gmail.com"
] | acharkq@gmail.com |
5128a0e9295b82311794815f2a4244976a105767 | b313399c632c567634caec6cefd61dd2faeda5c6 | /ScrollTextview/src/main/java/com/tracyis/scrolltextview/view/AutoHorizontalScrollTextView.java | a430ebc098d1a3209bb0328dc1642f30ede38848 | [] | no_license | tracyis/ScrollTextView | aadd4823e99f0977c0ea6305bb8c54a7bbd88bbe | ebe35d52dbb984843e472bb13debcac14c06b2ad | refs/heads/master | 2021-05-03T11:41:47.357855 | 2016-09-23T12:12:32 | 2016-09-23T12:12:32 | 69,021,655 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 949 | java | package com.tracyis.scrolltextview.view;
import android.content.Context;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.widget.TextView;
/**
 * A TextView whose text scrolls horizontally by itself (marquee effect),
 * resembling a real-world electronic ticker display.
 */
public class AutoHorizontalScrollTextView extends TextView {

    public AutoHorizontalScrollTextView(Context context) {
        this(context, null);
    }

    public AutoHorizontalScrollTextView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AutoHorizontalScrollTextView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    /**
     * Enables the marquee ellipsize mode plus the focus flags the platform
     * requires before it will animate marquee text.
     */
    private void init() {
        setClickable(true);
        setEllipsize(TextUtils.TruncateAt.MARQUEE);
        setFocusable(true);
        setFocusableInTouchMode(true);
    }

    // Always report focused: the marquee animation only runs while the view is
    // focused, so this keeps the text scrolling even when another view has focus.
    @Override
    public boolean isFocused() {
        return true;
    }
}
| [
"530904047@qq.com"
] | 530904047@qq.com |
1db4f8bfd0b37e5933f481adf02a80f3c588ed6e | 92e476eaf5a75c64962f27a5cd76da5c16407ef9 | /src/org/nirvawolf/douban/api/channel/Channel.java | 40e6ac58ec51072339ecb65e1a797766b297b9cd | [] | no_license | zimenglan-sysu/DoubanApi | f8b861f68c8a9b5486a94a846bf56fc5c12cf054 | 22ee210b6613c0119ce18c0742be0818e4884e35 | refs/heads/master | 2021-01-24T15:06:06.500700 | 2014-04-02T03:56:34 | 2014-04-02T03:56:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 593 | java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.nirvawolf.douban.api.channel;
import java.io.Serializable;
/**
*
* @author bruce
*/
/**
 * Mutable data holder describing a Douban FM channel, with public fields set
 * directly by the parsing code. Serializable so channel lists can be passed
 * around or persisted.
 */
public class Channel implements Serializable{

    // Pin the serialization version explicitly: without it, any edit to this
    // class would change the JVM-computed default UID and break deserialization
    // of previously stored instances.
    private static final long serialVersionUID = 1L;

    /** Numeric identifier of the channel. */
    public int channel_id;
    /** Channel display name in Chinese. */
    public String chineseName;
    /** English address/slug for the channel — presumably a URL fragment from the API; confirm against caller. */
    public String addr_en;
    /** Channel display name in English. */
    public String englishName;
    /** Identifier of the category this channel belongs to. */
    public String categoryId;
    /** Display name of the category this channel belongs to. */
    public String categoryName;
    /** URL of the channel's cover image. */
    public String coverImgUrl;
    /** Free-form channel introduction text. */
    public String intro;
    /** Number of songs in the channel. */
    public int songNum;
}
| [
"466202783@qq.com"
] | 466202783@qq.com |
0d0226af3c342c9422e49b5e038f55643619a6eb | 6bb70b95862218ace5a7222d47e1b0ed47ad7487 | /APCSAHome/src/Unit6/Lab06f.java | fc32599564a91dcfef2cec03f35e5eda8ced5603 | [] | no_license | judyqin/APCSAHome | 90688bc96ce4d5a6173447f93e845d6e8f146055 | ca709dd5c547a5a048239a6125cf0bad7cf5ee24 | refs/heads/master | 2021-05-02T09:57:59.899545 | 2018-05-12T05:58:25 | 2018-05-12T05:58:25 | 120,784,836 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,059 | java | package Unit6;
import static java.lang.System.*;
/**
 * Driver for the LetterRemover lab: builds several LetterRemover instances,
 * prints each one's state, then prints the string with the target letter
 * removed.
 */
public class Lab06f
{
   public static void main( String args[] )
   {
      // Test 1: remove a letter that appears several times, using the
      // two-argument constructor followed by an explicit setRemover call.
      LetterRemover test = new LetterRemover("I am Sam I am", 'a');
      test.setRemover("I am Sam I am", 'a');
      System.out.println(test.toString());
      System.out.println(test.removeLetters());

      // Test 2: remove a letter that dominates the string.
      LetterRemover test2 = new LetterRemover();
      test2.setRemover("ssssssssxssssesssssesss", 's');
      System.out.println(test2.toString());
      System.out.println(test2.removeLetters());

      // Test 3: target letter does not occur — string should be unchanged.
      LetterRemover test3 = new LetterRemover();
      test3.setRemover("qwertyqwertyqwerty", 'a');
      System.out.println(test3.toString());
      System.out.println(test3.removeLetters());

      // Test 4: alternating pattern, removing one of the two letters.
      LetterRemover test4 = new LetterRemover();
      test4.setRemover("abababababa", 'b');
      System.out.println(test4.toString());
      System.out.println(test4.removeLetters());

      // Test 5: another absent-letter case.
      LetterRemover test5 = new LetterRemover();
      test5.setRemover("abaababababa", 'x');
      System.out.println(test5.toString());
      System.out.println(test5.removeLetters());
   }
}
"judyqin@judys-air.sduhsd.lan"
] | judyqin@judys-air.sduhsd.lan |
033ef4e27e033f8de883795cad170624587decd5 | 3978bb3996a4e49e1b88654669e1f835f0e073fd | /app/src/main/java/com/adr/rendimientoplanta/DATA/T_Labor.java | 87acae62581d721d058116593ebe2f0ac4b30f74 | [] | no_license | julio0860/RendimientoPlanta | dec83eb0334256412888e1a1b3af7dda804f0788 | 94bf86a5aa14acad41c323c045c99e54d81b25b2 | refs/heads/master | 2021-01-16T21:29:21.829016 | 2016-10-30T02:43:19 | 2016-10-30T02:43:19 | 68,314,338 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,945 | java | package com.adr.rendimientoplanta.DATA;
/**
* Created by smachado on 2016/05/19.
*/
/**
 * Schema constants and SQL string builders for the local "Labor" table.
 *
 * NOTE(review): the builders splice values straight into the SQL text rather
 * than using bound parameters, so callers must only pass trusted input.
 */
public class T_Labor {

    // Column names.
    public static final String LABID ="Lab_Id";
    public static final String ACTID ="Act_Id";
    public static final String ESTID = "Est_Id";
    public static final String LABCODIGO ="Lab_Codigo";
    public static final String LABDESCRIPCION = "Lab_Descripcion";
    public static final String EMPID = "Emp_Id";

    // Table name.
    public static final String N_TABLA = "Labor";

    /** DDL that creates the Labor table. */
    public static final String CREATE_T_LABOR ="CREATE TABLE " + N_TABLA+"("+
            LABID +" INTEGER PRIMARY KEY NOT NULL, "+
            ACTID +" INTEGER NOT NULL, "+
            ESTID +" INTEGER NOT NULL, "+
            LABCODIGO +" TEXT NOT NULL, "+
            LABDESCRIPCION +" TEXT NOT NULL, "+
            EMPID +" INTEGER NOT NULL "+
            ");";

    /** DDL that drops the Labor table if it exists. */
    public static final String DROP_T_LABOR ="DROP TABLE IF EXISTS "+N_TABLA;

    /**
     * Builds an INSERT statement for one Labor row. All values, including the
     * integers, are rendered as quoted literals to match the original schema use.
     */
    public static String _INSERT(int labId, int actId, int estId, String codigo,
            String descripcion, int empId)
    {
        return String.format(
                "INSERT INTO %s(%s,%s,%s,%s,%s,%s)VALUES('%d','%d','%d','%s','%s','%d');",
                N_TABLA, LABID, ACTID, ESTID, LABCODIGO, LABDESCRIPCION, EMPID,
                labId, actId, estId, codigo, descripcion, empId);
    }

    /** Builds a DELETE statement that clears the whole table. */
    public static String _DELETE()
    {
        return String.format("DELETE FROM %s;", N_TABLA);
    }

    /**
     * Builds a SELECT over the Labor table, aliasing the primary key as '_id'
     * (the alias Android cursor adapters expect). Pass -1 to select every row;
     * any other value filters by activity id. Rows are ordered by description.
     */
    public static String _SELECT_LABOR(int actId)
    {
        StringBuilder query = new StringBuilder("SELECT ")
                .append(LABID).append(" AS '_id',")
                .append(ACTID).append(',')
                .append(ESTID).append(',')
                .append(LABCODIGO).append(',')
                .append(LABDESCRIPCION).append(',')
                .append(EMPID)
                .append(" FROM ").append(N_TABLA);
        if (actId != -1)
        {
            query.append(" WHERE ").append(ACTID).append("='").append(actId).append('\'');
        }
        query.append(" ORDER BY ").append(LABDESCRIPCION).append(" ASC;");
        return query.toString();
    }
}
| [
"julio0860@hotmail.com"
] | julio0860@hotmail.com |
f3c3a868639404a325c0b25f6cec8a20a7c36611 | c85ed90a2ce93e16300a3922e7fa70162216c1e2 | /Projecto Android/Bicicleta/app/src/androidTest/java/com/example/mati/bicicleta/ApplicationTest.java | 3a3a6a0e4d4fa39669fa43fa3192ad7b9d47af70 | [] | no_license | Dorowk/Android | b3bcbe10c221a8f5884843cc5584b81e84cd5c0d | 349bfbe08432ff767defc0cbdcfbec4c3f5ff887 | refs/heads/master | 2021-01-10T17:10:56.943981 | 2016-02-15T11:49:29 | 2016-02-15T11:49:29 | 45,183,497 | 2 | 0 | null | null | null | null | UTF-8 | Java | false | false | 357 | java | package com.example.mati.bicicleta;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
    /** Default instrumentation test targeting the stock {@link Application} class. */
    public ApplicationTest() {
        super(Application.class);
    }
}
"pablo.castellano.95@gmail.com"
] | pablo.castellano.95@gmail.com |
04ca3118d071b3ab5eaf3ea23f7a8f9444bf0192 | cb51b55a7368c9eace2028622d8d8abb59090f63 | /AMFRemotingService/src/main/java/com/jbouguima/remoting/repository/TypeIdentiteRepository.java | a1f6c33477a1ef2ada26b202f29885e65e77f706 | [] | no_license | JBouguima/Apache_Flex_Spring_Boot_Remoting | b1a1b87a784aa3f1bf24e91574e108725c3583f3 | e387ac0865dbb6241aced9ff8d114f1942eb86f3 | refs/heads/master | 2021-01-19T11:15:05.350009 | 2017-02-17T00:47:04 | 2017-02-17T00:47:04 | 82,236,219 | 3 | 0 | null | null | null | null | UTF-8 | Java | false | false | 430 | java | /**
*
* @author Jileni BOUGUIMA
*
* 12 janv. 2017
*
* CRUDFlex3Services / TypeIdentiteRepository.java
*
**/
package com.jbouguima.remoting.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import com.jbouguima.remoting.model.gestionclient.TypeIdentite;
public interface TypeIdentiteRepository extends JpaRepository<TypeIdentite, Integer> {
}
| [
"jbouguima@gmail.com"
] | jbouguima@gmail.com |
75ce29633796d40d9d7896027edd5ba37d6672fb | 75424c03d583f41f74f678fd05e2a84320ef176b | /src/yu/sort/QuickSort/QuickSort.java | 26b4cfb2b6690ddcf3b474f7b32f6619d9d3c2f3 | [] | no_license | yusiming/BasicAlgorithms | c106d6b68da6aad0a8b1e225c631319dd164ce39 | 97edf30e2d95f07705f8ddd0375389f9de89880b | refs/heads/master | 2020-04-12T13:21:41.331423 | 2019-03-13T02:26:22 | 2019-03-13T02:26:22 | 162,520,096 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,813 | java | package yu.sort.QuickSort;
import yu.sort.Support;
import java.util.Arrays;
/**
* 改进的快速排序(三向切分的快速排序)
*
* @author yusiming
* @date 2018/12/18 10:54
*/
public class QuickSort {
public static void quickSort(int[] arr) {
if (arr == null || arr.length < 2) {
return;
}
quickSort(arr, 0, arr.length - 1);
}
private static void quickSort(int[] arr, int L, int R) {
if (L >= R) {
return;
}
// 引入随机性
Support.swap(arr, ((int) (Math.random() * (R - L + 1) + L)), R);
int[] p = partition(arr, L, R);
// 经过partition之后,p[0] 到 p[1] 位置上的元素已经排好序了
quickSort(arr, L, p[0] - 1);
quickSort(arr, p[1] + 1, R);
}
/**
* 这个方法的作用:
* 将arr数组中大于arr[R]的元素放在数组右测
* 将小于arr[R]的元素放在数组左侧
* 将等于arr[R]的元素放在数组中间
* <p>
* 也就是说中的元素已经排好序了
*/
private static int[] partition(int[] arr, int L, int R) {
int less = L - 1;
int more = R + 1;
int num = arr[R];
while (L < more) {
if (arr[L] < num) {
// 交换了一个等于num的数来到L位置,或者等于区域不存在,自己跟自己交换
Support.swap(arr, ++less, L++);
} else if (arr[L] > num) {
// 这里不应该 ++L,因为从--more位置交换过来的元素的值,是不确定的,
// 这里会改变arr[R]上元素的值,所以不能使用arr[R]
Support.swap(arr, L, --more);
} else {
L++;
}
}
return new int[]{less + 1, more - 1};
}
public static void main(String[] args) {
int testTime = 500000;
int size = 100;
int maxValue = 100;
boolean isSuccess = true;
long startTime = System.nanoTime();
for (int i = 0; i < testTime; i++) {
int[] arr1 = Support.generateRandomArray(size, maxValue);
int[] arr2 = Support.copyArray(arr1);
int[] arr3 = Support.copyArray(arr1);
Arrays.sort(arr2);
quickSort(arr3);
if (!Support.isEquals(arr2, arr3)) {
isSuccess = false;
System.out.println(Arrays.toString(arr1));
System.out.println(Arrays.toString(arr2));
System.out.println(Arrays.toString(arr3));
break;
}
}
System.out.println(isSuccess ? "success!" : "failed!");
long endTime = System.nanoTime();
System.out.println((endTime - startTime) / 1000000000.0);
}
}
| [
"13966931523@163.com"
] | 13966931523@163.com |
dbf12b87a4e64de8957f143f9d203c979329d332 | 6823f50636d14d3dc8f3b0fe820764f9e0191b3e | /main/java/com/dxc/pms/service/DealerServiceImpl.java | bcf057b880ee0832deb4ae16c9e56bf44b494193 | [] | no_license | Poovizhi98/FullStack_Training | 6ac84f597dd6366417228a110725856967d8ef21 | 967eabd746c9682cc0683d68e3f62de05f4b7b26 | refs/heads/master | 2023-06-18T11:42:17.835244 | 2021-07-18T11:16:05 | 2021-07-18T11:16:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,297 | java | package com.dxc.pms.service;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.dxc.pms.dao.DealerDAO;
import com.dxc.pms.model.Dealer;
@Service
public class DealerServiceImpl implements DealerService {

	// DAO that performs the actual persistence work; this service is a thin
	// pass-through layer over it.
	@Autowired
	DealerDAO dealerDAO;

	/** Persists a new dealer for the given user; delegates to the DAO. */
	@Override
	public boolean addDealer(int userId,Dealer dealer) {
		return dealerDAO.addDealer(userId,dealer);
	}

	/** Fetches a single dealer by id for the given user. */
	@Override
	public Dealer getDealer(int userId,int dealerId) {
		return dealerDAO.getDealer(userId,dealerId);
	}

	/** Reports whether the dealer exists for the given user. */
	@Override
	public boolean isDealerExists(int userId,int dealerId) {
		return dealerDAO.isDealerExists(userId,dealerId);
	}

	/** Removes the dealer for the given user. */
	@Override
	public boolean deleteDealer(int userId,int dealerId) {
		return dealerDAO.deleteDealer(userId,dealerId);
	}

	/** Updates an existing dealer for the given user. */
	@Override
	public boolean updateDealer(int userId,Dealer dealer) {
		return dealerDAO.updateDealer(userId,dealer);
	}

	// NOTE(review): despite the name, this returns a single Dealer rather than
	// a collection — it mirrors DealerDAO.getAllDealer; confirm that is intended.
	@Override
	public Dealer getAllDealer(int userId) {
		return dealerDAO.getAllDealer(userId);
	}
}
| [
"noreply@github.com"
] | Poovizhi98.noreply@github.com |
b14d2745e087244d193a884678d6ac815a7216ce | b467d94449640f1a9b57e237ed16a1a74f11de01 | /src/main/java/org/openstreetmap/atlas/geography/atlas/change/MemberMerger.java | b5fbd93e6b43b31e66858a19e66cd1592b77203f | [
"BSD-3-Clause"
] | permissive | seancoulter/atlas | 27d8ff4e814a58a0a7f259643bd86a0db9d4dbe2 | c688660256ce1a50897c2615bbff6eedac0ca6e0 | refs/heads/master | 2021-06-25T12:24:45.558566 | 2019-06-21T20:38:37 | 2019-06-21T20:38:37 | 190,778,351 | 0 | 0 | BSD-3-Clause | 2019-08-20T16:54:51 | 2019-06-07T16:47:20 | Java | UTF-8 | Java | false | false | 22,509 | java | package org.openstreetmap.atlas.geography.atlas.change;
import java.util.Optional;
import java.util.Set;
import java.util.SortedSet;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import org.openstreetmap.atlas.exception.CoreException;
import org.openstreetmap.atlas.geography.atlas.complete.CompleteEntity;
import org.openstreetmap.atlas.geography.atlas.complete.CompleteNode;
import org.openstreetmap.atlas.geography.atlas.items.AtlasEntity;
import org.openstreetmap.atlas.utilities.function.QuaternaryOperator;
import org.openstreetmap.atlas.utilities.function.TernaryOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class encapsulates the logic and configuration for {@link CompleteEntity} member merging in
* the context of {@link FeatureChange} merges.
*
* @author lcram
* @param <M>
* the type of the member this {@link MemberMerger} will be merging
*/
public final class MemberMerger<M>
{
    /**
     * A builder class for {@link MemberMerger}.
     *
     * @author lcram
     * @param <M>
     *            the type of the member this {@link MemberMerger} will be merging
     */
    public static class Builder<M>
    {
        // Member name used only for diagnostics in exception messages
        private String memberName;
        private AtlasEntity beforeEntityLeft;
        private AtlasEntity afterEntityLeft;
        private AtlasEntity beforeEntityRight;
        private AtlasEntity afterEntityRight;
        // Pulls the member M out of an entity (e.g. a tag map or edge identifier set)
        private Function<AtlasEntity, M> memberExtractor;
        // Merge strategies, selected at merge time based on the beforeView situation
        private BinaryOperator<M> afterViewNoBeforeViewMerger;
        private TernaryOperator<M> afterViewConsistentBeforeViewMerger;
        private QuaternaryOperator<M> afterViewConflictingBeforeViewMerger;
        private BinaryOperator<M> beforeViewMerger;
        private boolean useHackForMergingConflictingConnectedEdgeSetBeforeViews = false;
        // Only used by the connected-edge-set hack; carry explicitlyExcluded state
        private Optional<CompleteNode> leftNode;
        private Optional<CompleteNode> rightNode;

        /**
         * Build the configured {@link MemberMerger}, failing fast if any required
         * field (memberName, both afterEntities, consistent beforeEntities) is unset.
         */
        public MemberMerger<M> build()
        {
            assertRequiredFieldsNonNull();

            final MemberMerger<M> merger = new MemberMerger<>();
            merger.memberName = this.memberName;
            merger.beforeEntityLeft = this.beforeEntityLeft;
            merger.afterEntityLeft = this.afterEntityLeft;
            merger.beforeEntityRight = this.beforeEntityRight;
            merger.afterEntityRight = this.afterEntityRight;
            merger.memberExtractor = this.memberExtractor;
            merger.afterViewNoBeforeViewMerger = this.afterViewNoBeforeViewMerger;
            merger.afterViewConsistentBeforeViewMerger = this.afterViewConsistentBeforeViewMerger;
            merger.afterViewConflictingBeforeViewMerger = this.afterViewConflictingBeforeViewMerger;
            merger.beforeViewMerger = this.beforeViewMerger;
            merger.useHackForMergingConflictingConnectedEdgeSetBeforeViews = this.useHackForMergingConflictingConnectedEdgeSetBeforeViews;
            merger.leftNode = this.leftNode;
            merger.rightNode = this.rightNode;

            return merger;
        }

        /**
         * Enable the special-case merge path for Node in/out edge identifier sets
         * with conflicting beforeViews; the nodes supply explicitlyExcluded state.
         */
        public Builder<M> useHackForMergingConflictingConnectedEdgeSetBeforeViews(
                final CompleteNode left, final CompleteNode right)
        {
            this.useHackForMergingConflictingConnectedEdgeSetBeforeViews = true;
            this.leftNode = Optional.ofNullable(left);
            this.rightNode = Optional.ofNullable(right);
            return this;
        }

        public Builder<M> withAfterEntityLeft(final AtlasEntity afterEntityLeft)
        {
            this.afterEntityLeft = afterEntityLeft;
            return this;
        }

        public Builder<M> withAfterEntityRight(final AtlasEntity afterEntityRight)
        {
            this.afterEntityRight = afterEntityRight;
            return this;
        }

        public Builder<M> withAfterViewConflictingBeforeViewMerger(
                final QuaternaryOperator<M> afterViewConflictingBeforeViewMerger)
        {
            this.afterViewConflictingBeforeViewMerger = afterViewConflictingBeforeViewMerger;
            return this;
        }

        public Builder<M> withAfterViewConsistentBeforeViewMerger(
                final TernaryOperator<M> afterViewConsistentBeforeViewMerger)
        {
            this.afterViewConsistentBeforeViewMerger = afterViewConsistentBeforeViewMerger;
            return this;
        }

        public Builder<M> withAfterViewNoBeforeMerger(
                final BinaryOperator<M> afterViewNoBeforeMerger)
        {
            this.afterViewNoBeforeViewMerger = afterViewNoBeforeMerger;
            return this;
        }

        public Builder<M> withBeforeEntityLeft(final AtlasEntity beforeEntityLeft)
        {
            this.beforeEntityLeft = beforeEntityLeft;
            return this;
        }

        public Builder<M> withBeforeEntityRight(final AtlasEntity beforeEntityRight)
        {
            this.beforeEntityRight = beforeEntityRight;
            return this;
        }

        public Builder<M> withBeforeViewMerger(final BinaryOperator<M> beforeViewMerger)
        {
            this.beforeViewMerger = beforeViewMerger;
            return this;
        }

        public Builder<M> withMemberExtractor(final Function<AtlasEntity, M> memberExtractor)
        {
            this.memberExtractor = memberExtractor;
            return this;
        }

        public Builder<M> withMemberName(final String memberName)
        {
            this.memberName = memberName;
            return this;
        }

        // Validates the builder invariants: memberName and both afterEntities must
        // be set, and the two beforeEntities must be either both set or both null.
        private void assertRequiredFieldsNonNull()
        {
            if (this.memberName == null)
            {
                throw new CoreException("Required field \'memberName\' was unset");
            }
            if (this.afterEntityLeft == null)
            {
                throw new CoreException("Required field \'afterEntityLeft\' was unset");
            }
            if (this.afterEntityRight == null)
            {
                throw new CoreException("Required field \'afterEntityRight\' was unset");
            }
            if (this.beforeEntityLeft != null && this.beforeEntityRight == null
                    || this.beforeEntityLeft == null && this.beforeEntityRight != null)
            {
                throw new CoreException("Both \'beforeEntity\' fields must either be set or null");
            }
        }
    }

    /**
     * A bean class to store the merged before and after members. This is useful as a return type
     * for the member merger, which needs to correctly merge the before and after entity view of
     * each {@link FeatureChange}.
     *
     * @author lcram
     * @param <M>
     *            the member type
     */
    public static class MergedMemberBean<M>
    {
        private final M beforeMemberMerged;
        private final M afterMemberMerged;

        public MergedMemberBean(final M before, final M after)
        {
            this.beforeMemberMerged = before;
            this.afterMemberMerged = after;
        }

        public M getMergedAfterMember()
        {
            return this.afterMemberMerged;
        }

        public M getMergedBeforeMember()
        {
            return this.beforeMemberMerged;
        }
    }

    // NOTE(review): this logger appears unused within this class — confirm before removing.
    private static final Logger logger = LoggerFactory.getLogger(MemberMerger.class);

    private String memberName;
    private AtlasEntity beforeEntityLeft;
    private AtlasEntity afterEntityLeft;
    private AtlasEntity beforeEntityRight;
    private AtlasEntity afterEntityRight;
    private Function<AtlasEntity, M> memberExtractor;
    private BinaryOperator<M> afterViewNoBeforeViewMerger;
    private TernaryOperator<M> afterViewConsistentBeforeViewMerger;
    private QuaternaryOperator<M> afterViewConflictingBeforeViewMerger;
    private BinaryOperator<M> beforeViewMerger;
    private boolean useHackForMergingConflictingConnectedEdgeSetBeforeViews;
    private Optional<CompleteNode> leftNode;
    private Optional<CompleteNode> rightNode;

    // Instances are only created through the Builder.
    private MemberMerger()
    {
    }

    /**
     * Merge some feature member using a left and right before/after view.
     *
     * @return a {@link MergedMemberBean} containing the merged beforeMember view and the merged
     *         afterMember view
     */
    public MergedMemberBean<M> mergeMember()
    {
        final M beforeMemberResult;
        final M afterMemberResult;

        // Extract the member from each of the four entity views; a null entity
        // yields a null member.
        final M beforeMemberLeft = this.beforeEntityLeft == null ? null
                : this.memberExtractor.apply(this.beforeEntityLeft);
        final M afterMemberLeft = this.afterEntityLeft == null ? null
                : this.memberExtractor.apply(this.afterEntityLeft);
        final M beforeMemberRight = this.beforeEntityRight == null ? null
                : this.memberExtractor.apply(this.beforeEntityRight);
        final M afterMemberRight = this.afterEntityRight == null ? null
                : this.memberExtractor.apply(this.afterEntityRight);

        /*
         * In the case that both beforeMembers are present, we check their equivalence before
         * continuing. If they are not equivalent, then we try to use our special beforeView
         * conflict resolution merge logic. Otherwise, we can continue as normal.
         */
        if (beforeMemberLeft != null && beforeMemberRight != null
                && !beforeMemberLeft.equals(beforeMemberRight))
        {
            /*
             * In the case that we are merging the inEdges or outEdges members of Node, we perform a
             * different merge logic. The in/outEdge sets have a possibility of beforeView
             * conflicts, and since we are unable to attach additional explicitlyExcluded state
             * directly to a set, we cannot use the same logic utilized for merging other members
             * with conflicting beforeViews.
             */
            if (this.useHackForMergingConflictingConnectedEdgeSetBeforeViews)
            {
                return mergeMemberHackForConflictingConnectedEdgeSetBeforeViews(beforeMemberLeft,
                        afterMemberLeft, beforeMemberRight, afterMemberRight);
            }
            return mergeMemberWithConflictingBeforeViews(beforeMemberLeft, afterMemberLeft,
                    beforeMemberRight, afterMemberRight);
        }
        beforeMemberResult = chooseNonNullMemberIfPossible(beforeMemberLeft, beforeMemberRight);

        /*
         * In the case that both afterMembers are present, then we will need to resolve the
         * afterMember merge using one of the supplied merge strategies. In this case, beforeMembers
         * that are either consistent or both null - so we can use the merged beforeMemberResult.
         */
        if (afterMemberLeft != null && afterMemberRight != null)
        {
            return mergeMembersWithConsistentBeforeViews(beforeMemberResult, afterMemberLeft,
                    afterMemberRight);
        }

        /*
         * If only one of the afterMembers is present, we just take whichever one is present.
         */
        if (afterMemberLeft != null)
        {
            afterMemberResult = afterMemberLeft;
        }
        else if (afterMemberRight != null)
        {
            afterMemberResult = afterMemberRight;
        }
        /*
         * If neither afterMember is present, then just move on.
         */
        else
        {
            afterMemberResult = null;
        }

        return new MergedMemberBean<>(beforeMemberResult, afterMemberResult);
    }

    /**
     * Choose the non-null member between two choices if possible. If both the left and right
     * members are non-null, then this method will arbitrarily select one of them. Due to this
     * condition, you may see unexpected results if you pass two non-null members that are unequal.
     *
     * @param memberLeft
     *            the left side before view of the member
     * @param memberRight
     *            the right side before view of the member
     * @return The non-null beforeMember among the two if present. Otherwise, returns {@code null};
     */
    private M chooseNonNullMemberIfPossible(final M memberLeft, final M memberRight)
    {
        /*
         * Properly merge the members. If both are non-null, we arbitrarily take the left (since
         * this method makes no guarantee on which side it will select when both are non-null). If
         * one is null and one is not, then we take the non-null. If both were null, then the result
         * remains null.
         */
        if (memberLeft != null && memberRight != null)
        {
            return memberLeft;
        }
        else if (memberLeft != null)
        {
            return memberLeft;
        }
        else if (memberRight != null)
        {
            return memberRight;
        }
        else
        {
            return null;
        }
    }

    /**
     * Special-case merge for Node connected-edge identifier sets whose beforeViews conflict.
     * Requires both leftNode and rightNode to supply explicitlyExcluded edge identifier state,
     * and only supports the inEdgeIdentifiers/outEdgeIdentifiers members.
     */
    @SuppressWarnings("unchecked")
    private MergedMemberBean<M> mergeMemberHackForConflictingConnectedEdgeSetBeforeViews(
            final M beforeMemberLeft, final M afterMemberLeft, final M beforeMemberRight,
            final M afterMemberRight)
    {
        final M beforeMemberResult;
        final M afterMemberResult;

        final Set<Long> explicitlyExcludedLeft;
        final Set<Long> explicitlyExcludedRight;

        if (!this.leftNode.isPresent())
        {
            throw new CoreException(
                    "Attempted merge failed for {}: tried to use hackForConflictingConnectedEdgeSet but was missing leftNode",
                    this.memberName);
        }
        if (!this.rightNode.isPresent())
        {
            throw new CoreException(
                    "Attempted merge failed for {}: tried to use hackForConflictingConnectedEdgeSet but was missing rightNode",
                    this.memberName);
        }

        if (FeatureChangeMergingHelpers.IN_EDGE_IDENTIFIERS_FIELD.equals(this.memberName))
        {
            explicitlyExcludedLeft = this.leftNode.get().explicitlyExcludedInEdgeIdentifiers();
            explicitlyExcludedRight = this.rightNode.get().explicitlyExcludedInEdgeIdentifiers();
        }
        else if (FeatureChangeMergingHelpers.OUT_EDGE_IDENTIFIERS_FIELD.equals(this.memberName))
        {
            explicitlyExcludedLeft = this.leftNode.get().explicitlyExcludedOutEdgeIdentifiers();
            explicitlyExcludedRight = this.rightNode.get().explicitlyExcludedOutEdgeIdentifiers();
        }
        else
        {
            throw new CoreException(
                    "Attempted merge failed for {}: hackForConflictingConnectedEdgeSet is not a valid strategy for {}",
                    this.memberName, this.memberName);
        }

        if (this.beforeViewMerger == null)
        {
            throw new CoreException(
                    "Conflicting beforeMembers {} and no beforeView merge strategy was provided; beforeView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight);
        }
        try
        {
            beforeMemberResult = this.beforeViewMerger.apply(beforeMemberLeft, beforeMemberRight);
        }
        catch (final Exception exception)
        {
            throw new CoreException(
                    "Attempted beforeView merge strategy failed for {} with beforeView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight, exception);
        }

        /*
         * Here we hardcode the application of the SenaryOperator node connected edge set merger. We
         * can cast back and forth between M and SortedSet<Long> here, since we know that M is of
         * type SortedSet<Long> based on the constraints imposed when calling this function.
         */
        try
        {
            final SortedSet<Long> mergeResult = MemberMergeStrategies.conflictingBeforeViewSetMerger
                    .apply((SortedSet<Long>) beforeMemberLeft, (SortedSet<Long>) afterMemberLeft,
                            explicitlyExcludedLeft, (SortedSet<Long>) beforeMemberRight,
                            (SortedSet<Long>) afterMemberRight, explicitlyExcludedRight);
            afterMemberResult = (M) mergeResult;
        }
        catch (final Exception exception)
        {
            throw new CoreException(
                    "Tried merge strategy for hackForConflictingConnectedEdgeSet, but it failed for {}"
                            + "\nbeforeView: {} vs {};\nafterView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight, afterMemberLeft,
                    afterMemberRight, exception);
        }

        return new MergedMemberBean<>(beforeMemberResult, afterMemberResult);
    }

    /**
     * Merge a member that has consistent (possibly null) beforeViews.
     *
     * @param beforeMemberResult
     *            the pre-merged before member view
     * @param afterMemberLeft
     *            the left side after view of the member
     * @param afterMemberRight
     *            the right side after view of the member
     * @return a {@link MergedMemberBean} containing the merged beforeMember view and the merged
     *         afterMember view
     */
    private MergedMemberBean<M> mergeMembersWithConsistentBeforeViews(final M beforeMemberResult,
            final M afterMemberLeft, final M afterMemberRight)
    {
        final M afterMemberResult;

        /*
         * In the case that both afterMembers are non-null and equivalent, we arbitrarily pick the
         * left one.
         */
        if (afterMemberLeft.equals(afterMemberRight))
        {
            return new MergedMemberBean<>(beforeMemberResult, afterMemberLeft);
        }

        /*
         * If both beforeMembers are present (we have already asserted their equivalence so we just
         * arbitrarily use beforeMemberLeft), we use the diffBased strategy if present.
         */
        if (beforeMemberResult != null && this.afterViewConsistentBeforeViewMerger != null)
        {
            try
            {
                afterMemberResult = this.afterViewConsistentBeforeViewMerger
                        .apply(beforeMemberResult, afterMemberLeft, afterMemberRight);
            }
            catch (final Exception exception)
            {
                throw new CoreException(
                        "Attempted afterViewConsistentBeforeMerge failed for {} with beforeView: {}; afterView: {} vs {}",
                        this.memberName, beforeMemberResult, afterMemberLeft, afterMemberRight,
                        exception);
            }
        }
        /*
         * If the beforeMember is not present, or we don't have a diffBased strategy, we try the
         * simple strategy.
         */
        else if (this.afterViewNoBeforeViewMerger != null)
        {
            try
            {
                afterMemberResult = this.afterViewNoBeforeViewMerger.apply(afterMemberLeft,
                        afterMemberRight);
            }
            catch (final CoreException exception)
            {
                throw new CoreException(
                        "Attempted afterViewNoBeforeMerge failed for {}; afterView: {} vs {}",
                        this.memberName, afterMemberLeft, afterMemberRight, exception);
            }
        }
        /*
         * If there was no simple strategy, we have to fail.
         */
        else
        {
            throw new CoreException(
                    "Conflicting members and no merge strategy for {}; afterView: {} vs {}",
                    this.memberName, afterMemberLeft, afterMemberRight);
        }

        return new MergedMemberBean<>(beforeMemberResult, afterMemberResult);
    }

    /**
     * Merge a member that has conflicting beforeViews. This can happen occasionally with
     * {@link RelationBean}s and the in/out {@link Edge} identifier sets in {@link Node}, since
     * these may be inconsistent across shards.
     *
     * @param beforeMemberLeft
     *            the left side before view of the member
     * @param afterMemberLeft
     *            the left side after view of the member
     * @param beforeMemberRight
     *            the right side before view of the member
     * @param afterMemberRight
     *            the right side after view of the member
     * @return a {@link MergedMemberBean} containing the merged beforeMember view and the merged
     *         afterMember view
     */
    private MergedMemberBean<M> mergeMemberWithConflictingBeforeViews(final M beforeMemberLeft,
            final M afterMemberLeft, final M beforeMemberRight, final M afterMemberRight)
    {
        final M beforeMemberResult;
        final M afterMemberResult;

        if (this.afterViewConflictingBeforeViewMerger == null)
        {
            throw new CoreException(
                    "Conflicting beforeMembers {} and no afterView merge strategy capable of handling"
                            + " conflicting beforeViews was provided; beforeView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight);
        }
        if (this.beforeViewMerger == null)
        {
            throw new CoreException(
                    "Conflicting beforeMembers {} and no beforeView merge strategy was provided; beforeView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight);
        }
        try
        {
            beforeMemberResult = this.beforeViewMerger.apply(beforeMemberLeft, beforeMemberRight);
        }
        catch (final Exception exception)
        {
            throw new CoreException(
                    "Attempted beforeView merge strategy failed for {} with beforeView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight, exception);
        }
        try
        {
            afterMemberResult = this.afterViewConflictingBeforeViewMerger.apply(beforeMemberLeft,
                    afterMemberLeft, beforeMemberRight, afterMemberRight);
        }
        catch (final Exception exception)
        {
            throw new CoreException(
                    "Tried merge strategy for handling conflicting beforeViews. but it failed for {}"
                            + "\nbeforeView: {} vs {};\nafterView: {} vs {}",
                    this.memberName, beforeMemberLeft, beforeMemberRight, afterMemberLeft,
                    afterMemberRight, exception);
        }

        return new MergedMemberBean<>(beforeMemberResult, afterMemberResult);
    }
}
| [
"matthieun@users.noreply.github.com"
] | matthieun@users.noreply.github.com |
93a5d6e31197186001481262194efb436a259b0a | c2e58ac4e8364eb7d2701ac481117d9c759121f8 | /src/main/java/com/iuling/comm/utils/concurrent/threadpool/ThreadPoolBuilder.java | 895c752facf2fdafcf018b7485fbc2d3d08a24e3 | [] | no_license | wolfmuzi/test | fbde72530fd504f712376a11aac17c0118a07dde | 63494be8221a6e5d93e3b26785f1297c19f894e2 | refs/heads/master | 2021-04-27T02:50:00.124416 | 2018-02-24T03:29:44 | 2018-02-24T03:29:44 | 122,700,176 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 11,698 | java | package com.iuling.comm.utils.concurrent.threadpool;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor.AbortPolicy;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.Validate;
import com.iuling.comm.utils.concurrent.threadpool.QueuableCachedThreadPool.ControllableQueue;
/**
* ThreadPool创建的工具类.
*
* 对比JDK Executors中的newFixedThreadPool(), newCachedThreadPool(),newScheduledThreadPool, 提供更多有用的配置项.
*
* 另包含了移植自Tomcat的QueuableCachedPool.
*
* 使用示例如下:
*
* <pre>
* ExecutorService ExecutorService = new FixedThreadPoolBuilder().setPoolSize(10).build();
* </pre>
*
* 参考文章 《Java ThreadPool的正确打开方式》http://calvin1978.blogcn.com/articles/java-threadpool.html
*/
public class ThreadPoolBuilder {
private static RejectedExecutionHandler defaultRejectHandler = new AbortPolicy();
    /**
     * Entry point for configuring and building a fixed-size pool.
     *
     * @see FixedThreadPoolBuilder
     */
    public static FixedThreadPoolBuilder fixedPool() {
        return new FixedThreadPoolBuilder();
    }
    /**
     * Entry point for configuring and building an elastic (cached) pool.
     *
     * @see CachedThreadPoolBuilder
     */
    public static CachedThreadPoolBuilder cachedPool() {
        return new CachedThreadPoolBuilder();
    }
    /**
     * Entry point for configuring and building a scheduled pool.
     *
     * @see ScheduledThreadPoolBuilder
     */
    public static ScheduledThreadPoolBuilder scheduledPool() {
        return new ScheduledThreadPoolBuilder();
    }
    /**
     * Entry point for configuring and building the Tomcat-derived queuable cached pool.
     *
     * @see QueuableCachedThreadPoolBuilder
     */
    public static QueuableCachedThreadPoolBuilder queuableCachedPool() {
        return new QueuableCachedThreadPoolBuilder();
    }
/**
* 创建FixedThreadPool.
*
* 1. 任务提交时, 如果线程数还没达到poolSize即创建新线程并绑定任务(即poolSize次提交后线程总数必达到poolSize,不会重用之前的线程)
*
* poolSize默认为1,即singleThreadPool.
*
* 2. 第poolSize次任务提交后, 新增任务放入Queue中, Pool中的所有线程从Queue中take任务执行.
*
* Queue默认为无限长的LinkedBlockingQueue, 也可以设置queueSize换成有界的队列.
*
* 如果使用有界队列, 当队列满了之后,会调用RejectHandler进行处理, 默认为AbortPolicy,抛出RejectedExecutionException异常.
* 其他可选的Policy包括静默放弃当前任务(Discard),放弃Queue里最老的任务(DisacardOldest),或由主线程来直接执行(CallerRuns).
*
* 3. 因为线程全部为core线程,所以不会在空闲回收.
*/
public static class FixedThreadPoolBuilder {
private int poolSize = 1;
private int queueSize = -1;
private ThreadFactory threadFactory = null;
private String threadNamePrefix = null;
private Boolean daemon = null;
private RejectedExecutionHandler rejectHandler;
/**
* Pool大小,默认为1,即singleThreadPool
*/
public FixedThreadPoolBuilder setPoolSize(int poolSize) {
Validate.isTrue(poolSize >= 1);
this.poolSize = poolSize;
return this;
}
/**
* 默认为-1, 使用无限长的LinkedBlockingQueue,为正数时使用ArrayBlockingQueue
*/
public FixedThreadPoolBuilder setQueueSize(int queueSize) {
this.queueSize = queueSize;
return this;
}
/**
* 与threadNamePrefix互斥, 优先使用ThreadFactory
*/
public FixedThreadPoolBuilder setThreadFactory(ThreadFactory threadFactory) {
this.threadFactory = threadFactory;
return this;
}
/**
* 与ThreadFactory互斥, 优先使用ThreadFactory
*/
public FixedThreadPoolBuilder setThreadNamePrefix(String threadNamePrefix) {
this.threadNamePrefix = threadNamePrefix;
return this;
}
/**
* 与threadFactory互斥, 优先使用ThreadFactory
*
* 默认为NULL,不进行设置,使用JDK的默认值.
*/
public FixedThreadPoolBuilder setDaemon(Boolean daemon) {
this.daemon = daemon;
return this;
}
public FixedThreadPoolBuilder setRejectHanlder(RejectedExecutionHandler rejectHandler) {
this.rejectHandler = rejectHandler;
return this;
}
public ThreadPoolExecutor build() {
BlockingQueue<Runnable> queue = null;
if (queueSize < 1) {
queue = new LinkedBlockingQueue<Runnable>();
} else {
queue = new ArrayBlockingQueue<Runnable>(queueSize);
}
threadFactory = createThreadFactory(threadFactory, threadNamePrefix, daemon);
if (rejectHandler == null) {
rejectHandler = defaultRejectHandler;
}
return new ThreadPoolExecutor(poolSize, poolSize, 0L, TimeUnit.MILLISECONDS, queue, threadFactory,
rejectHandler);
}
}
/**
* 创建CachedThreadPool.
*
* 1. 任务提交时, 如果线程数还没达到minSize即创建新线程并绑定任务(即minSize次提交后线程总数必达到minSize, 不会重用之前的线程)
*
* minSize默认为0, 可设置保证有基本的线程处理请求不被回收.
*
* 2. 第minSize次任务提交后, 新增任务提交进SynchronousQueue后,如果没有空闲线程立刻处理,则会创建新的线程, 直到总线程数达到上限.
*
* maxSize默认为Integer.Max, 可进行设置.
*
* 如果设置了maxSize, 当总线程数达到上限, 会调用RejectHandler进行处理, 默认为AbortPolicy, 抛出RejectedExecutionException异常.
* 其他可选的Policy包括静默放弃当前任务(Discard),或由主线程来直接执行(CallerRuns).
*
* 3. minSize以上, maxSize以下的线程, 如果在keepAliveTime中都poll不到任务执行将会被结束掉, keeAliveTimeJDK默认为10秒.
* JDK默认值60秒太高,如高达1000线程时,要低于16QPS时才会开始回收线程, 因此改为默认10秒.
*/
public static class CachedThreadPoolBuilder {
private int minSize = 0;
private int maxSize = Integer.MAX_VALUE;
private int keepAliveSecs = 10;
private ThreadFactory threadFactory = null;
private String threadNamePrefix = null;
private Boolean daemon = null;
private RejectedExecutionHandler rejectHandler;
public CachedThreadPoolBuilder setMinSize(int minSize) {
this.minSize = minSize;
return this;
}
public CachedThreadPoolBuilder setMaxSize(int maxSize) {
this.maxSize = maxSize;
return this;
}
/**
* JDK默认值60秒太高,如高达1000线程时,要低于16QPS时才会开始回收线程, 因此改为默认10秒.
*/
public CachedThreadPoolBuilder setKeepAliveSecs(int keepAliveSecs) {
this.keepAliveSecs = keepAliveSecs;
return this;
}
/**
* 与threadNamePrefix互斥, 优先使用ThreadFactory
*/
public CachedThreadPoolBuilder setThreadFactory(ThreadFactory threadFactory) {
this.threadFactory = threadFactory;
return this;
}
/**
* 与threadFactory互斥, 优先使用ThreadFactory
*/
public CachedThreadPoolBuilder setThreadNamePrefix(String threadNamePrefix) {
this.threadNamePrefix = threadNamePrefix;
return this;
}
/**
* 与threadFactory互斥, 优先使用ThreadFactory
*
* 默认为NULL,不进行设置,使用JDK的默认值.
*/
public CachedThreadPoolBuilder setDaemon(Boolean daemon) {
this.daemon = daemon;
return this;
}
public CachedThreadPoolBuilder setRejectHanlder(RejectedExecutionHandler rejectHandler) {
this.rejectHandler = rejectHandler;
return this;
}
public ThreadPoolExecutor build() {
threadFactory = createThreadFactory(threadFactory, threadNamePrefix, daemon);
if (rejectHandler == null) {
rejectHandler = defaultRejectHandler;
}
return new ThreadPoolExecutor(minSize, maxSize, keepAliveSecs, TimeUnit.SECONDS,
new SynchronousQueue<Runnable>(), threadFactory, rejectHandler);
}
}
    /*
     * Creates a ScheduledPool. Note that build() always forces daemon threads (Boolean.TRUE).
     */
    public static class ScheduledThreadPoolBuilder {

        private int poolSize = 1;
        private ThreadFactory threadFactory = null;
        private String threadNamePrefix = null;

        /**
         * Defaults to 1.
         */
        public ScheduledThreadPoolBuilder setPoolSize(int poolSize) {
            this.poolSize = poolSize;
            return this;
        }

        /**
         * Mutually exclusive with threadNamePrefix; an explicit ThreadFactory takes precedence.
         */
        public ScheduledThreadPoolBuilder setThreadFactory(ThreadFactory threadFactory) {
            this.threadFactory = threadFactory;
            return this;
        }

        /**
         * Used only when no explicit ThreadFactory is supplied.
         */
        public ScheduledThreadPoolBuilder setThreadNamePrefix(String threadNamePrefix) {
            this.threadNamePrefix = threadNamePrefix;
            return this;
        }

        /**
         * Builds the scheduled executor; threads are always created as daemon threads here.
         */
        public ScheduledThreadPoolExecutor build() {
            threadFactory = createThreadFactory(threadFactory, threadNamePrefix, Boolean.TRUE);
            return new ScheduledThreadPoolExecutor(poolSize, threadFactory);
        }
    }
/**
* 从Tomcat移植过来的可扩展可用Queue缓存任务的ThreadPool
*
* @see QueuableCachedThreadPool
*/
public static class QueuableCachedThreadPoolBuilder {
private int minSize = 0;
private int maxSize = Integer.MAX_VALUE;
private int keepAliveSecs = 10;
private int queueSize = 100;
private ThreadFactory threadFactory = null;
private String threadNamePrefix = null;
private Boolean daemon = null;
private RejectedExecutionHandler rejectHandler;
public QueuableCachedThreadPoolBuilder setMinSize(int minSize) {
this.minSize = minSize;
return this;
}
public QueuableCachedThreadPoolBuilder setMaxSize(int maxSize) {
this.maxSize = maxSize;
return this;
}
/**
* LinkedQueue长度, 默认100
*/
public QueuableCachedThreadPoolBuilder setQueueSize(int queueSize) {
this.queueSize = queueSize;
return this;
}
public QueuableCachedThreadPoolBuilder setKeepAliveSecs(int keepAliveSecs) {
this.keepAliveSecs = keepAliveSecs;
return this;
}
/**
* 与threadNamePrefix互斥, 优先使用ThreadFactory
*/
public QueuableCachedThreadPoolBuilder setThreadFactory(ThreadFactory threadFactory) {
this.threadFactory = threadFactory;
return this;
}
/**
* 与threadFactory互斥, 优先使用ThreadFactory
*/
public QueuableCachedThreadPoolBuilder setThreadNamePrefix(String threadNamePrefix) {
this.threadNamePrefix = threadNamePrefix;
return this;
}
/**
* 与threadFactory互斥, 优先使用ThreadFactory
*
* 默认为NULL,不进行设置,使用JDK的默认值.
*/
public QueuableCachedThreadPoolBuilder setDaemon(Boolean daemon) {
this.daemon = daemon;
return this;
}
public QueuableCachedThreadPoolBuilder setRejectHanlder(RejectedExecutionHandler rejectHandler) {
this.rejectHandler = rejectHandler;
return this;
}
public QueuableCachedThreadPool build() {
threadFactory = createThreadFactory(threadFactory, threadNamePrefix, daemon);
if (rejectHandler == null) {
rejectHandler = defaultRejectHandler;
}
return new QueuableCachedThreadPool(minSize, maxSize, keepAliveSecs, TimeUnit.SECONDS,
new ControllableQueue(queueSize), threadFactory, rejectHandler);
}
}
/**
* 优先使用threadFactory,否则如果threadNamePrefix不为空则使用自建ThreadFactory,否则使用defaultThreadFactory
*/
private static ThreadFactory createThreadFactory(ThreadFactory threadFactory, String threadNamePrefix,
Boolean daemon) {
if (threadFactory != null) {
return threadFactory;
}
if (threadNamePrefix != null) {
if (daemon != null) {
return ThreadPoolUtil.buildThreadFactory(threadNamePrefix, daemon);
} else {
return ThreadPoolUtil.buildThreadFactory(threadNamePrefix);
}
}
return Executors.defaultThreadFactory();
}
}
| [
"email@example.com"
] | email@example.com |
5c5d1ec8c02febe8faeb22df4d8eb78892a65340 | cf845162897c23cd92d3355975fc3445e3901d46 | /critpoint/java/detection2d/CritpointRegion.java | e051cebd5fbab5da0a134c6dad5b1690ca90dc33 | [
"MIT"
] | permissive | miroslavradojevic/advantra | f7f85e4403f14cefa1a7b051bcb2ca08f4f7fc78 | a678c8ebae8e385ba81b82e2f918054d460e0650 | refs/heads/master | 2022-04-09T23:13:06.955022 | 2020-04-02T13:16:08 | 2020-04-02T13:16:08 | 233,956,459 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,967 | java | package detection2d;
import java.awt.*;
/**
* Created by miroslav on 6/8/14.
*/
public class CritpointRegion {
public float radius;
public float[] centroid;
public float score;
public RegionType type;
public float[][] outward_directions;
public CritpointRegion(RegionType _type, float _centroidX, float _centroidY, float _radius, float _score, float[][] _outward_directions, int nr_directions) {
this.type = _type;
this.centroid = new float[2];
this.centroid[0] = _centroidX;
this.centroid[1] = _centroidY;
this.radius = _radius;
this.score = _score;
this.outward_directions = new float[Math.min(_outward_directions.length, nr_directions)][2];
for (int i = 0; i < this.outward_directions.length; i++) {
for (int j = 0; j < 2; j++) {
this.outward_directions[i][j] = _outward_directions[i][j];
}
}
}
public enum RegionType {
END, BIF, CROSS, BIF_CROSS, BDY; // BIF_CROSS represents both, need it for some function calls clearness, appears as a pseudo-category here
}
public enum AnnotationType{
END, BIF, CROSS, NONE, IGNORE;
}
public static int annotationId(AnnotationType t) {
// categories of annotations (indexes are used to differentiate in .swc and colours in overlays)
if (t.equals(AnnotationType.BIF)) {
return 3;
}
else if (t.equals(AnnotationType.END)) {
return 1;
}
else if (t.equals(AnnotationType.CROSS)) {
return 4;
}
else if (t.equals(AnnotationType.NONE)) {
return 0;
}
else if (t.equals(AnnotationType.IGNORE)) {
return 7;
}
else return -99; // dummy fake
}
public static Color annotationColor (AnnotationType t) {
if (t.equals(AnnotationType.BIF)) {
return Color.RED;
}
else if (t.equals(AnnotationType.END)) {
return Color.YELLOW;
}
else if (t.equals(AnnotationType.CROSS)) {
return Color.GREEN;
}
else if (t.equals(AnnotationType.NONE)) {
return Color.BLUE;
}
else if (t.equals(AnnotationType.IGNORE)) {
return new Color(1, 1, 1, 0.5f);
}
else return Color.BLACK; // dummy fake
}
/* intended these for evaluation but won't use them */
public boolean isOverlapping(float[] _other_centroid, float _other_radius) {
float dist = (float) Math.sqrt(Math.pow(_other_centroid[0]-this.centroid[0],2) + Math.pow(_other_centroid[1]-this.centroid[1],2));
if (dist <= this.radius + _other_radius) return true;
else return false;
}
public boolean isOverlaping(CritpointRegion _other_critpoint_region) {
float dist = (float) Math.sqrt(Math.pow(_other_critpoint_region.centroid[0]-this.centroid[0],2) + Math.pow(_other_critpoint_region.centroid[1],2));
if (dist <= this.radius + _other_critpoint_region.radius) return true;
else return false;
}
}
| [
"miroslav.radojevic@gmail.com"
] | miroslav.radojevic@gmail.com |
c7b406117acc6fc2da2a38be2b8a3a047e6dfd86 | 7969fd064cfc9c7ceaccc6e13e560c707c393a87 | /sample/src/main/java/com/zc/view/sample/HomeActivity.java | b597530c2084f49ac5051a5953ec9dd16bbd803c | [
"Apache-2.0"
] | permissive | mayemonkey/MonkeyDatePager | 53a71976e0bc1be3108494d5ea4129cf1e043099 | 42ea4b7db43c2bfa3ef6bbca2ec5fe7210ea135e | refs/heads/master | 2020-12-24T07:56:01.241940 | 2016-11-14T15:35:09 | 2016-11-14T15:35:09 | 73,352,752 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,198 | java | package com.zc.view.sample;
import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;
import com.maye.view.MonkeyDatePager;
import java.util.Calendar;
public class HomeActivity extends Activity implements MonkeyDatePager.OnMonkeyTimeChangedListener, MonkeyDatePager.OnMonkeyTypeChangedListener {

    private TextView tv_date;
    private MonkeyDatePager mdp_home;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_home);
        bindViews();
    }

    /** Looks up the views and registers this activity as the pager's listeners. */
    private void bindViews() {
        tv_date = (TextView) findViewById(R.id.tv_date);
        mdp_home = (MonkeyDatePager) findViewById(R.id.mdp_home);
        mdp_home.setOnMonkeyTypeChangedListener(this);
        mdp_home.setOnMonkeyTimeChangedListener(this);
    }

    @Override
    public void onTypeChanged(int type) {
        refreshDateLabel();
    }

    @Override
    public void onTimeChanged(Calendar time) {
        refreshDateLabel();
    }

    /** Reads the pager's current inner time and shows it in the date label. */
    private void refreshDateLabel() {
        tv_date.setText("Date:" + mdp_home.getInnerTime());
    }
}
| [
"ssdd050@163.com"
] | ssdd050@163.com |
3164e1ee7768303975e9d3d4d3bee175f10aa1ce | dcd14f6f9fe9fba932402ef00de85d92199b2689 | /Java/New folder/IntroductionToJava/src/Employee.java | 56bd521b8dbfdc1431b7bd7f50b9b774937612a7 | [] | no_license | rahulshelke3099/StudyRepo | c006755c3c53433166ecdb5e853000b8223ee541 | ae390ffc508a1b7a6a75cbaf9191738d9bc9b772 | refs/heads/master | 2022-06-04T14:13:12.779146 | 2020-05-03T10:44:51 | 2020-05-03T10:44:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 991 | java |
/**
 * A lazily-created single Employee: the static factories hand out at most one instance for the
 * lifetime of the class; any further creation attempt returns null.
 */
public class Employee {

    // Number of instances created so far; the factories refuse to create a second one.
    private static int count = 0;

    private String empId;
    private String empName;
    private float empSal;

    // Default instance: placeholder id "A11000", no name, zero salary.
    private Employee() {
        this("A11000", null, 0.00f);
    }

    private Employee(String id, String name, float salary) {
        this.empId = id;
        this.empName = name;
        this.empSal = salary;
        count++;
    }

    /** Returns a fresh default Employee, or null when one was already created. */
    public static Employee createEmployee() {
        return (count == 0) ? new Employee() : null;
    }

    /** Returns a fresh initialised Employee, or null when one was already created. */
    public static Employee createEmployee(String id, String name, float salary) {
        return (count == 0) ? new Employee(id, name, salary) : null;
    }

    public String getEmpId() {
        return this.empId;
    }

    public void setEmpId(String empId) {
        this.empId = empId;
    }

    public String getEmpName() {
        return this.empName;
    }

    public void setEmpName(String empName) {
        this.empName = empName;
    }

    public float getEmpSal() {
        return this.empSal;
    }

    public void setEmpSal(float empSal) {
        this.empSal = empSal;
    }
}
| [
"pulkit_rastogi@persistent.co.in"
] | pulkit_rastogi@persistent.co.in |
30cbc8377fbdeeae704b064610ab84e5b240b572 | 7f7c9646636be86356fc1c84220e28c328f5d6b2 | /.history/src/main/java/com/dailyopt/contentservice/controller/ApiController_20200418163931.java | c3828568d7933586024a49908bfff1d764856a1e | [] | no_license | tuandat95cbn/content-service | 8ac9b29bf28e2baec7557bc2a336be0a498c8561 | 624aca3e999163b83d454bc7989f581c3883ca4b | refs/heads/master | 2023-07-14T05:56:52.551922 | 2021-08-29T09:34:13 | 2021-08-29T09:34:13 | 401,001,974 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,018 | java | package com.dailyopt.contentservice.controller;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.dailyopt.contentservice.model.ContentModel;
import com.dailyopt.contentservice.repo.ContentService;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.data.mongodb.gridfs.GridFsResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class ApiController {

    @Autowired
    private ContentService contentService;

    /**
     * Stores the uploaded content in GridFS and returns the generated id, or a 500 response when
     * storage fails (previously a misleading 200 with a null body was returned).
     */
    @PostMapping("/")
    public ResponseEntity<Map> create(ContentModel model) {
        ObjectId id;
        try {
            id = contentService.storeFileToGridFs(model);
        } catch (IOException e) {
            e.printStackTrace();
            return ResponseEntity.status(500).build();
        }
        Map<String, String> response = new HashMap<>();
        response.put("status", "SUCESS");
        response.put("id", id.toHexString());
        return ResponseEntity.ok().body(response);
    }

    /**
     * Streams the stored content back as a file attachment. The previous draft referenced an
     * undefined local FileInputStream; the content is now streamed directly from GridFS.
     */
    @GetMapping("/{id}")
    public ResponseEntity<InputStreamResource> get(@PathVariable String id) {
        GridFsResource content = contentService.getById(id);
        HttpHeaders respHeaders = new HttpHeaders();
        try {
            respHeaders.setContentLength(content.contentLength());
            respHeaders.setContentDispositionFormData("attachment", "fileNameIwant.pdf");
            InputStreamResource isr = new InputStreamResource(content.getInputStream());
            return ResponseEntity.ok().headers(respHeaders).body(isr);
        } catch (IOException e) {
            e.printStackTrace();
            return ResponseEntity.status(500).build();
        }
    }
}
| [
"tuandat95cbn@gmail.com"
] | tuandat95cbn@gmail.com |
86bc0525fb192203ae486b16f1b36a67c4dd3dc9 | dd80a584130ef1a0333429ba76c1cee0eb40df73 | /external/nist-sip/java/gov/nist/javax/sip/header/ims/AuthorizationHeaderIms.java | d65b2d1914d6a470b0a87bec5efa7fcd72a84931 | [
"MIT",
"NIST-PD"
] | permissive | karunmatharu/Android-4.4-Pay-by-Data | 466f4e169ede13c5835424c78e8c30ce58f885c1 | fcb778e92d4aad525ef7a995660580f948d40bc9 | refs/heads/master | 2021-03-24T13:33:01.721868 | 2017-02-18T17:48:49 | 2017-02-18T17:48:49 | 81,847,777 | 0 | 2 | MIT | 2020-03-09T00:02:12 | 2017-02-13T16:47:00 | null | UTF-8 | Java | false | false | 2,105 | java | /*
* Conditions Of Use
*
* This software was developed by employees of the National Institute of
* Standards and Technology (NIST), an agency of the Federal Government
* and others.
* Pursuant to title 15 Untied States Code Section 105, works of NIST
* employees are not subject to copyright protection in the United States
* and are considered to be in the public domain. As a result, a formal
* license is not needed to use the software.
*
* This software is provided by NIST as a service and is expressly
* provided "AS IS." NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
* AND DATA ACCURACY. NIST does not warrant or make any representations
* regarding the use of the software or the results thereof, including but
* not limited to the correctness, accuracy, reliability or usefulness of
* the software.
*
* Permission to use this software is contingent upon your acceptance
* of the terms of this agreement
*
* .
*
*/
/*******************************************
* PRODUCT OF PT INOVACAO - EST DEPARTMENT *
*******************************************/
package gov.nist.javax.sip.header.ims;
import java.text.ParseException;
import javax.sip.InvalidArgumentException;
import javax.sip.header.AuthorizationHeader;
/**
 *
 * Extension to Authorization header (3GPP TS 24299-5d0)
 *
 * This extension defines a new auth-param ("integrity-protected") for the Authorization header
 * used in REGISTER requests.
 * For more information, see RFC 2617 [21] subclause 3.2.2.
 *
 * @author ALEXANDRE MIGUEL SILVA SANTOS
 */

public interface AuthorizationHeaderIms extends AuthorizationHeader
{

    // issued by Miguel Freitas (IT) PT-Inovacao
    /** Parameter value indicating the request was integrity protected. */
    public static final String YES = "yes";
    /** Parameter value indicating the request was not integrity protected. */
    public static final String NO = "no";


    /**
     * Sets the "integrity-protected" auth-param.
     *
     * @param integrityProtected presumably {@link #YES} or {@link #NO} — confirm against implementations
     * @throws InvalidArgumentException if the implementation rejects the value
     * @throws ParseException if the value cannot be parsed
     */
    public void setIntegrityProtected(String integrityProtected) throws InvalidArgumentException, ParseException;

    /** Returns the current value of the "integrity-protected" auth-param. */
    public String getIntegrityProtected();


}
| [
"karun.matharu@gmail.com"
] | karun.matharu@gmail.com |
cd757206eae1f84f8b2543e205956cd8a2bee0b4 | b5c58560a5df73fa8ce97879631bd3296dd026fb | /src/jsortie/object/quicksort/partitioner/kthstatistic/ObjectRangeSorterToKthStatisticPartitioner.java | 429a4c4a3f27b2fee5b44ab6f9d03d1fb8265fb6 | [
"MIT"
] | permissive | JamesBarbetti/jsortie | 272669c2fbe35acf05a2656268ab609537ab8e19 | 8086675235a598f6b081a4edd591012bf5408abf | refs/heads/main | 2023-04-13T08:50:56.132423 | 2021-04-18T04:57:14 | 2021-04-18T04:57:14 | 358,776,751 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 798 | java | package jsortie.object.quicksort.partitioner.kthstatistic;
import java.util.Comparator;
import jsortie.object.ObjectRangeSorter;
import jsortie.object.quicksort.selector.KthStatisticObjectPartitioner;
public class ObjectRangeSorterToKthStatisticPartitioner<T>
implements KthStatisticObjectPartitioner<T> {
public ObjectRangeSorter<T> sorter;
public ObjectRangeSorterToKthStatisticPartitioner
( ObjectRangeSorter<T> sorterToUse) {
sorter = sorterToUse;
}
public String toString() {
return this.getClass().getSimpleName()
+ "(" + sorter.toString() + ")";
}
@Override
public void partitionRangeExactly
( Comparator<? super T> comparator
, T[] vArray, int start, int stop
, int targetIndex) {
sorter.sortRange(comparator, vArray, start, stop);
}
} | [
"james_barbetti@yahoo.com"
] | james_barbetti@yahoo.com |
c5ba66f3e9fcb84d21ad890a390136224b5550f4 | 600b164bde9a45d312eb952f3aaaaf3a30566f70 | /src/com/gmail/tyi/Lesson4/CatAgeComparator.java | 19479e507b8d24c6556ca3d3a513f42a34060e57 | [] | no_license | Yura1979/ITProg-OOP | b570e135e1f2fa4c5f4562fb756b009736f867c8 | 1be59395c7d7e92e0e0e0e528514f9924068be9e | refs/heads/master | 2022-12-19T04:36:15.283516 | 2020-09-29T09:25:11 | 2020-09-29T09:25:11 | 291,686,107 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 536 | java | package com.gmail.tyi.Lesson4;
import java.util.Comparator;
/**
 * Orders Cat objects by age, ascending; null references sort before non-null ones, and two nulls
 * compare equal. (Raw Comparator signature retained for API compatibility.)
 */
public class CatAgeComparator implements Comparator {
    @Override
    public int compare(Object o1, Object o2) {
        // Null handling: null < non-null, null == null.
        if (o1 == null) {
            return (o2 == null) ? 0 : -1;
        }
        if (o2 == null) {
            return 1;
        }
        // Integer.compare avoids the manual greater/less ladder (and any subtraction overflow).
        return Integer.compare(((Cat) o1).getAge(), ((Cat) o2).getAge());
    }
}
| [
"tyi@outlook.com"
] | tyi@outlook.com |
13342dee2d722219e30ddc6ac54b0e20efe7702c | 117258e900f8fd32bca3d3d506a66d03f25198d9 | /app/src/androidTest/java/com/example/admin/tictactoe/ApplicationTest.java | 881aadab4503f21ca8476cf078e5e756364cb7a5 | [] | no_license | rckohler/TicTacToe | 0d7f1909121c1d41cfdd0a0cfb9293cb72b7c82d | 6605ee94b80966ad5be31db6a97f881b2e61c933 | refs/heads/master | 2020-12-25T08:38:20.016735 | 2016-08-04T15:29:43 | 2016-08-04T15:29:43 | 64,946,146 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 358 | java | package com.example.admin.tictactoe;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Default Android instrumentation-test scaffold generated with the project.
 *
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    /** Runs the test case against the stock {@link Application} class. */
    public ApplicationTest() {
        super(Application.class);
    }
}
"sasha.westerfield@gmail.com"
] | sasha.westerfield@gmail.com |
4d7e4c93bcc0d906685b97350fedc8c72c825325 | e1b0bc2939a8d53684fb147f112234957e355199 | /SendingReceivingData/app/src/main/java/com/example/dticgeneral/sendingreceivingdata/DeviceListAdapter.java | 509294ad29098be823c7b752310d2f0a66630aff | [] | no_license | misaelgaray/AndroidBluetooth | 5e5dec843a2ca7ad926777cbaf8abaffc3e75a5f | 54e836a420b14c2e42acbe6a258998b8881561ba | refs/heads/master | 2020-03-10T02:55:53.234476 | 2018-04-17T16:15:30 | 2018-04-17T16:15:30 | 129,150,012 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,308 | java | package com.example.dticgeneral.sendingreceivingdata;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.ArrayList;
/**
 * Adapter rendering one discovered BluetoothDevice (name + address) per list row.
 *
 * Created by misaelgaray on 13/04/18.
 */
public class DeviceListAdapter extends ArrayAdapter<BluetoothDevice> {

    /*
     * Inflates the XML layout used for each list item.
     */
    private LayoutInflater inflater;

    /*
     * The discovered devices backing this adapter, supplied at construction time.
     */
    private ArrayList<BluetoothDevice> devices;

    /*
     * Layout resource id of the list-item view.
     */
    private int id;

    /**
     * @param context the hosting context, used to obtain the LayoutInflater
     * @param id the layout resource for one list row
     * @param devices the discovered devices to display
     */
    public DeviceListAdapter(@NonNull Context context, int id, ArrayList<BluetoothDevice> devices) {
        super(context, id, devices);
        this.devices = devices;
        this.inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        this.id = id;
    }

    /*
     * Called for every visible row. FIX: reuse the recycled convertView when the list hands one
     * back — the previous implementation inflated a new view on every call, defeating ListView
     * recycling. The device at this row's position is then bound into the row's text views.
     */
    public View getView(int position, View convertView, ViewGroup parent){
        if (convertView == null) {
            convertView = inflater.inflate(id, null);
        }

        BluetoothDevice device = devices.get(position);

        if(device != null){
            TextView name = (TextView) convertView.findViewById(R.id.deviceName);
            TextView address = (TextView) convertView.findViewById(R.id.deviceAddress);

            if(name != null)
                name.setText(device.getName());
            if(address != null)
                address.setText(device.getAddress());
        }

        return convertView;
    }
}
| [
"dticgeneral@dtic.local"
] | dticgeneral@dtic.local |
5934bcc8146066e32c64a3395bc2bd2fccab8095 | 727803491cf386bcbba153ba530aa3bf19d87261 | /src/main/java/ru/chuikov/SpringBackend/repository/generic/GenericDao.java | e3df5ccb2a6fe42bb34bd6886cdd582c99696481 | [] | no_license | Tank4444/SpringBackend | 805216f75417e7a92c23277b270937a8f96ffd10 | 5113c72421757340e45d8e55654ed3c5a6a67b81 | refs/heads/master | 2020-04-03T00:00:39.082507 | 2018-10-26T20:05:11 | 2018-10-26T20:05:11 | 154,885,954 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 353 | java | package ru.chuikov.SpringBackend.repository.generic;
import java.util.List;
public interface GenericDao<T> {
public T findUniqueBy (String propertyName, Object value);
public List<T> findBy (String propertyName, Object value);
public List<T> getByIds (String IDS);
public void insert (T entity) ;
public void update (T entity) ;
}
| [
"tank4444@mail.ru"
] | tank4444@mail.ru |
d9e775cd118bcdeb74f130ee4ef32157717683bd | e7eb4595206cc9718e2ed4b54272fccdc6a6c3c9 | /MediaManagerAm/src/com/streetfiresound/mediamanager/player/MLIDQueue.java | 36e52ad6f449c8a8a4993c6b604f588536ac2073 | [] | no_license | sgstreet/StreetFireSound | 8147ea7391fe639e1912fae0a75069c6c3b9bb1f | dd6ac7341fd660c08232c12f60a2f59e1036eaaa | refs/heads/master | 2021-01-13T01:50:05.625953 | 2013-07-08T21:10:16 | 2013-07-08T21:10:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 12,459 | java | /*
* Copyright (C) 2004 by StreetFire Sound Labs
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* $Id: MLIDQueue.java,v 1.6 2005/03/16 04:23:42 stephen Exp $
*/
package com.streetfiresound.mediamanager.player;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Observable;
import com.redrocketcomputing.util.concurrent.NullReadWriteLock;
import com.redrocketcomputing.util.concurrent.ReadWriteLock;
import com.redrocketcomputing.util.concurrent.Sync;
import com.redrocketcomputing.util.log.LoggerSingleton;
import com.streetfiresound.mediamanager.mediacatalog.types.MLID;
import com.streetfiresound.mediamanager.mediaplayer.constants.ConstMoveDirection;
import com.streetfiresound.mediamanager.mediaplayer.types.HaviMediaPlayerBadQueueIndexException;
import com.streetfiresound.mediamanager.mediaplayer.types.HaviMediaPlayerBadVersionException;
import com.streetfiresound.mediamanager.mediaplayer.types.HaviMediaPlayerException;
import com.streetfiresound.mediamanager.mediaplayer.types.HaviMediaPlayerInvalidParameterException;
import com.streetfiresound.mediamanager.mediaplayer.types.HaviMediaPlayerUnidentifiedFailureException;
import com.streetfiresound.mediamanager.mediaplayer.types.PlayQueue;
/**
 * Observable queue of {@link MLID} entries backing the media player's play queue.
 * Every mutating operation bumps an internal version counter and notifies
 * observers with {@link #PLAYITEM_QUEUE_CHANGED}; callers must present a
 * matching version for remove/move so stale clients cannot modify the queue.
 *
 * NOTE(review): the lock currently in use is a NullReadWriteLock (a no-op), so
 * this class is effectively NOT thread safe as configured; swap in the
 * commented-out reentrant lock for real concurrent protection.
 *
 * @author stephen
 */
class MLIDQueue extends Observable
{
  /** Argument passed to observers whenever the queue contents change */
  public final static Integer PLAYITEM_QUEUE_CHANGED = new Integer(0);

  /** Backing store for the queued MLID entries */
  private List queue = new ArrayList();

  /** No-op lock; replace with the reentrant variant below for real locking */
  private ReadWriteLock lock = new NullReadWriteLock();
  //private ReadWriteLock lock = new ReentrantWriterPreferenceReadWriteLock();
  private Sync readLock = lock.readLock();
  private Sync writeLock = lock.writeLock();

  /** Monotonically increasing queue version, bumped on every modification */
  private int version = 0;

  /**
   * Construct an empty MLIDQueue
   */
  public MLIDQueue()
  {
  }

  /**
   * Clear the queue and notify observers.
   * @throws HaviMediaPlayerException Thrown if a thread interruption is detected
   */
  public void clear() throws HaviMediaPlayerException
  {
    try
    {
      // Get write lock
      writeLock.acquire();
      try
      {
        // Clear the array and mark as changed
        queue.clear();
        version++;
        setChanged();
      }
      finally
      {
        // Always release, even on unexpected failure
        writeLock.release();
      }

      // Notify observers outside of the lock
      notifyObservers(PLAYITEM_QUEUE_CHANGED);
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Return the current number of elements in the queue
   * @return The number of elements in the queue
   * @throws HaviMediaPlayerException Thrown if a thread interruption is detected
   */
  public final int size() throws HaviMediaPlayerException
  {
    try
    {
      // Lock for read
      readLock.acquire();
      try
      {
        // BUGFIX: return the size read under the lock instead of re-reading
        // the queue after the lock has been released
        return queue.size();
      }
      finally
      {
        // Unlock
        readLock.release();
      }
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Get the MLID at the specified index
   * @param index The index for the requested entry
   * @return The MLID at the index
   * @throws HaviMediaPlayerException If there is a range problem or a thread interruption
   */
  public MLID getAt(int index) throws HaviMediaPlayerException
  {
    try
    {
      // Lock for read
      readLock.acquire();
      try
      {
        // Check range
        if (index < 0 || index >= queue.size())
        {
          // Bad range we are confused
          throw new HaviMediaPlayerBadQueueIndexException("bad index: " + index);
        }

        // BUGFIX: read the entry while still holding the lock
        return (MLID)queue.get(index);
      }
      finally
      {
        // Unlock
        readLock.release();
      }
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Add an array of MLID entries to the end of the queue
   * @param items The array of entries to add
   * @return The new queue version number
   * @throws HaviMediaPlayerException Thrown if a thread interruption is detected
   */
  public int add(MLID[] items) throws HaviMediaPlayerException
  {
    try
    {
      // Get write lock
      writeLock.acquire();
      try
      {
        // Append array to the list
        for (int i = 0; i < items.length; i++)
        {
          // Append element and mark as changed
          queue.add(items[i]);
          setChanged();
        }

        // Update the version only if something was actually added
        if (hasChanged())
        {
          version++;
        }
      }
      finally
      {
        // Release
        writeLock.release();
      }

      // Notify observers (no-op unless marked as changed above)
      notifyObservers(PLAYITEM_QUEUE_CHANGED);

      // Return version
      LoggerSingleton.logDebugCoarse(this.getClass(), "add", "returning version: " + version);
      return version;
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Remove the specified range of entries from the queue. The queue version
   * numbers must match in order for the removal to succeed.
   * @param version The expected queue version number
   * @param start The start index of the range to remove
   * @param size The size of the range to remove
   * @return The new queue version number
   * @throws HaviMediaPlayerException Thrown if a version mismatch, range problem or thread interruption is detected
   */
  public int remove(int version, int start, int size) throws HaviMediaPlayerException
  {
    try
    {
      // Get write lock
      writeLock.acquire();
      try
      {
        // Match versions
        if (version != this.version)
        {
          // Bad version
          throw new HaviMediaPlayerBadVersionException("version mismatch");
        }

        // Check start range
        if (start < 0 || start > queue.size())
        {
          // Bad range
          throw new HaviMediaPlayerInvalidParameterException("bad start range: " + start);
        }

        // Check sublist range
        int end = start + size;
        if (end > queue.size() || start > end)
        {
          // Bad range
          throw new HaviMediaPlayerInvalidParameterException("bad size: " + start + "->" + size);
        }

        // Remove the elements and bump the version
        queue.subList(start, end).clear();
        this.version++;
        setChanged();
      }
      finally
      {
        // Release (also covers the exception paths above, which previously
        // each had to release by hand)
        writeLock.release();
      }

      // Notify observers outside of the lock
      notifyObservers(PLAYITEM_QUEUE_CHANGED);

      // Return the new version
      return this.version;
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Move the specified range of entries up or down 1 space in the queue. The
   * queue version numbers must match in order for the move to succeed.
   * @param version The expected queue version number
   * @param direction The direction of the move (ConstMoveDirection.UP or DOWN)
   * @param start The start index of the range to move
   * @param size The size of the range to move
   * @return The new queue version number
   * @throws HaviMediaPlayerException Thrown if a version mismatch, range problem or thread interruption is detected
   */
  public int move(int version, int direction, int start, int size) throws HaviMediaPlayerException
  {
    try
    {
      // Get write lock
      writeLock.acquire();
      try
      {
        // Match versions
        if (version != this.version)
        {
          // Bad version
          throw new HaviMediaPlayerBadVersionException("version mismatch");
        }

        // Range check the direction (BUGFIX: previously thrown while still
        // holding the write lock without releasing it)
        if (direction != ConstMoveDirection.UP && direction != ConstMoveDirection.DOWN)
        {
          // Bad range
          throw new HaviMediaPlayerInvalidParameterException("bad direction: " + direction);
        }

        // Check start range
        if (start < 0 || start > queue.size())
        {
          // Bad range
          throw new HaviMediaPlayerInvalidParameterException("bad start range: " + start);
        }

        // Check sublist range
        int end = start + size;
        if (end > queue.size() || start > end)
        {
          // Bad range
          throw new HaviMediaPlayerInvalidParameterException("bad size: " + start + "->" + size);
        }

        // Clone the subrange, then remove it from the queue
        List rangeList = new ArrayList(queue.subList(start, end));
        queue.subList(start, end).clear();

        // Calculate insert point
        int insertIndex;
        if (direction == ConstMoveDirection.UP)
        {
          // Limit move to start of list
          insertIndex = Math.max(0, start - 1);
        }
        else
        {
          // Limit move to end of list
          // BUGFIX: was Math.max(queue.size(), start + 1), which produced an
          // out-of-range insert index; clamp to the (now shorter) list size
          insertIndex = Math.min(queue.size(), start + 1);
        }

        // Insert the range list and bump the version
        queue.addAll(insertIndex, rangeList);
        this.version++;
        setChanged();
      }
      finally
      {
        // Release
        writeLock.release();
      }

      // Notify observers outside of the lock
      notifyObservers(PLAYITEM_QUEUE_CHANGED);

      // Return the new version
      return this.version;
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Return a PlayQueue object built from this queue
   * @return The matching PlayQueue
   * @throws HaviMediaPlayerException Thrown if a thread interruption is detected
   */
  public PlayQueue getQueue() throws HaviMediaPlayerException
  {
    try
    {
      // Lock for read
      readLock.acquire();
      try
      {
        // Snapshot the queue and its version consistently under the lock
        MLID[] items = (MLID[])queue.toArray(new MLID[queue.size()]);
        return new PlayQueue(version, items);
      }
      finally
      {
        // Unlock
        readLock.release();
      }
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Randomly re-order the queue
   * @throws HaviMediaPlayerException Thrown if a thread interruption is detected
   */
  public void shuffle() throws HaviMediaPlayerException
  {
    try
    {
      // Get write lock
      writeLock.acquire();
      try
      {
        // Shuffle the queue and bump the version
        Collections.shuffle(queue);
        version++;
        setChanged();
      }
      finally
      {
        // Release
        writeLock.release();
      }

      // Notify observers outside of the lock
      notifyObservers(PLAYITEM_QUEUE_CHANGED);
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }

  /**
   * Return the current queue version
   * @return The version of the current queue
   * @throws HaviMediaPlayerException Thrown if a thread interruption is detected
   */
  public int getVersion() throws HaviMediaPlayerException
  {
    try
    {
      // Lock for read. BUGFIX: acquire outside the try/finally so a failed
      // acquire no longer triggers a release of a lock that was never taken
      readLock.acquire();
      try
      {
        LoggerSingleton.logDebugCoarse(this.getClass(), "getVersion", "return version: " + version);

        // Return the current version
        return version;
      }
      finally
      {
        // Always unlock
        readLock.release();
      }
    }
    catch (InterruptedException e)
    {
      // Translate
      throw new HaviMediaPlayerUnidentifiedFailureException(e.toString());
    }
  }
}
| [
"stephen@redrocketcomputing.com"
] | stephen@redrocketcomputing.com |
521dafdb571c263f92fab8d8784acf9eb390affa | 1c0df66bdc53d84aea6f7aa1f0183cf6f8392ab1 | /temp/src/minecraft/net/minecraft/network/LegacyPingHandler.java | 55d51f361c3c2fd5e2369e484df2de8b636bf91d | [] | no_license | yuwenyong/Minecraft-1.9-MCP | 9b7be179db0d7edeb74865b1a78d5203a5f75d08 | bc89baf1fd0b5d422478619e7aba01c0b23bd405 | refs/heads/master | 2022-05-23T00:52:00.345068 | 2016-03-11T21:47:32 | 2016-03-11T21:47:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,785 | java | package net.minecraft.network;
import com.google.common.base.Charsets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import java.net.InetSocketAddress;
import net.minecraft.network.NetworkSystem;
import net.minecraft.server.MinecraftServer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Netty inbound handler that answers "legacy" (pre-Netty-protocol) Minecraft
 * server-list pings. Decompiled/obfuscated code: field_* and func_* names are
 * SRG names from the MCP mapping.
 *
 * Protocol sniffed from the first byte: legacy pings always begin with 0xFE.
 * If the packet is not a legacy ping, the buffer is rewound and passed on to
 * the rest of the pipeline unchanged.
 */
public class LegacyPingHandler extends ChannelInboundHandlerAdapter {
   private static final Logger field_151258_a = LogManager.getLogger();

   // Owning network system; used to reach the MinecraftServer for MOTD/player counts
   private NetworkSystem field_151257_b;

   public LegacyPingHandler(NetworkSystem p_i45286_1_) {
      this.field_151257_b = p_i45286_1_;
   }

   /**
    * Inspects the incoming bytes; if they form a legacy ping (0xFE ...),
    * writes the matching legacy status response and closes the channel.
    * Otherwise rewinds the buffer, removes this handler and forwards the
    * message to the next handler in the pipeline.
    */
   public void channelRead(ChannelHandlerContext p_channelRead_1_, Object p_channelRead_2_) throws Exception {
      ByteBuf bytebuf = (ByteBuf)p_channelRead_2_;
      // Remember the read position so the buffer can be handed on untouched
      // if this turns out not to be a legacy ping
      bytebuf.markReaderIndex();
      // flag == true means "not consumed here; forward to the pipeline"
      boolean flag = true;

      try {
         // Legacy pings always start with the 0xFE marker byte
         if(bytebuf.readUnsignedByte() == 254) {
            InetSocketAddress inetsocketaddress = (InetSocketAddress)p_channelRead_1_.channel().remoteAddress();
            MinecraftServer minecraftserver = this.field_151257_b.func_151267_d();
            // Remaining length discriminates the legacy ping variants
            int i = bytebuf.readableBytes();
            switch(i) {
            case 0:
               // Bare 0xFE: ping from a pre-1.3.x client.
               // Response: "motd§onlinePlayers§maxPlayers"
               field_151258_a.debug("Ping: (<1.3.x) from {}:{}", new Object[]{inetsocketaddress.getAddress(), Integer.valueOf(inetsocketaddress.getPort())});
               String s2 = String.format("%s\u00a7%d\u00a7%d", new Object[]{minecraftserver.func_71273_Y(), Integer.valueOf(minecraftserver.func_71233_x()), Integer.valueOf(minecraftserver.func_71275_y())});
               this.func_151256_a(p_channelRead_1_, this.func_151255_a(s2));
               break;
            case 1:
               // 0xFE 0x01: ping from a 1.4-1.5.x client; the payload byte must be 0x01
               if(bytebuf.readUnsignedByte() != 1) {
                  return;
               }

               // Response: "§1\0protocol\0version\0motd\0onlinePlayers\0maxPlayers"
               field_151258_a.debug("Ping: (1.4-1.5.x) from {}:{}", new Object[]{inetsocketaddress.getAddress(), Integer.valueOf(inetsocketaddress.getPort())});
               String s = String.format("\u00a71\u0000%d\u0000%s\u0000%s\u0000%d\u0000%d", new Object[]{Integer.valueOf(127), minecraftserver.func_71249_w(), minecraftserver.func_71273_Y(), Integer.valueOf(minecraftserver.func_71233_x()), Integer.valueOf(minecraftserver.func_71275_y())});
               this.func_151256_a(p_channelRead_1_, this.func_151255_a(s));
               break;
            default:
               // 1.6 ping: 0xFE 0x01 followed by an 0xFA "MC|PingHost" plugin
               // message. Validate the whole structure before replying.
               boolean flag1 = bytebuf.readUnsignedByte() == 1;
               // Plugin-message packet id must be 0xFA
               flag1 = flag1 & bytebuf.readUnsignedByte() == 250;
               // Channel name (UTF-16BE, length-prefixed in chars) must be "MC|PingHost"
               flag1 = flag1 & "MC|PingHost".equals(new String(bytebuf.readBytes(bytebuf.readShort() * 2).array(), Charsets.UTF_16BE));
               // Declared payload length in bytes
               int j = bytebuf.readUnsignedShort();
               // Client protocol version must be at least 73 (1.6)
               flag1 = flag1 & bytebuf.readUnsignedByte() >= 73;
               // Payload = 1 byte protocol + 2 bytes host length + host chars + 4 bytes port
               flag1 = flag1 & 3 + bytebuf.readBytes(bytebuf.readShort() * 2).array().length + 4 == j;
               // Port must fit in an unsigned short
               flag1 = flag1 & bytebuf.readInt() <= '\uffff';
               // No trailing garbage allowed
               flag1 = flag1 & bytebuf.readableBytes() == 0;
               if(!flag1) {
                  return;
               }

               field_151258_a.debug("Ping: (1.6) from {}:{}", new Object[]{inetsocketaddress.getAddress(), Integer.valueOf(inetsocketaddress.getPort())});
               String s1 = String.format("\u00a71\u0000%d\u0000%s\u0000%s\u0000%d\u0000%d", new Object[]{Integer.valueOf(127), minecraftserver.func_71249_w(), minecraftserver.func_71273_Y(), Integer.valueOf(minecraftserver.func_71233_x()), Integer.valueOf(minecraftserver.func_71275_y())});
               ByteBuf bytebuf1 = this.func_151255_a(s1);

               try {
                  this.func_151256_a(p_channelRead_1_, bytebuf1);
               } finally {
                  bytebuf1.release();
               }
            }

            // Consumed here: release the buffer and do not forward
            bytebuf.release();
            flag = false;
            return;
         }
      } catch (RuntimeException var21) {
         // Malformed legacy ping (e.g. buffer under-read): silently drop and
         // let the finally block rewind/forward the message
         return;
      } finally {
         if(flag) {
            // Not a legacy ping: rewind, remove this handler and pass the
            // original bytes to the rest of the pipeline
            bytebuf.resetReaderIndex();
            p_channelRead_1_.channel().pipeline().remove("legacy_query");
            p_channelRead_1_.fireChannelRead(p_channelRead_2_);
         }
      }
   }

   // Writes the response buffer from the head of the pipeline (bypassing any
   // encoders) and closes the connection once the write completes
   private void func_151256_a(ChannelHandlerContext p_151256_1_, ByteBuf p_151256_2_) {
      p_151256_1_.pipeline().firstContext().writeAndFlush(p_151256_2_).addListener(ChannelFutureListener.CLOSE);
   }

   // Encodes a legacy kick/response packet: 0xFF marker, char count, then the
   // string as big-endian UTF-16 chars
   private ByteBuf func_151255_a(String p_151255_1_) {
      ByteBuf bytebuf = Unpooled.buffer();
      bytebuf.writeByte(255);
      char[] achar = p_151255_1_.toCharArray();
      bytebuf.writeShort(achar.length);

      for(char c0 : achar) {
         bytebuf.writeChar(c0);
      }

      return bytebuf;
   }
}
| [
"jholley373@yahoo.com"
] | jholley373@yahoo.com |
c3d50d113091f2d75423300c079086d527ae6d32 | b26c66210a5dd0fd5686136a7da557c70076e2a1 | /mr-shop-parent/mr-shop-commons/mr-shop-common-core/src/main/java/com/baidu/shop/base/Result.java | 7c73a27ab65b3b17749d1a044331e563a92ea377 | [] | no_license | xiao-pengyan/xpy-shop | 18b568ee5c980f060b73d54a33a2bb5ea672005a | 28aec3422d4da628ff75928447e780f4be807f8b | refs/heads/master | 2023-01-02T17:44:00.054483 | 2020-10-23T08:07:27 | 2020-10-23T08:07:27 | 290,706,153 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 522 | java | package com.baidu.shop.base;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Generic service-layer response wrapper carrying a status code, a human
 * readable message and an optional payload.
 *
 * Getters/setters/equals/hashCode/toString are generated by Lombok
 * ({@code @Data}); the no-arg constructor by {@code @NoArgsConstructor}.
 *
 * @param <T> type of the payload carried in {@link #data}
 * @author xiaopengyan
 * @since 2020/8/27
 */
@Data
@NoArgsConstructor
public class Result<T> {

    private Integer code;    // status / business return code
    private String message;  // human readable return message
    private T data;          // payload; may be null

    /**
     * Builds a fully populated result.
     *
     * @param code    status / business return code
     * @param message human readable return message
     * @param data    payload; must actually be assignable to {@code T} — the
     *                cast below is unchecked and a mismatch only surfaces
     *                later, at the call site that reads {@code data}
     */
    @SuppressWarnings("unchecked")
    public Result(Integer code, String message, Object data) {
        this.code = code;
        this.message = message;
        this.data = (T) data;
    }
}
| [
"xiaopengyan36@163.com"
] | xiaopengyan36@163.com |
e25b5f304fa7a6fbed7506e267bb5e008bc8dd3e | a890c56a44233f4b0b1b012f9ca705cc49cec2c5 | /company-profile-service/src/main/java/com/doitgeek/companyprofileservice/CompanyProfileServiceApplication.java | 407a4b8e5e93ef2730163a2909aaa9daf22d7dc2 | [] | no_license | sainath23/online-job-portal-microservices | 5d4553cfc277288c63885dbdc6f5a1732bd97bb7 | 779dea3030f05486000889e5098dd51629326c99 | refs/heads/master | 2020-07-06T13:01:05.206011 | 2019-09-01T15:54:32 | 2019-09-01T15:54:32 | 203,025,588 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 463 | java | package com.doitgeek.companyprofileservice;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
// Spring Boot entry point for the company-profile microservice.
// @EnableDiscoveryClient registers this instance with the configured service
// registry so other services can locate it by name.
@SpringBootApplication
@EnableDiscoveryClient
public class CompanyProfileServiceApplication {

    // Boots the Spring application context (and the embedded server)
    public static void main(String[] args) {
        SpringApplication.run(CompanyProfileServiceApplication.class, args);
    }

}
| [
"saiparkar4@gmail.com"
] | saiparkar4@gmail.com |
2d78eecc13570453d80018167e09415dce0ffe20 | 4bfea59e02407d49130e30d91973569fadfe8292 | /src/test/java/org/mumdag/utils/XmlUtilsTest.java | 4bc3f27f16b261357f28b3baf184e832645c594c | [] | no_license | wagenge/Mumdag | b37673bb118fc12c7d657f2885915c6e76ee3123 | 443871129b16944c0022680860a3ce0f55e2a678 | refs/heads/master | 2018-09-15T23:07:38.655600 | 2018-06-05T15:39:30 | 2018-06-05T15:42:28 | 115,509,845 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 38,228 | java | package org.mumdag.utils;
//-----------------------------------------------------------------------------
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.testng.collections.Lists;
import org.w3c.dom.Document;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import static org.assertj.core.api.Assertions.fail;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
//-----------------------------------------------------------------------------
public class XmlUtilsTest {
private static Logger log = null;
//=============================================================================
/*
* TEST METHODS INCLUDING DATAPROVIDER (public)
*/
// Verifies XpathUtils.resolveXpathString(String, String...) against the
// varargs data provider below.
@Test(dataProvider = "data_resolveXpathString_varargs_ok")
public void test_resolveXpathString_varargs_ok(String testDesc, String xpath, String paramStr, String expRes) {
	log.info("{} ... started", testDesc);

	// Split the "||"-separated parameter string into individual replacements
	String[] resolveParams = paramStr.split("\\|\\|");
	String actual = XpathUtils.resolveXpathString(xpath, resolveParams);

	assertThat(actual).isEqualTo(expRes);
	log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
// Data rows: {description, xpath template with [_placeholder_] predicates,
// "||"-separated "placeholder::replacement" parameter string, expected result}
@DataProvider
public Object[][] data_resolveXpathString_varargs_ok() {
	return new Object[][] {
			new Object[] {"01 - xpath, zero params",
					"/my/xpath[_with_]/different[_parameters_]",
					"",
					"/my/xpath[_with_]/different[_parameters_]"},
			new Object[] {"02 - xpath, one param",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::@key='value'",
					"/my/xpath[@key='value']/different[_parameters_]"},
			new Object[] {"03 - xpath, one param, two occasions",
					"/my/xpath[_with_]/different[_with_]",
					"_with_::@key='value'",
					"/my/xpath[@key='value']/different[@key='value']"},
			new Object[] {"04 - xpath, one empty param",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::",
					"/my/xpath/different[_parameters_]"},
			new Object[] {"05 - xpath, two params",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::@key='value'||_parameters_::@key2='value2'",
					"/my/xpath[@key='value']/different[@key2='value2']"},
			new Object[] {"06 - xpath, two params, one is wrong",
					"/my/xpath[_with2_]/different[_parameters_]",
					"_with_::@key='value'||_parameters_::@key2='value2'",
					"/my/xpath[_with2_]/different[@key2='value2']"},
			new Object[] {"07 - xpath, two params, one is wrong seperated",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_:@key='value'||_parameters_::@key2='value2'",
					"/my/xpath[_with_]/different[@key2='value2']"},
			new Object[] {"08 - xpath, two params, one is wrong seperated",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::@key::'value'||_parameters_::@key2='value2'",
					"/my/xpath[_with_]/different[@key2='value2']"},
			new Object[] {"09 - xpath, two params, first is empty",
					"/my/xpath[_with_]/different[_parameters_]",
					"||_parameters_::@key2='value2'",
					"/my/xpath[_with_]/different[@key2='value2']"},
			new Object[] {"10 - xpath, two params, second is empty",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::@key='value'||",
					"/my/xpath[@key='value']/different[_parameters_]"},
			new Object[] {"11 - xpath, two params, both are empty",
					"/my/xpath[_with_]/different[_parameters_]",
					"||",
					"/my/xpath[_with_]/different[_parameters_]"},
			new Object[] {"12 - xpath, two params, first with empty value",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::||_parameters_::@key2='value2'",
					"/my/xpath/different[@key2='value2']"},
			new Object[] {"13 - xpath, two params, second with empty value",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::@key='value'||_parameters_::",
					"/my/xpath[@key='value']/different"},
			new Object[] {"14 - xpath, two params, both empty values",
					"/my/xpath[_with_]/different[_parameters_]",
					"_with_::||_parameters_::",
					"/my/xpath/different"},
			new Object[] {"15 - xpath empty",
					"",
					"_with_:@key='value'||_parameters_::@key2='value2'",
					""},
			new Object[] {"16 - xpath null",
					null,
					"_with_:@key='value'||_parameters_::@key2='value2'",
					null},
	};
}
//-----------------------------------------------------------------------------
// Verifies XpathUtils.resolveXpathString(String, HashMap) by first turning the
// "||"-separated parameter string into a placeholder->replacement map.
@Test(dataProvider = "data_resolveXpathString_map_ok")
public void test_resolveXpathString_map_ok(String testDesc, String xpath, String paramStr, String expRes) {
	log.info("{} ... started", testDesc);

	HashMap<String, String> resolveMap = new HashMap<>();
	for (String token : paramStr.split("\\|\\|")) {
		if (StringUtils.isEmpty(token)) {
			continue;
		}
		String[] kv = token.split("::");
		if (kv.length == 1) {
			// placeholder with no replacement -> empty value
			resolveMap.put(kv[0], "");
		} else if (kv.length == 2) {
			resolveMap.put(kv[0], kv[1]);
		}
		// tokens with more than one "::" separator are deliberately ignored
	}

	String actual = XpathUtils.resolveXpathString(xpath, resolveMap);

	assertThat(actual).isEqualTo(expRes);
	log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
// Map-overload data: reuses all varargs cases and appends the map-only cases
@DataProvider
public Object[][] data_resolveXpathString_map_ok() {
	List<Object[]> combined = new ArrayList<>(Arrays.asList(data_resolveXpathString_varargs_ok()));
	combined.addAll(Arrays.asList(data_resolveXpathString_map2_ok()));
	return combined.toArray(new Object[combined.size()][]);
}
//-----------------------------------------------------------------------------
// Additional cases only valid for the map-based overload: text() placeholders
// inside quotes, which the varargs overload does not resolve
@DataProvider
public Object[][] data_resolveXpathString_map2_ok() {
	return new Object[][] {
			new Object[] {"17 - xpath, three params, one text param",
					"/my/xpath[_with_]/different[_parameters_]/and/text() = '_text_'",
					"_with_::@key='value'||_parameters_::@key2='value2'||_text_::value3",
					"/my/xpath[@key='value']/different[@key2='value2']/and/text() = 'value3'"},
			new Object[] {"18 - xpath, three params, text param empty",
					"/my/xpath[_with_]/different[_parameters_]/and/text() = '_text_'",
					"_with_::@key='value'||_parameters_::@key2='value2'||_text_::",
					"/my/xpath[@key='value']/different[@key2='value2']/and/contains(., '')"},
	};
}
//-----------------------------------------------------------------------------
// Verifies XpathUtils.removePredicatesFromXpath strips all [...] predicates
@Test(dataProvider = "data_removePredicatesFromXpath_ok")
public void test_removePredicatesFromXpath_ok(String testDesc, String xpath, String expRes) {
	log.info("{} ... started", testDesc);
	final String actual = XpathUtils.removePredicatesFromXpath(xpath);
	assertThat(actual).isEqualTo(expRes);
	log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
// Data rows: {description, xpath with predicates, expected predicate-free xpath}
@DataProvider
public Object[][] data_removePredicatesFromXpath_ok() {
	return new Object[][] {
			new Object[] {"01 - normal xpath, two predicates",
					"/my/xpath[_with_]/different[_parameters_]/and/text() = '_text_'",
					"/my/xpath/different/and/text() = '_text_'"},
			new Object[] {"02 - normal xpath, two predicates, one empty",
					"/my/xpath[_with_]/different[]/and/text() = '_text_'",
					"/my/xpath/different/and/text() = '_text_'"},
			new Object[] {"03 - normal xpath, no predicates",
					"/my/xpath/different/and/text() = '_text_'",
					"/my/xpath/different/and/text() = '_text_'"},
			new Object[] {"04 - empty xpath",
					"",
					""},
			new Object[] {"05 - xpath null",
					null,
					""},
	};
}
//-----------------------------------------------------------------------------
// Verifies XmlUtils.getNodeTextByXPath: reads the XML fixture from disk,
// evaluates the xpath and compares the ", "-joined node texts.
@Test(dataProvider = "data_getNodeTextByXPath_ok")
public void test_getNodeTextByXPath_ok(String testDesc, String xmlFile, String xpath, String expRes) {
	log.info("{} ... started", testDesc);

	List<String> nodeTexts = new ArrayList<>();
	try {
		String xml = new String(Files.readAllBytes(Paths.get(xmlFile)));
		nodeTexts = XmlUtils.getNodeTextByXPath(xml, xpath);
	} catch (Exception ex) {
		log.error("... failed");
		fail("No exception expected. Maybe file '" + xmlFile + "' not found!\n" + "Error: " + ex.getMessage());
	}

	assertThat(String.join(", ", nodeTexts)).isEqualTo(expRes);
	log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
// Data rows: {description, XML fixture file, xpath to evaluate,
// expected ", "-joined node/attribute texts}
@DataProvider
public Object[][] data_getNodeTextByXPath_ok() {
	return new Object[][] {
			new Object[] {"01 - xml ok, xpath ok, one node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-01-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/name/text()",
					"The Beatles"},
			new Object[] {"02 - xml ok, xpath ok, one node text found, text utf8",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-02-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/alias-list/alias[@locale='ru']/text()",
					"Битлз"},
			new Object[] {"03 - xml ok, xpath ok, many nodes' text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-03-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/alias-list/alias[@locale]/text()",
					"Beatles, Los Beatles, The Beatles, Битлз, ザ・ビートルズ, 披头士, 披頭四, 더 비틀즈"},
			new Object[] {"04 - xml ok, xpath ok, one attribute text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-04-inp.xml",
					"/metadata/artist/rating/@votes-count",
					"67"},
			new Object[] {"05 - xml ok, xpath with slash, one node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-05-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/name/text()/",
					"The Beatles"},
			new Object[] {"06 - xml ok, xpath ok, no node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-06-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/names/text()/",
					""},
			new Object[] {"07 - xml ok, xpath empty, no node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-07-inp.xml",
					"",
					""},
			new Object[] {"08 - xml ok, xpath null, no node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-08-inp.xml",
					null,
					""},
			new Object[] {"09 - xml ok, xpath wrong, no node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-09-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/name/texts()",
					""},
			new Object[] {"10 - xml empty, xpath ok, no node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-10-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/name/text()",
					""},
			new Object[] {"11 - xml corrupt, xpath ok, no node text found",
					"./src/test/resources/XmlUtilsTest/getNodeTextByXPath/getNodeTextByXPath_ok-11-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/name/text()",
					""},
	};
}
//-----------------------------------------------------------------------------
// Verifies XmlUtils.getNodeByXPath: reads the XML fixture, selects the node at
// the xpath and compares the ", "-joined representations of the named children.
@Test(dataProvider = "data_getNodeByXPath_ok")
public void test_getNodeByXPath_ok(String testDesc, String xmlFile, String xpath, String childNodeNames, String expRes) {
	log.info("{} ... started", testDesc);

	List<String> childNodes = new ArrayList<>();
	try {
		String xml = new String(Files.readAllBytes(Paths.get(xmlFile)));
		childNodes = XmlUtils.getNodeByXPath(xml, xpath, childNodeNames);
	} catch (Exception ex) {
		log.error("... failed");
		fail("No exception expected. Maybe file '" + xmlFile + "' not found!\n" + "Error: " + ex.getMessage());
	}

	assertThat(String.join(", ", childNodes)).isEqualTo(expRes);
	log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
// Data rows: {description, XML fixture file, xpath selecting the parent node,
// comma-separated child tag names to extract, expected joined result}
@DataProvider
public Object[][] data_getNodeByXPath_ok() {
	return new Object[][] {
			new Object[] {"01 - xml ok, xpath ok, child list ok, 1 node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-01-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"country",
					"country=GB"},
			new Object[] {"02 - xml ok, xpath with slash, child list ok, 1 node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-02-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/",
					"country",
					"country=GB"},
			new Object[] {"03 - xml ok, xpath ok, child list ok, 1 complex node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-03-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"area",
					"name=United Kingdom,sort-name=United Kingdom,iso-3166-1-code=GB"},
			new Object[] {"04 - xml ok, xpath ok, child list ok, 14 nodes found, text utf8",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-04-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/alias-list",
					"alias",
					"alias=B,alias=Be,alias=Beat,alias=Beatles,alias=Beetles,alias=Los Beatles,alias=The Beatles,alias=The Savage Young Beatles,alias=fab four,"+
					"alias=Битлз,alias=ザ・ビートルズ,alias=披头士,alias=披頭四,alias=더 비틀즈"},
			new Object[] {"05 - xml ok, xpath ok, child list ok with 2 items, 2 complex nodes found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-05-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"area, begin-area",
					"name=United Kingdom,sort-name=United Kingdom,iso-3166-1-code=GB,name=Liverpool,sort-name=Liverpool,iso-3166-2-code=GB-LIV"},
			new Object[] {"06 - xml ok, xpath ok, child list ok, no nodes found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-06-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"wrong-tag",
					""},
			new Object[] {"07- xml ok, xpath ok, child list ok one child found one child not found, 1 complex node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-07-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"wrong-tag, area",
					"name=United Kingdom,sort-name=United Kingdom,iso-3166-1-code=GB"},
			new Object[] {"08- xml ok, xpath ok, child empty, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-08-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"",
					""},
			new Object[] {"09- xml ok, xpath ok, child null, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-09-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					null,
					""},
			new Object[] {"10 - xml ok, xpath with slash, child list ok, 1 complex node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-10-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/",
					"area",
					"name=United Kingdom,sort-name=United Kingdom,iso-3166-1-code=GB"},
			new Object[] {"11 - xml ok, xpath empty, child list ok, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-11-inp.xml",
					"",
					"area",
					""},
			new Object[] {"12 - xml ok, xpath null, child list ok, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-12-inp.xml",
					null,
					"area",
					""},
			new Object[] {"13 - xml ok, xpath wrong, child list ok, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-13-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']/texts()",
					"area",
					""},
			new Object[] {"14 - xml empty, xpath ok, child list ok, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-14-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"area",
					""},
			new Object[] {"15 - xml corrupt, xpath ok, child list ok, no node found",
					"./src/test/resources/XmlUtilsTest/getNodeByXPath/getNodeByXPath_ok-15-inp.xml",
					"/metadata/artist[@id='b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d']",
					"area",
					""},
	};
}
//-----------------------------------------------------------------------------
// Verifies XmlUtils.getNodeContentByXPath. Each expected row is a
// "key::value" list (comma separated); list-valued results are re-joined with
// "¦" before comparing. Kept byte-identical: the instanceof-based type
// sniffing and unchecked casts are deliberate.
@SuppressWarnings("unchecked")
@Test(dataProvider = "data_getNodeContentByXPath_ok")
public void test_getNodeContentByXPath_ok(String testDesc, String xmlFile, String xpath, List<String> expList) {
	log.info("{} ... started", testDesc);

	List<HashMap<String, Object>> resList = new ArrayList<>();
	try {
		// Load the fixture and extract one attribute map per matched node
		String xmlStr = new String(Files.readAllBytes(Paths.get(xmlFile)));
		resList = XmlUtils.getNodeContentByXPath(xmlStr, xpath);
	}
	catch (Exception ex) {
		log.error("... failed");
		fail("No exception expected. Maybe file '" + xmlFile + "' not found!\n" + "Error: " + ex.getMessage());
	}

	// Compare each expected map (parsed from "key::value" pairs) against the
	// result map at the same position
	for(int i = 0; i < expList.size(); i++) {
		String expMapStr = expList.get(i);
		HashMap<String, String> expMap = MapUtils.array2Map("::", expMapStr.split(","));
		HashMap<String, Object> resMap = resList.get(i);
		for(String key : expMap.keySet()) {
			String expVal = expMap.get(key);
			String resVal;
			// Multi-valued entries come back as lists; join them for comparison
			if(resMap.get(key) instanceof ArrayList) {
				resVal = String.join("¦", (List<String>)resMap.get(key));
			} else {
				resVal = (String) resMap.get(key);
			}
			assertThat(resVal).isEqualTo(expVal);
		}
	}
	log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
//DOC: nok
	/**
	 * Test data for {@code test_getNodeContentByXPath_ok}: each row holds a
	 * description, an input XML file path, an XPath expression and the expected
	 * node content encoded as "key::value" pairs, one string per expected node
	 * (list values joined with '¦').
	 */
	@DataProvider
	public Object[][] data_getNodeContentByXPath_ok() {
		return new Object[][] {
			new Object[] {"01 - xml ok, xpath ok, child list ok, 2 nodes found",
				"./src/test/resources/XmlUtilsTest/getNodeContentByXPath/getNodeContentByXPath_ok-01-inp.xml",
				"/metadata/release-group/relation-list[@target-type='release_group']/relation[@type='mashes up' or @type='included in']",
				Arrays.asList("type-id::03786c2a-cd9d-4148-b3ea-35ea61de1283,id::5d9261c0-3ccf-3154-b331-ddde4c7b8ee0,type::mashes up,title::The Grey Album,first-release-date::2004-02,target::5d9261c0-3ccf-3154-b331-ddde4c7b8ee0,direction::backward",
						"type-id::589447ea-be2c-46cc-b9e9-469e1d06e18a,id::8a54edf1-1d51-40fd-8a08-01b3d0ddd874,type::included in,title::The Beatles Box Set,first-release-date::1988-11-15,target::8a54edf1-1d51-40fd-8a08-01b3d0ddd874")},
			new Object[] {"02 - 6 nodes found, one list attribute",
				"./src/test/resources/XmlUtilsTest/getNodeContentByXPath/getNodeContentByXPath_ok-02-inp.xml",
				"/metadata/artist/relation-list[@target-type='artist']/relation[@type='member of band']",
				Arrays.asList("target::0d4ab0f9-bbda-4ab1-ae2c-f772ffcfbea9,name::Pete Best,attribute::drums",
						"target::300c4c73-33ac-4255-9d57-4e32627f5e13,name::Ringo Starr,attribute::drums",
						"target::42a8f507-8412-4611-854f-926571049fa0,name::George Harrison,attribute.list::guitar¦lead vocals¦original")},
			new Object[] {"03 - 6 nodes found, one list attribute, one list type from nodes and attributes",
				"./src/test/resources/XmlUtilsTest/getNodeContentByXPath/getNodeContentByXPath_ok-03-inp.xml",
				"/metadata/artist/relation-list[@target-type='artist']/relation[@type='member of band']",
				Arrays.asList("target::0d4ab0f9-bbda-4ab1-ae2c-f772ffcfbea9,name::Pete Best,attribute::drums,type.list::member of band¦member of band2¦member of band3",
						"target::300c4c73-33ac-4255-9d57-4e32627f5e13,name::Ringo Starr,attribute::drums",
						"target::42a8f507-8412-4611-854f-926571049fa0,name::George Harrison,attribute.list::guitar¦lead vocals¦original")},
		};
	}
//-----------------------------------------------------------------------------
@Test(dataProvider = "data_getNodeAttributeTextByXPath_ok")
public void test_getNodeAttributeTextByXPath_ok(String testDesc, String xmlFile, String xpath, String attrName, String expRes) {
log.info("{} ... started", testDesc);
List<String> resList = new ArrayList<>();
try {
String xmlStr = new String(Files.readAllBytes(Paths.get(xmlFile)));
if(StringUtils.isNotEmpty(attrName)) {
resList = XmlUtils.getNodeAttributeTextByXPath(xmlStr, xpath, attrName);
}
else {
resList = XmlUtils.getNodeAttributeTextByXPath(xmlStr, xpath);
}
}
catch (Exception ex) {
log.error("... failed");
fail("No exception expected. Maybe file '" + xmlFile + "' not found!\n" + "Error: " + ex.getMessage());
}
String res = String.join(", ", resList);
assertThat(res).isEqualTo(expRes);
log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
//DOC: nok
	/**
	 * Test data for {@code test_getNodeAttributeTextByXPath_ok}: description,
	 * input XML file, XPath, attribute name (empty/null selects all attributes)
	 * and the expected "name=value" entries joined by ", ".
	 */
	@DataProvider
	public Object[][] data_getNodeAttributeTextByXPath_ok() {
		return new Object[][] {
			new Object[] {"01 - xml ok, xpath ok, attribute name ok, 1 attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-01-inp.xml",
				"/metadata/artist",
				"id",
				"id=b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d"},
			new Object[] {"02 - xml ok, xpath ok, attribute name ok, many node with attributes found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-02-inp.xml",
				"/metadata/artist/alias-list/alias",
				"sort-name",
				"sort-name=B, sort-name=Be, sort-name=Beat, sort-name=Beatles, sort-name=Beetles, sort-name=Los Beatles, sort-name=Beatles, The, sort-name=Savage Young Beatles, The, sort-name=fab four, sort-name=Битлз, sort-name=ビートルズ (ザ), sort-name=披头士, sort-name=披頭四, sort-name=더 비틀즈"},
			new Object[] {"03 - xml ok, xpath ok, attribute name ok, no attributes found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-03-inp.xml",
				"/metadata/artist",
				"ids",
				""},
			new Object[] {"04 - xml ok, xpath ok, attribute empty, all attributes of this node found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-04-inp.xml",
				"/metadata/artist",
				"",
				"id=b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d, type=Group, type-id=e431f5f6-b5d2-343d-8b36-72607fffb74b"},
			new Object[] {"05 - xml ok, xpath ok, attribute null, all attributes of this node found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-05-inp.xml",
				"/metadata/artist",
				null,
				"id=b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d, type=Group, type-id=e431f5f6-b5d2-343d-8b36-72607fffb74b"},
			new Object[] {"06 - xml ok, xpath ok, two attributes, two attributes of this node found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-06-inp.xml",
				"/metadata/artist",
				"id, type",
				"id=b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d, type=Group"},
			new Object[] {"07 - xml ok, xpath ok, two attributes, one attribute of this node found and one not",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-07-inp.xml",
				"/metadata/artist",
				"id, types",
				"id=b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d"},
			new Object[] {"08 - xml ok, xpath ok - found many nodes, attribute ok, one attribute found for all nodes",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-08-inp.xml",
				"/metadata/artist/relation-list[@target-type='url']/relation[@type='lyrics']/target",
				"id",
				"id=7ccb2b9e-88f7-4030-bdde-f3eb81d16aa5, id=5f3d83eb-6ad6-4db7-a6ed-d4f437db91c6, id=b1e3f0b8-eee0-4ced-87b9-d7088481ae10, id=28f763a2-ad86-4adc-8a4a-ef5c1a666861"},
			new Object[] {"09 - xml ok, xpath ok - found many nodes, attribute empty, get all attributes of all nodes",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-09-inp.xml",
				"/metadata/artist/relation-list[@target-type='url']/relation[@type='lyrics']",
				"",
				"type=lyrics, type-id=e4d73442-3762-45a8-905c-401da65544ed, type=lyrics, type-id=e4d73442-3762-45a8-905c-401da65544ed, "+
				"type=lyrics, type-id=e4d73442-3762-45a8-905c-401da65544ed, "+
				"type=lyrics, type-id=e4d73442-3762-45a8-905c-401da65544ed"},
			new Object[] {"10 - xml ok, xpath with slash, attribute name ok, 1 attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-10-inp.xml",
				"/metadata/artist/",
				"id",
				"id=b10bbbfc-cf9e-42e0-be17-e2c3e1d2600d"},
			new Object[] {"11 - xml ok, xpath empty, attribute name ok, no attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-11-inp.xml",
				"",
				"id",
				""},
			new Object[] {"12 - xml ok, xpath null, attribute name ok, no attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-12-inp.xml",
				null,
				"id",
				""},
			new Object[] {"13 - xml ok, xpath wrong, attribute name ok, no attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-13-inp.xml",
				"/metadata/artist/attr()",
				"id",
				""},
			new Object[] {"14 - xml empty, xpath ok, attribute name ok, no attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-14-inp.xml",
				"/metadata/artist/",
				"id",
				""},
			new Object[] {"15 - xml corrupt, xpath ok, attribute name ok, no attribute found",
				"./src/test/resources/XmlUtilsTest/getNodeAttributeTextByXPath/getNodeAttributeTextByXPath_ok-15-inp.xml",
				"/metadata/artist/",
				"id",
				""},
		};
	}
//-----------------------------------------------------------------------------
@Test(dataProvider = "data_getXpathsForMatchingNodes_ok")
public void test_getXpathsForMatchingNodes_ok(String testDesc, String xmlFile, String xpath, String expRes) {
log.info("{} ... started", testDesc);
List<String> resList = new ArrayList<>();
try {
Document xmlDoc = XmlUtils.createXmlDoc(xmlFile);
resList = XmlUtils.getXpathsForMatchingNodes(xmlDoc, xpath);
}
catch (Exception ex) {
log.error("... failed");
fail("No exception expected. Maybe file '" + xmlFile + "' not found!\n" + "Error: " + ex.getMessage());
}
assertThat(resList.size()).isEqualTo(Integer.valueOf(expRes));
for(int i = 0; i < Integer.valueOf(expRes); i++) {
String resStr = resList.get(i);
assertThat(resStr).isEqualTo(xpath+'['+String.valueOf(i+1)+']');
}
log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
//DOC: nok
	/**
	 * Test data for {@code test_getXpathsForMatchingNodes_ok}: description,
	 * input XML file, XPath and the expected number of matches (as a string).
	 */
	@DataProvider
	public Object[][] data_getXpathsForMatchingNodes_ok() {
		return new Object[][] {
			new Object[] {"01 - found 4 occurences (nodes)",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingNodes/getXpathsForMatchingNodes_ok-01-inp.xml",
				"//Track[@mbid='6115ac6f-d29a-4ae2-ac58-6fac6eb71f68']/TagList/Tag[@tagName='GENRE']",
				"4"},
			new Object[] {"02 - found 4 occurences (elements) ... return list is empty",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingNodes/getXpathsForMatchingNodes_ok-02-inp.xml",
				"//Track[@mbid='6115ac6f-d29a-4ae2-ac58-6fac6eb71f68']/TagList/Tag[@tagName='GENRE']/node()",
				"0"},
			new Object[] {"03 - found 0 occurences (nodes)",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingNodes/getXpathsForMatchingNodes_ok-03-inp.xml",
				"//Track[@mbid='6115ac6f-d29a-4ae2-ac58-6fac6eb71f68']/TagList/Tag[@tagName='GENRE_x']",
				"0"},
		};
	}
//-----------------------------------------------------------------------------
@Test(dataProvider = "data_getXpathsForMatchingEmptyNodes_ok")
public void test_getXpathsForMatchingEmptyNodes_ok(String testDesc, String xmlFile, String xpath, List<String> expResList) {
log.info("{} ... started", testDesc);
List<String> resList = new ArrayList<>();
try {
Document xmlDoc = XmlUtils.createXmlDoc(xmlFile);
resList = XmlUtils.getXpathsForMatchingEmptyNodes(xmlDoc, xpath);
}
catch (Exception ex) {
log.error("... failed");
fail("No exception expected. Maybe file '" + xmlFile + "' not found!\n" + "Error: " + ex.getMessage());
}
assertThat(resList).isEqualTo(expResList);
log.info("{} ... finished successfully!", testDesc);
}
//-----------------------------------------------------------------------------
//DOC: nok
	/**
	 * Test data for {@code test_getXpathsForMatchingEmptyNodes_ok}: description,
	 * input XML file, XPath and the list of indexed XPaths expected for the
	 * empty matches (empty list when none are empty).
	 */
	@DataProvider
	public Object[][] data_getXpathsForMatchingEmptyNodes_ok() {
		return new Object[][] {
			new Object[] {"01 - found 2 occurences, 1 empty node, 1 with attributes",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingEmptyNodes/getXpathsForMatchingEmptyNodes_ok-01-inp.xml",
				"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId",
				Collections.singletonList("/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[2]")},
			new Object[] {"02 - found 2 occurences, 1 empty node, 1 with text",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingEmptyNodes/getXpathsForMatchingEmptyNodes_ok-02-inp.xml",
				"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId",
				Collections.singletonList("/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[2]")},
			new Object[] {"03 - found 2 occurences, 2 empty nodes",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingEmptyNodes/getXpathsForMatchingEmptyNodes_ok-03-inp.xml",
				"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId",
				Arrays.asList("/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[1]",
						"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[2]")},
			new Object[] {"04 - found 4 occurences, 2 empty nodes, 1 with attributes, 1 with text",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingEmptyNodes/getXpathsForMatchingEmptyNodes_ok-04-inp.xml",
				"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId",
				Arrays.asList("/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[2]",
						"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[3]")},
			new Object[] {"05 - found 4 occurences, 2 empty nodes, 1 with attributes, 1 with text, 1 with blank text",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingEmptyNodes/getXpathsForMatchingEmptyNodes_ok-05-inp.xml",
				"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId",
				Arrays.asList("/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[2]",
						"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId[3]")},
			new Object[] {"06 - found 4 occurences, 0 empty nodes",
				"./src/test/resources/XmlUtilsTest/getXpathsForMatchingEmptyNodes/getXpathsForMatchingEmptyNodes_ok-06-inp.xml",
				"/Artist/Discography/ReleaseGroupList/ReleaseGroup/ArtistCreditList/ArtistCredits/UniqueIdList/UniqueId",
				new ArrayList<String>()},
		};
	}
//=============================================================================
/*
* ANNOTATED METHODS (public)
*/
	@BeforeClass
	public static void setLogger() {
		// The system property must be set before the first LogManager call,
		// otherwise log4j2 would already have loaded a default configuration.
		System.setProperty("log4j.configurationFile","./src/test/resources/log4j2-testing.xml");
		log = LogManager.getLogger(XmlUtilsTest.class);
	}
//-----------------------------------------------------------------------------
} | [
"wagenge@gmail.com"
] | wagenge@gmail.com |
5a0b78b43aaf67bf3257e277c8677696eb8e3447 | d97dbc5483ecaefb76c4c2c33e37d112223c7ef7 | /build/generated/source/aidl/debug/com/newonevpn/vpn/api/IOpenVPNStatusCallback.java | 116c72ba56a6b692d956c9a3297034226c2e5fd8 | [] | no_license | SuPair/OneVPNAndroid | 96574f40bc411edfaea7b92426399f4025d360bc | 4948a127081c10ee8ff4180dc939721731cc255a | refs/heads/master | 2020-04-04T02:46:28.960225 | 2016-10-07T11:55:11 | 2016-10-07T11:55:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,324 | java | /*
* This file is auto-generated. DO NOT MODIFY.
* Original file: /Users/intermarketsecurities/Desktop/Projects/Android/OneVPN/src/com/newonevpn/vpn/api/IOpenVPNStatusCallback.aidl
*/
package com.newonevpn.vpn.api;
/**
* Example of a callback interface used by IRemoteService to send
* synchronous notifications back to its clients. Note that this is a
* one-way interface so the server does not block waiting for the client.
*/
// NOTE: this interface lives in a generated source folder; to change it,
// edit IOpenVPNStatusCallback.aidl and rebuild instead of editing this file.
public interface IOpenVPNStatusCallback extends android.os.IInterface
{
/** Local-side IPC implementation stub class. */
public static abstract class Stub extends android.os.Binder implements com.newonevpn.vpn.api.IOpenVPNStatusCallback
{
// Token identifying this interface across the Binder boundary.
private static final java.lang.String DESCRIPTOR = "com.newonevpn.vpn.api.IOpenVPNStatusCallback";
/** Construct the stub and attach it to the interface. */
public Stub()
{
this.attachInterface(this, DESCRIPTOR);
}
/**
 * Cast an IBinder object into an com.newonevpn.vpn.api.IOpenVPNStatusCallback interface,
 * generating a proxy if needed.
 */
public static com.newonevpn.vpn.api.IOpenVPNStatusCallback asInterface(android.os.IBinder obj)
{
if ((obj==null)) {
return null;
}
// Same-process case: reuse the local implementation instead of marshalling.
android.os.IInterface iin = obj.queryLocalInterface(DESCRIPTOR);
if (((iin!=null)&&(iin instanceof com.newonevpn.vpn.api.IOpenVPNStatusCallback))) {
return ((com.newonevpn.vpn.api.IOpenVPNStatusCallback)iin);
}
// Remote-process case: wrap the binder in a proxy that marshals calls.
return new com.newonevpn.vpn.api.IOpenVPNStatusCallback.Stub.Proxy(obj);
}
@Override public android.os.IBinder asBinder()
{
return this;
}
// Unmarshals incoming transactions and dispatches them to newStatus().
@Override public boolean onTransact(int code, android.os.Parcel data, android.os.Parcel reply, int flags) throws android.os.RemoteException
{
switch (code)
{
case INTERFACE_TRANSACTION:
{
reply.writeString(DESCRIPTOR);
return true;
}
case TRANSACTION_newStatus:
{
data.enforceInterface(DESCRIPTOR);
java.lang.String _arg0;
_arg0 = data.readString();
java.lang.String _arg1;
_arg1 = data.readString();
java.lang.String _arg2;
_arg2 = data.readString();
java.lang.String _arg3;
_arg3 = data.readString();
this.newStatus(_arg0, _arg1, _arg2, _arg3);
return true;
}
}
return super.onTransact(code, data, reply, flags);
}
// Client-side proxy: marshals newStatus() calls into a one-way Binder transaction.
private static class Proxy implements com.newonevpn.vpn.api.IOpenVPNStatusCallback
{
private android.os.IBinder mRemote;
Proxy(android.os.IBinder remote)
{
mRemote = remote;
}
@Override public android.os.IBinder asBinder()
{
return mRemote;
}
public java.lang.String getInterfaceDescriptor()
{
return DESCRIPTOR;
}
/**
 * Called when the service has a new status for you.
 */
@Override public void newStatus(java.lang.String uuid, java.lang.String state, java.lang.String message, java.lang.String level) throws android.os.RemoteException
{
android.os.Parcel _data = android.os.Parcel.obtain();
try {
_data.writeInterfaceToken(DESCRIPTOR);
_data.writeString(uuid);
_data.writeString(state);
_data.writeString(message);
_data.writeString(level);
// FLAG_ONEWAY: fire-and-forget, the caller does not block on the service.
mRemote.transact(Stub.TRANSACTION_newStatus, _data, null, android.os.IBinder.FLAG_ONEWAY);
}
finally {
_data.recycle();
}
}
}
static final int TRANSACTION_newStatus = (android.os.IBinder.FIRST_CALL_TRANSACTION + 0);
}
/**
 * Called when the service has a new status for you.
 */
public void newStatus(java.lang.String uuid, java.lang.String state, java.lang.String message, java.lang.String level) throws android.os.RemoteException;
}
| [
"intermarketsecurities@Intermarkets-MacBook-Pro.local"
] | intermarketsecurities@Intermarkets-MacBook-Pro.local |
1a77ea9b16521a765189eca80ff148969ec54408 | ab3f61621a19c377b58f7f4f17e0950674b1f0c5 | /src/project_trie/desktop/TablePanel.java | 15a5ba15e5a6ec3e765dbfc59ef56a1a6c9441ec | [] | no_license | boyskila/DictionaryProject | fc2bb3f591386ab1e870a5cd91fa51bb33834ee7 | 8bed67ba05edc059fcdccca0ea8ca780087c72ae | refs/heads/master | 2016-08-11T19:55:17.637721 | 2016-02-08T11:16:47 | 2016-02-08T11:16:47 | 51,292,355 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 5,145 | java | package project_trie.desktop;
import java.awt.Color;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import project_trie.trie.FileManager;
/**
 * Panel showing one dictionary {@code Table} inside a scroll pane together
 * with "view", "edit" and "remove" buttons. The button actions manipulate
 * both the table and the persistent word store ({@code FileManager.dataBase}).
 */
public class TablePanel extends JPanel {

	private static final long serialVersionUID = 1L;
	// Height of the table viewport in pixels, capped at 500.
	private int height;
	private Table table;
	private JButton edit;
	private JButton remove;
	private JButton viewDescription;
	// Lazily created sub-views, replaced on every "view" / "edit" click.
	private DescriptionLable label;
	private EditPanel editPanel;
	private JScrollPane sp;

	/**
	 * Builds the panel and its three buttons; the table itself is attached
	 * later via {@link #addTable(Table, boolean)}.
	 */
	public TablePanel() {
		setLayout(null);
		// setBackground(Color.ORANGE);
		edit = new JButton("edit");
		viewDescription = new JButton("view");
		//setBackground(Color.WHITE);
		remove = new JButton("remove");
		height = 500;
	}

	/**
	 * Installs the table in a scroll pane, positions the buttons below it and
	 * registers all button listeners.
	 *
	 * NOTE(review): each call registers fresh listeners on the same buttons, so
	 * calling this twice would duplicate actions - confirm it runs only once
	 * per panel instance.
	 *
	 * @param table     table to display
	 * @param setHeight if true, the viewport height follows the table's
	 *                  preferred size (still capped at 500 px)
	 */
	public void addTable(Table table, boolean setHeight) {
		setBackground(Color.ORANGE);
		this.table = table;
		if (setHeight) {
			height = (int) getTable().getPreferredSize().getHeight();
		}
		if (height > 500) {
			height = 500;
		}
		sp = new JScrollPane(this.table);
		sp.getViewport().setBackground(Color.ORANGE);
		sp.setBorder(BorderFactory.createEmptyBorder());
		sp.setBounds(5, 8, 527, height + 22);
		add(sp);
		edit.setBounds(430, height + 40, 100, 30);
		add(edit);
		viewDescription.setBounds(210, height + 40, 100, 30);
		add(viewDescription);
		remove.setBounds(320, height + 40, 100, 30);
		add(remove);
		fireEdit();
		fireView();
		fireRemove();
	}

	/**
	 * Registers the "remove" action: after user confirmation the selected word
	 * is deleted from the table and the database, the autocomplete data is
	 * refreshed and the surrounding view is rebuilt when the table shrinks.
	 */
	public void fireRemove() {
		remove.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent e) {
				String key = table.getColumnValue(1);
				if (new MessageDialog()
						.isMessageAnswerPositive("Are you sure you want to delete "
								+ key)) {
					table.removeRow();
					FileManager.dataBase.remove(key);
					FileManager.saveChanges();
					Autocomplete.updateAutocomplete(FileManager.dataBase.list());
					table.resetFirstColumn();
					// Fewer than 10 rows left: rebuild the holder for this letter
					// from the database and show it again in the card layout.
					if (table.getRowCount() < 10) {
						String letter = String.valueOf(key.charAt(0));
						//MainPanel.tableHolder.remove(MainPanel.tableHolder.getNext());
						MainPanel.bottom.remove(MainPanel.tableHolder);
						MainPanel.tableHolder = new TablePanelHolder(FileManager.dataBase.list(letter));
						MainPanel.bottom.add(MainPanel.tableHolder,"tpt");
						MainPanel.cl.show(MainPanel.bottom, "tpt");
						revalidate();
						repaint();
					}
					// Table emptied completely: clear this panel and flip back to
					// the previous card; fall back to a plain repaint on failure.
					if (table.getRowCount() == 0) {
						try {
							removeAll();
							TablePanelHolder.cl
									.previous(TablePanelHolder.bottom);
						} catch (Exception e2) {
							removeAll();
							revalidate();
							repaint();
						}
					}
				}
			}
		});
	}

	/**
	 * Registers the "edit" action: replaces any open view/edit sub-panel with
	 * a fresh {@code EditPanel} for the currently selected row.
	 */
	public void fireEdit() {
		edit.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent e) {
				removeComponent(label);
				removeComponent(editPanel);
				if (table.isRowSelected()) {
					editPanel = new EditPanel(table.getColumnValue(1), table
							.getColumnValue(2));
					add(editPanel);
					fireSave();
					revalidate();
					repaint();
				}
			}
		});
	}

	/**
	 * Registers the "save" action of the edit panel: persists a changed
	 * description (and possibly a renamed word) back to the database and the
	 * table, then hides the edit panel.
	 */
	public void fireSave() {
		// NOTE(review): this ButtonAction instance is created only for its
		// constructor side effects and immediately discarded - verify intent.
		new ButtonAction(editPanel.getSaveButton(), editPanel.getEditArea());
		editPanel.getSaveButton().addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent e) {
				String wordFromTable = table.getColumnValue(1);
				String wordFromEditPAnel = editPanel.getWordField().getText();
				String value = editPanel.getEditArea().getText();
				if (value.length() < 1100) {
					removeComponent(editPanel);
					revalidate();
					repaint();
					if (wordFromTable != null) {
						if (wordFromEditPAnel.equals(wordFromTable)) {
							// Same word: only the description changed.
							FileManager.dataBase.update(wordFromTable, value);
							table.setColumnValue(value, 2);
						} else {
							// Word renamed: replace the old entry entirely.
							FileManager.dataBase.remove(wordFromTable);
							FileManager.dataBase.add(wordFromEditPAnel, value);
							table.setColumnValue(wordFromEditPAnel, 1);
							table.setColumnValue(value, 2);
							Autocomplete
									.updateAutocomplete(FileManager.dataBase
											.list());
						}
						table.setColumnValue(false, 3);
						FileManager.saveChanges();
					} else {
						// NOTE(review): this 'else' pairs with the null check on
						// wordFromTable, yet it shows the length-limit message;
						// it looks like it was meant for the outer length check.
						new MessageDialog("", "reduce text description with "
								+ (value.length() - 1100) + " symbols");
					}
				}
			}
		});
	}

	/**
	 * Registers the "view" action: shows the selected word's description in a
	 * {@code DescriptionLable}; prompts for a row selection otherwise.
	 */
	public void fireView() {
		viewDescription.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent e) {
				try {
					if (table.isRowSelected()) {
						removeComponent(label);
						removeComponent(editPanel);
						label = new DescriptionLable(table.getColumnValue(2),
								table.getColumnValue(1));
						add(label);
						revalidate();
						repaint();
					}
				} catch (Exception e2) {
					new MessageDialog("", "Please select row");
				}
			}
		});
	}

	/** Removes the given sub-view from this panel if it was ever created. */
	public void removeComponent(Component component) {
		if (component != null) {
			remove(component);
		}
	}

	public DescriptionLable getLabel() {
		return label;
	}

	public EditPanel getEditPanel() {
		return editPanel;
	}

	public Table getTable() {
		return table;
	}
}
"bmlalov@abv.bg"
] | bmlalov@abv.bg |
b01927a3fef62f4edccd8081c2df7ffd0b945a6a | 065c1f648e8dd061a20147ff9c0dbb6b5bc8b9be | /drjava_cluster/12844/tar_0.java | 235bcab1c46f72590684d22c35c331e22d826f4b | [] | no_license | martinezmatias/GenPat-data-C3 | 63cfe27efee2946831139747e6c20cf952f1d6f6 | b360265a6aa3bb21bd1d64f1fc43c3b37d0da2a4 | refs/heads/master | 2022-04-25T17:59:03.905613 | 2020-04-15T14:41:34 | 2020-04-15T14:41:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,569 | java | /*
* DynamicJava - Copyright (C) 1999-2001
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the
* following conditions:
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL DYADE BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Dyade shall not be
* used in advertising or otherwise to promote the sale, use or other
* dealings in this Software without prior written authorization from
* Dyade.
*
*/
package koala.dynamicjava.tree;
import java.util.*;
import koala.dynamicjava.tree.visitor.*;
/**
 * This class represents the reference type nodes of the syntax tree.
 * A reference type name is a dot-separated sequence of identifier tokens
 * (e.g. "java.lang.String").
 *
 * @author Stephane Hillion
 * @version 1.0 - 1999/04/24
 */
public class ReferenceTypeName extends TypeName {
  // TODO: Develop a better representation with more structure (resolving package/class ambiguities).

  /**
   * The representation property name
   */
  public final static String REPRESENTATION = "representation";

  /**
   * The dotted string representation of this type
   */
  private String representation;

  /**
   * The identifiers property name
   */
  public final static String IDENTIFIERS = "identifiers";

  /**
   * The identifier tokens that compose this type name
   */
  private List<? extends IdentifierToken> identifiers;

  /**
   * Initializes the type
   * @param ids the list of the tokens that compose the type name
   * @exception IllegalArgumentException if ids is null or empty
   */
  public ReferenceTypeName(List<? extends IdentifierToken> ids) {
    this(ids, SourceInfo.NONE);
  }

  public ReferenceTypeName(IdentifierToken... ids) {
    this(Arrays.asList(ids));
  }

  public ReferenceTypeName(String... names) {
    this(stringsToIdentifiers(names));
  }

  /** Wraps each plain name into an Identifier token. */
  private static IdentifierToken[] stringsToIdentifiers(String[] names) {
    IdentifierToken[] ids = new IdentifierToken[names.length];
    for (int i = 0; i < names.length; i++) {
      ids[i] = new Identifier(names[i]);
    }
    return ids;
  }

  /**
   * Initializes the type
   * @param ids the list of the tokens that compose the type name
   * @param si  the source location of this node
   * @exception IllegalArgumentException if ids is null or empty
   */
  public ReferenceTypeName(List<? extends IdentifierToken> ids, SourceInfo si) {
    super(si);
    checkIdentifiers(ids);
    identifiers = ids;
    representation = TreeUtilities.listToName(ids);
  }

  /**
   * Validates an identifier list; shared by the constructor and
   * {@link #setIdentifiers} so the rules cannot drift apart.
   * @exception IllegalArgumentException if ids is null or empty
   */
  private static void checkIdentifiers(List<? extends IdentifierToken> ids) {
    if (ids == null) throw new IllegalArgumentException("ids == null");
    if (ids.size() == 0) throw new IllegalArgumentException("ids.size() == 0");
  }

  /**
   * Returns the representation of this type
   */
  public String getRepresentation() {
    return representation;
  }

  /**
   * Returns the list of identifiers that make up this type
   */
  public List<? extends IdentifierToken> getIdentifiers() {
    return identifiers;
  }

  /**
   * Sets the identifiers of this type, updates the cached representation and
   * fires property change events for both properties.
   * @exception IllegalArgumentException if ids is null or empty
   */
  public void setIdentifiers(List<? extends IdentifierToken> ids) {
    checkIdentifiers(ids);
    firePropertyChange(IDENTIFIERS, identifiers, identifiers = ids);
    firePropertyChange(REPRESENTATION, representation, representation = TreeUtilities.listToName(ids));
  }

  /**
   * Allows a visitor to traverse the tree
   * @param visitor the visitor to accept
   */
  public <T> T acceptVisitor(Visitor<T> visitor) {
    return visitor.visit(this);
  }

  /**
   * Implementation of toString for use in unit testing
   */
  public String toString() {
    return "("+getClass().getName()+": "+toStringHelper()+")";
  }

  protected String toStringHelper() {
    return getRepresentation();
  }
}
| [
"375833274@qq.com"
] | 375833274@qq.com |
7e75a9b29b78e6a1d71c01e7942db52083a6a2b2 | 87125856603d443f05a75a89ae378bff62ccac4c | /app/src/androidTest/java/com/codingurbrain/gradientbuttonstyleandroid/ExampleInstrumentedTest.java | 52d9d58e20e32d25218cd880cb4ba00714beec69 | [] | no_license | Bikiprasad/GradientButtonStyleAndroid | b75cbb1170cb0eac06312bb80f6ea41bbadb146e | dfdc6baf060c2d4c96f2d9016b40e7b2aff69f7f | refs/heads/master | 2022-12-08T22:29:25.847293 | 2020-09-16T20:51:30 | 2020-09-16T20:51:30 | 293,904,016 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 802 | java | package com.codingurbrain.gradientbuttonstyleandroid;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.codingurbrain.gradientbuttonstyleandroid", appContext.getPackageName());
}
} | [
"biki13prasad@gmail.com"
] | biki13prasad@gmail.com |
1c3af6ce1245eda7cb19a3c089c9a2d146828ffe | 6498ae535f172b5ff6f0f9a8b4f414cfeb0c72cb | /DS/Matrix/SearchEleInSortedMatrix.java | 5ff077e65264be945513f5cf49c7dcf903a28e5e | [] | no_license | yogesh1811/mycode | 65d4b3a8e47773ebee092cea1bdd98d33de7ea42 | ab787f0e3965c522d3fae29563f61c20c1512517 | refs/heads/master | 2021-01-20T11:10:29.781065 | 2017-08-30T16:46:45 | 2017-08-30T16:46:45 | 101,665,394 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 668 | java | package DS.Matrix;
public class SearchEleInSortedMatrix {

	/**
	 * Demo entry point: searches a row-wise and column-wise sorted matrix
	 * for the value 29 and prints the result.
	 *
	 * @param args unused command line arguments
	 */
	public static void main(String[] args) {
		int arr[][] = { { 10, 20, 30, 40 }, { 15, 25, 35, 45 },
				{ 27, 29, 37, 48 }, { 32, 33, 39, 50 } };
		// Derive the dimension from the matrix instead of hard-coding 4.
		search(arr, arr.length, 29);
	}

	/**
	 * Searches an n x n matrix whose rows and columns are both sorted in
	 * ascending order. Starting from the top-right corner, each comparison
	 * discards either one row or one column, giving O(n) time.
	 *
	 * @param arr n x n matrix sorted by rows and by columns
	 * @param n   dimension of the matrix
	 * @param ele value to look for
	 */
	private static void search(int[][] arr, int n, int ele) {
		int i = 0, j = n - 1; // set indexes for the top right element
		while (i < n && j >= 0) {
			if (arr[i][j] == ele) {
				System.out.print(ele + " Found at arr[" + i + "][" + j + "]");
				return;
			}
			if (arr[i][j] > ele)
				j--; // too large: everything below in this column is larger too
			else
				i++; // too small: everything left in this row is smaller too
		}
		// Fixed: the original printed a literal 'n' ("n Element not found")
		// because the newline escape was missing.
		System.out.print("\nElement not found");
	}
}
| [
"noreply@github.com"
] | yogesh1811.noreply@github.com |
651b8f80a87a164f71410dda6f0afb27fa9c01e2 | f5e8ee5cb5999a51d56934f108960bddcc062c20 | /java/src/main/java/kanzi/bitstream/DefaultOutputBitStream.java | 085ecce1e49afa02bc3b258d641e93233c51dc68 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | ndl/kanzi | 7b27156764356cadfccddcf196a402614b2b30a9 | 022d34c1b2c02f84db27f5222ec896533f0d524a | refs/heads/master | 2022-12-18T19:57:19.414538 | 2020-09-27T19:14:07 | 2020-09-27T19:14:07 | 299,101,235 | 0 | 0 | Apache-2.0 | 2020-09-27T19:11:07 | 2020-09-27T19:11:06 | null | UTF-8 | Java | false | false | 7,634 | java | /*
Copyright 2011-2017 Frederic Langlet
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
you may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kanzi.bitstream;
import kanzi.BitStreamException;
import java.io.IOException;
import java.io.OutputStream;
import kanzi.Memory;
import kanzi.OutputBitStream;
public final class DefaultOutputBitStream implements OutputBitStream
{
private final OutputStream os;
private byte[] buffer;
private boolean closed;
private int position; // index of current byte in buffer
private int availBits; // bits not consumed in current
private long written;
private long current; // cached bits
public DefaultOutputBitStream(OutputStream os, int bufferSize)
{
if (os == null)
throw new NullPointerException("Invalid null output stream parameter");
if (bufferSize < 1024)
throw new IllegalArgumentException("Invalid buffer size (must be at least 1024)");
if (bufferSize > 1<<28)
throw new IllegalArgumentException("Invalid buffer size (must be at most 268435456)");
if ((bufferSize & 7) != 0)
throw new IllegalArgumentException("Invalid buffer size (must be a multiple of 8)");
this.os = os;
this.buffer = new byte[bufferSize];
this.availBits = 64;
}
// Write least significant bit of the input integer. Trigger exception if stream is closed
@Override
public void writeBit(int bit)
{
if (this.availBits <= 1) // availBits = 0 if stream is closed => force pushCurrent()
{
this.current |= (bit & 1);
this.pushCurrent();
}
else
{
this.availBits--;
this.current |= ((long) (bit & 1) << this.availBits);
}
}
// Write 'count' (in [1..64]) bits. Trigger exception if stream is closed
@Override
public int writeBits(long value, int count)
{
if (count == 0)
return 0;
if (count > 64)
throw new IllegalArgumentException("Invalid bit count: "+count+" (must be in [1..64])");
this.current |= ((value << (64 - count)) >>> (64 - this.availBits));
int remaining = count;
if (count >= this.availBits) {
remaining -= this.availBits;
pushCurrent();
if (remaining != 0)
this.current = value << (64 - remaining);
}
this.availBits -= remaining;
return count;
}
@Override
public int writeBits(byte[] bits, int start, int count)
{
if (this.isClosed() == true)
throw new BitStreamException("Stream closed", BitStreamException.STREAM_CLOSED);
if ((count>>3) > bits.length-start)
throw new IllegalArgumentException("Invalid length: "+count+" (must be in [1.." +
(((long)(bits.length-start))<<3) + "])");
int remaining = count;
// Byte aligned cursor ?
if ((this.availBits & 7) == 0)
{
// Fill up this.current
while ((this.availBits != 64) && (remaining >= 8))
{
this.writeBits((long) bits[start], 8);
start++;
remaining -= 8;
}
// Copy bits array to internal buffer
while ((remaining>>3) >= this.buffer.length-this.position)
{
System.arraycopy(bits, start, this.buffer, this.position, this.buffer.length-this.position);
start += (this.buffer.length-this.position);
remaining -= ((this.buffer.length-this.position)<<3);
this.position = this.buffer.length;
this.flush();
}
final int r = (remaining>>6) << 3;
if (r > 0)
{
System.arraycopy(bits, start, this.buffer, this.position, r);
this.position += r;
start += r;
remaining -= (r<<3);
}
}
else
{
// Not byte aligned
if (remaining >= 64)
{
final int r = 64 - this.availBits;
while (remaining >= 64)
{
final long value = Memory.BigEndian.readLong64(bits, start);
this.current |= (value >>> r);
this.pushCurrent();
this.current = (value << -r);
start += 8;
remaining -= 64;
}
this.availBits -= r;
}
}
// Last bytes
while (remaining >= 8)
{
this.writeBits((long) (bits[start]&0xFF), 8);
start++;
remaining -= 8;
}
if (remaining > 0)
this.writeBits((long) (bits[start]>>>(8-remaining)), remaining);
return count;
}
// Push 64 bits of current value into buffer.
private void pushCurrent()
{
Memory.BigEndian.writeLong64(this.buffer, this.position, this.current);
this.availBits = 64;
this.current = 0;
this.position += 8;
if (this.position >= this.buffer.length)
this.flush();
}
// Write buffer to underlying stream
private void flush() throws BitStreamException
{
if (this.isClosed() == true)
throw new BitStreamException("Stream closed", BitStreamException.STREAM_CLOSED);
try
{
if (this.position > 0)
{
this.os.write(this.buffer, 0, this.position);
this.written += (this.position << 3);
this.position = 0;
}
}
catch (IOException e)
{
throw new BitStreamException(e.getMessage(), BitStreamException.INPUT_OUTPUT);
}
}
@Override
public void close()
{
if (this.isClosed() == true)
return;
final int savedBitIndex = this.availBits;
final int savedPosition = this.position;
final long savedCurrent = this.current;
try
{
// Push last bytes (the very last byte may be incomplete)
for (int shift=56; this.availBits<64; shift-=8)
{
this.buffer[this.position++] = (byte) (this.current>>shift);
this.availBits += 8;
}
this.written -= (this.availBits-64);
this.availBits = 64;
this.flush();
}
catch (BitStreamException e)
{
// Revert fields to allow subsequent attempts in case of transient failure
this.position = savedPosition;
this.availBits = savedBitIndex;
this.current = savedCurrent;
throw e;
}
try
{
this.os.flush();
}
catch (IOException e)
{
throw new BitStreamException(e, BitStreamException.INPUT_OUTPUT);
}
this.closed = true;
this.position = 0;
this.availBits = 0;
this.written -= 64; // adjust because this.availBits = 0
// Reset fields to force a flush() and trigger an exception
// on writeBit() or writeBits()
this.buffer = new byte[8];
}
// Return number of bits written so far
@Override
public long written()
{
// Number of bits flushed + bytes written in memory + bits written in memory
return this.written + (this.position<<3) + (64-this.availBits);
}
public boolean isClosed()
{
return this.closed;
}
} | [
"flanglet@gmail.com"
] | flanglet@gmail.com |
5afe8b8f9e1ccb4beede48bccefaeac2214229bb | f1179e6ed40033e52d98d1f133cd2419d354f798 | /android/src/main/java/com/relywisdom/usbwebrtc/UsbCameraEnumerator.java | 83259b99822a18ae35edcca1ee202d41d2bfc08d | [
"MIT"
] | permissive | sohel-khan/react-native-webrtc-usb-lib | 3521da50321aa69e8f6358b4dbe695c278baaab3 | e81d773137f8956ca8e152a8e798952b245f100e | refs/heads/main | 2023-03-08T17:14:34.943213 | 2021-02-26T12:21:15 | 2021-02-26T12:21:15 | 336,239,969 | 3 | 0 | null | null | null | null | UTF-8 | Java | false | false | 7,534 | java | package com.relywisdom.usbwebrtc;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.usb.UsbManager;
import android.os.SystemClock;
import org.webrtc.Camera1Enumerator;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.Logging;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nullable;
public class UsbCameraEnumerator implements CameraEnumerator {
private final UsbManager mUsbManager;
public UsbCameraEnumerator(Context context) {
mUsbManager = (UsbManager)context.getSystemService(Context.USB_SERVICE);
}
public static final String cameraName = "usbCamera";
@Override
public String[] getDeviceNames() {
ArrayList<String> namesList = new ArrayList();
for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
String name = getDeviceName(i);
if (name != null) {
namesList.add(name);
Logging.d("Camera1Enumerator", "Index: " + i + ". " + name);
} else {
Logging.e("Camera1Enumerator", "Index: " + i + ". Failed to query camera name.");
}
}
if (hasUsbDevice()) namesList.add(cameraName);
String[] namesArray = new String[namesList.size()];
return (String[])namesList.toArray(namesArray);
}
public boolean hasUsbDevice() {
if (mUsbManager.getDeviceList().size() > 0) return true;
return false;
}
@Override
public boolean isFrontFacing(String deviceName) {
if (cameraName.equals(deviceName)) return false;
Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info != null && info.facing == 1;
}
@Override
public boolean isBackFacing(String deviceName) {
if (cameraName.equals(deviceName)) return false;
Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info != null && info.facing == 0;
}
@Override
public List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats(String deviceName) {
if (cameraName.equals(deviceName)) return new ArrayList<>();
return getSupportedFormats(getCameraIndex(deviceName));
}
@Override
public CameraVideoCapturer createCapturer(String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new UsbCameraCapturer(deviceName, eventsHandler, this);
}
private static List<List<CameraEnumerationAndroid.CaptureFormat>> cachedSupportedFormats;
static synchronized List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats(int cameraId) {
if (cachedSupportedFormats == null) {
cachedSupportedFormats = new ArrayList();
for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
cachedSupportedFormats.add(enumerateFormats(i));
}
}
return (List)cachedSupportedFormats.get(cameraId);
}
private static List<CameraEnumerationAndroid.CaptureFormat> enumerateFormats(int cameraId) {
Logging.d("Camera1Enumerator", "Get supported formats for camera index " + cameraId + ".");
long startTimeMs = SystemClock.elapsedRealtime();
Camera camera = null;
Camera.Parameters parameters;
label94: {
ArrayList var6;
try {
Logging.d("Camera1Enumerator", "Opening camera with index " + cameraId);
camera = Camera.open(cameraId);
parameters = camera.getParameters();
break label94;
} catch (RuntimeException var15) {
Logging.e("Camera1Enumerator", "Open camera failed on camera index " + cameraId, var15);
var6 = new ArrayList();
} finally {
if (camera != null) {
camera.release();
}
}
return var6;
}
ArrayList formatList = new ArrayList();
try {
int minFps = 0;
int maxFps = 0;
List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
if (listFpsRange != null) {
int[] range = (int[])listFpsRange.get(listFpsRange.size() - 1);
minFps = range[0];
maxFps = range[1];
}
Iterator var19 = parameters.getSupportedPreviewSizes().iterator();
while(var19.hasNext()) {
Camera.Size size = (Camera.Size)var19.next();
formatList.add(new CameraEnumerationAndroid.CaptureFormat(size.width, size.height, minFps, maxFps));
}
} catch (Exception var14) {
Logging.e("Camera1Enumerator", "getSupportedFormats() failed on camera index " + cameraId, var14);
}
long endTimeMs = SystemClock.elapsedRealtime();
Logging.d("Camera1Enumerator", "Get supported formats for camera index " + cameraId + " done. Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
static List<org.webrtc.Size> convertSizes(List<Camera.Size> cameraSizes) {
List<org.webrtc.Size> sizes = new ArrayList();
Iterator var2 = cameraSizes.iterator();
while(var2.hasNext()) {
Camera.Size size = (Camera.Size)var2.next();
sizes.add(new org.webrtc.Size(size.width, size.height));
}
return sizes;
}
static List<CameraEnumerationAndroid.CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
List<CameraEnumerationAndroid.CaptureFormat.FramerateRange> ranges = new ArrayList();
Iterator var2 = arrayRanges.iterator();
while(var2.hasNext()) {
int[] range = (int[])var2.next();
ranges.add(new CameraEnumerationAndroid.CaptureFormat.FramerateRange(range[0], range[1]));
}
return ranges;
}
static int getCameraIndex(String deviceName) {
Logging.d("Camera1Enumerator", "getCameraIndex: " + deviceName);
if (cameraName.equals(deviceName)) return Camera.getNumberOfCameras();
for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(getDeviceName(i))) {
return i;
}
}
throw new IllegalArgumentException("No such camera: " + deviceName);
}
@Nullable
static String getDeviceName(int index) {
if (index == Camera.getNumberOfCameras()) return cameraName;
Camera.CameraInfo info = getCameraInfo(index);
if (info == null) {
return null;
} else {
String facing = info.facing == 1 ? "front" : "back";
return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
}
}
@Nullable
private static Camera.CameraInfo getCameraInfo(int index) {
Camera.CameraInfo info = new Camera.CameraInfo();
try {
Camera.getCameraInfo(index, info);
return info;
} catch (Exception var3) {
Logging.e("Camera1Enumerator", "getCameraInfo failed on index " + index, var3);
return null;
}
}
}
| [
"khan.sohel005@gmail.com"
] | khan.sohel005@gmail.com |
7adc2623499c754d2601507b14a9952ac540cb04 | 34380e7a1df531797743b9724423dfb158d58fd6 | /src/main/java/com/project/pubgcommu/web/dto/game/JoinedTeamGameLogDto.java | b93958fe37645adcf47ed5b0a6306fe454a4fd28 | [] | no_license | namu1714/pubgcommu | a1bd314fd12df9eb0aed36451b7aa908447e89c6 | 988a19bc26d08d25df630c754f8987a8ce57fb3a | refs/heads/master | 2023-07-25T19:34:20.075116 | 2021-09-04T06:04:25 | 2021-09-04T06:04:25 | 382,075,711 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 517 | java | package com.project.pubgcommu.web.dto.game;
import com.project.pubgcommu.domain.killbet.TeamGameLog;
import lombok.Getter;
@Getter
public class JoinedTeamGameLogDto {
private Long id;
private Long team;
private Integer chicken;
private Integer stop;
public JoinedTeamGameLogDto(TeamGameLog teamGameLog){
this.id = teamGameLog.getId();
this.team = teamGameLog.getTeam().getId();
this.chicken = teamGameLog.getChicken();
this.stop = teamGameLog.getStop();
}
}
| [
"namu1714@naver.com"
] | namu1714@naver.com |
e1336268e71d3c9dd54af603b4cd872c95a0a6bc | 15f3f7ddabe246581e9988eebb218af357e56795 | /app/src/main/java/com/jitse/example/ExampleApplication.java | addc1fd37d38799c1d0c7a6a61a3251b184495e5 | [] | no_license | jam0cam/AndroidExample | 991f01df0e73af65bc563606c50996f9ab00cd1a | a411d12487f0de37f99a9c10aa2852b1c8da38ed | refs/heads/master | 2021-01-10T10:29:11.511034 | 2016-01-14T02:32:34 | 2016-01-14T02:32:34 | 49,080,677 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 5,600 | java | package com.jitse.example;
import android.app.Application;
import android.graphics.Bitmap;
import com.jitse.example.retrofit.BrandService;
import com.jitse.example.retrofit.PhetchService;
import com.jitse.example.retrofit.ProductService;
import com.jitse.example.retrofit.SearchService;
import com.jitse.example.retrofit.SixPmService;
import com.twitter.sdk.android.Twitter;
import com.twitter.sdk.android.core.TwitterAuthConfig;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import io.branch.referral.Branch;
import io.fabric.sdk.android.Fabric;
import retrofit.RequestInterceptor;
import retrofit.RestAdapter;
/**
* Created by jitse on 10/21/14.
*/
public class ExampleApplication extends Application {
// Note: Your consumer key and secret should be obfuscated in your source code before shipping.
private static final String TWITTER_KEY = "WEyOBPFdqY2IhYUBmGNE8IXKf";
private static final String TWITTER_SECRET = "35oIo77kbnHxbpPBfws9PUq52v60dXACJGzOOyFSvaAnGncwo5";
public static String API = "https://api.zappos.com";
public static String MAFIA_API = "https://mafia.integ.amazon.com/";
public static String PHETCH_API = "http://phetch.elasticbeanstalk.com/";
private static final String KEY = "5ca1aa6b9151f729f5b7f05b14dba5ff8aedb975";
public static final String SIX_PM = "https://secure-www.6pm.com";
BrandService mBrandService;
SearchService mSearchService;
ProductService mProductService;
PhetchService mPhetchService;
SixPmService mSixService;
public Bitmap mBitmap;
@Override
public void onCreate() {
super.onCreate();
// Branch.getAutoInstance(this);
// TwitterAuthConfig authConfig = new TwitterAuthConfig(TWITTER_KEY, TWITTER_SECRET);
// Fabric.with(this, new Twitter(authConfig));
try {
// Create a trust manager that does not validate certificate chains
TrustManager[] trustAllCerts = new TrustManager[]{
new X509TrustManager() {
@Override
public void checkClientTrusted(java.security.cert.X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public void checkServerTrusted(java.security.cert.X509Certificate[] chain, String authType) throws CertificateException {
}
public java.security.cert.X509Certificate[] getAcceptedIssuers() {
return null;
}
}
};
// Install the all-trusting trust manager
final SSLContext sc = SSLContext.getInstance("SSL");
sc.init(null, trustAllCerts, new java.security.SecureRandom());
HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
// Create all-trusting host name verifier
HostnameVerifier allHostsValid = new HostnameVerifier() {
public boolean verify(String hostname, SSLSession session) {
// Sure, https://www.zappos.com.russia.com is valid!
return true;
}
};
// Install the all-trusting host verifier
HttpsURLConnection.setDefaultHostnameVerifier(allHostsValid);
} catch (NoSuchAlgorithmException e) {
System.out.println("Can't set up the trust all ssl store");
} catch (KeyManagementException e) {
System.out.println("Can't set up the trust all ssl store");
}
RequestInterceptor requestInterceptor = new RequestInterceptor() {
@Override
public void intercept(RequestFacade request) {
request.addHeader("User-Agent", "Android-App v" + BuildConfig.VERSION_CODE);
request.addQueryParam("key", KEY);
}
};
RestAdapter zapposRestAdapter = new RestAdapter.Builder()
.setEndpoint(API)
.setRequestInterceptor(requestInterceptor)
.build();
mBrandService = zapposRestAdapter.create(BrandService.class);
RestAdapter mafiaRestAdapter = new RestAdapter.Builder()
.setEndpoint(MAFIA_API)
.build();
RestAdapter phetchRestAdapter = new RestAdapter.Builder()
.setEndpoint(PHETCH_API)
.build();
RestAdapter sixAdapter = new RestAdapter.Builder()
.setEndpoint(SIX_PM)
.build();
mSearchService = mafiaRestAdapter.create(SearchService.class);
mProductService = zapposRestAdapter.create(ProductService.class);
mPhetchService = phetchRestAdapter.create(PhetchService.class);
mSixService = sixAdapter.create(SixPmService.class);
}
public BrandService getBrandService() {
return mBrandService;
}
public SearchService getSearchService() {
return mSearchService;
}
public ProductService getProductService() {
return mProductService;
}
public PhetchService getPhetchService() {
return mPhetchService;
}
public SixPmService getSixService() {
return mSixService;
}
}
| [
"jitse@zappos.com"
] | jitse@zappos.com |
a9a92fc31cfa48184fdba2a6c2e917261b29452c | a1826c2ed9c12cfc395fb1a14c1a2e1f097155cb | /datalake-20200710/src/main/java/com/aliyun/datalake20200710/models/GetLifecycleRuleRequest.java | bad095adca9eea272a18023cd7119e1045112892 | [
"Apache-2.0"
] | permissive | aliyun/alibabacloud-java-sdk | 83a6036a33c7278bca6f1bafccb0180940d58b0b | 008923f156adf2e4f4785a0419f60640273854ec | refs/heads/master | 2023-09-01T04:10:33.640756 | 2023-09-01T02:40:45 | 2023-09-01T02:40:45 | 288,968,318 | 40 | 45 | null | 2023-06-13T02:47:13 | 2020-08-20T09:51:08 | Java | UTF-8 | Java | false | false | 916 | java | // This file is auto-generated, don't edit it. Thanks.
package com.aliyun.datalake20200710.models;
import com.aliyun.tea.*;
public class GetLifecycleRuleRequest extends TeaModel {
@NameInMap("BizId")
public String bizId;
@NameInMap("ResourceName")
public String resourceName;
public static GetLifecycleRuleRequest build(java.util.Map<String, ?> map) throws Exception {
GetLifecycleRuleRequest self = new GetLifecycleRuleRequest();
return TeaModel.build(map, self);
}
public GetLifecycleRuleRequest setBizId(String bizId) {
this.bizId = bizId;
return this;
}
public String getBizId() {
return this.bizId;
}
public GetLifecycleRuleRequest setResourceName(String resourceName) {
this.resourceName = resourceName;
return this;
}
public String getResourceName() {
return this.resourceName;
}
}
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
d38ad96f352e931907f563fc62d48fb65679a0ed | 8b507cd5ccb1588e89f346ef54863f6a5da18cfd | /src/main/java/javax/jdo/annotations/Convert.java | c5dcbb1e49aad0bbfcda76edfde4700a537cede6 | [] | no_license | mufumbo/javax.jdo | 6e79f4d4697b0227bf4c7baa95f4e624f46bb4e1 | ad36d95e3dcc099ecafde0c4b27d678704294d7a | refs/heads/master | 2020-05-02T11:41:42.621753 | 2016-01-20T12:06:51 | 2016-01-20T12:06:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,239 | java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.jdo.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.jdo.AttributeConverter;
/**
* Specifies that a given type should be converted before being stored to, and after being retrieved from
* the datastore using the given {@link AttributeConverter}.
*
* If this annotation is placed on a type, then the conversion applies to all fields or properties whose types
* match the entity type of the given {@link AttributeConverter}.
* Any {@link Convert} annotations placed on members overrides any type-level conversion specifications.
*
* If this annotation is placed on a field or property, the annotated attribute's type must be
* assignment-compatible with the {@link AttributeConverter}'s entity type argument.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD, ElementType.FIELD })
public @interface Convert {
/**
* The {@link AttributeConverter} to use for conversion.
* @return Converter class to use
*/
@SuppressWarnings("rawtypes")
Class<? extends AttributeConverter> value();
/**
* Whether this conversion is enabled. True by default.
* Setting this to false allows disabling conversion that was specified at PMF level.
* @return Whether the PMF default converter is enabled
*/
boolean enabled() default true;
}
| [
"andy@datanucleus.org"
] | andy@datanucleus.org |
a9d3a9123b652d033de5962ee7c8b07a40082f8e | 5598faaaaa6b3d1d8502cbdaca903f9037d99600 | /code_changes/Apache_projects/HDFS-2991/320e7a0fb5fa92036ce393417b1141dd06cc6c83/FSNamesystem.java | 8e9c9020d9e1853e6f42f2239acb205e4803c806 | [] | no_license | SPEAR-SE/LogInBugReportsEmpirical_Data | 94d1178346b4624ebe90cf515702fac86f8e2672 | ab9603c66899b48b0b86bdf63ae7f7a604212b29 | refs/heads/master | 2022-12-18T02:07:18.084659 | 2020-09-09T16:49:34 | 2020-09-09T16:49:34 | 286,338,252 | 0 | 2 | null | null | null | null | UTF-8 | Java | false | false | 191,390 | java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.commons.logging.*;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.*;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.BlockUCState;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.common.Util;
import static org.apache.hadoop.hdfs.server.common.Util.now;
import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMBean;
import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMetrics;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.delegation.DelegationKey;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.util.*;
import org.apache.hadoop.metrics.util.MBeanUtil;
import org.apache.hadoop.net.CachedDNSToSwitchMapping;
import org.apache.hadoop.net.DNSToSwitchMapping;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.net.ScriptBasedMapping;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager.Lease;
import org.apache.hadoop.hdfs.server.namenode.UnderReplicatedBlocks.BlockIterator;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.DisallowedDatanodeException;
import org.apache.hadoop.hdfs.server.protocol.KeyUpdateCommand;
import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.InvalidPathException;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.permission.*;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.mortbay.util.ajax.JSON;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.DataOutputStream;
import java.io.PrintWriter;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.URI;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.Map.Entry;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import javax.management.StandardMBean;
import javax.management.MBeanServer;
/***************************************************
* FSNamesystem does the actual bookkeeping work for the
* DataNode.
*
* It tracks several important tables.
*
* 1) valid fsname --> blocklist (kept on disk, logged)
* 2) Set of all valid blocks (inverted #1)
* 3) block --> machinelist (kept in memory, rebuilt dynamically from reports)
* 4) machine --> blocklist (inverted #2)
* 5) LRU cache of updated-heartbeat machines
***************************************************/
@InterfaceAudience.Private
public class FSNamesystem implements FSConstants, FSNamesystemMBean, FSClusterStats,
NameNodeMXBean {
public static final Log LOG = LogFactory.getLog(FSNamesystem.class);
private static final ThreadLocal<StringBuilder> auditBuffer =
new ThreadLocal<StringBuilder>() {
protected StringBuilder initialValue() {
return new StringBuilder();
}
};
private static final void logAuditEvent(UserGroupInformation ugi,
InetAddress addr, String cmd, String src, String dst,
HdfsFileStatus stat) {
final StringBuilder sb = auditBuffer.get();
sb.setLength(0);
sb.append("ugi=").append(ugi).append("\t");
sb.append("ip=").append(addr).append("\t");
sb.append("cmd=").append(cmd).append("\t");
sb.append("src=").append(src).append("\t");
sb.append("dst=").append(dst).append("\t");
if (null == stat) {
sb.append("perm=null");
} else {
sb.append("perm=");
sb.append(stat.getOwner()).append(":");
sb.append(stat.getGroup()).append(":");
sb.append(stat.getPermission());
}
auditLog.info(sb);
}
/**
* Logger for audit events, noting successful FSNamesystem operations. Emits
* to FSNamesystem.audit at INFO. Each event causes a set of tab-separated
* <code>key=value</code> pairs to be written for the following properties:
* <code>
* ugi=<ugi in RPC>
* ip=<remote IP>
* cmd=<command>
* src=<src path>
* dst=<dst path (optional)>
* perm=<permissions (optional)>
* </code>
*/
public static final Log auditLog = LogFactory.getLog(
FSNamesystem.class.getName() + ".audit");
static final int DEFAULT_MAX_CORRUPT_FILEBLOCKS_RETURNED = 100;
static int BLOCK_DELETION_INCREMENT = 1000;
private boolean isPermissionEnabled;
private UserGroupInformation fsOwner;
private String supergroup;
  // Default permission applied when upgrading from a pre-permissions release.
  private PermissionStatus defaultPermission;
  // FSNamesystemMetrics counter variables
  private FSNamesystemMetrics myFSMetrics;
  // Aggregate cluster capacity/usage counters.
  private long capacityTotal = 0L, capacityUsed = 0L, capacityRemaining = 0L;
  // Aggregate load figure across datanodes — presumably xceiver count; TODO confirm.
  private int totalLoad = 0;
  // Whether block access tokens are required for data transfers.
  boolean isBlockTokenEnabled;
  BlockTokenSecretManager blockTokenSecretManager;
  // Interval (ms) between block key rollovers; set only when tokens are enabled.
  private long blockKeyUpdateInterval;
  // Lifetime (ms) of an issued block access token.
  private long blockTokenLifetime;
  // Scan interval is not configurable.
  private static final long DELEGATION_TOKEN_REMOVER_SCAN_INTERVAL =
    TimeUnit.MILLISECONDS.convert(1, TimeUnit.HOURS);
  private DelegationTokenSecretManager dtSecretManager;
  //
  // Stores the correct file name hierarchy
  //
  public FSDirectory dir;
  // Block-level bookkeeping: replication queues, invalidation, locations.
  BlockManager blockManager;
  /**
   * Stores the datanode -> block map.
   * <p>
   * Done by storing a set of {@link DatanodeDescriptor} objects, sorted by
   * storage id. In order to keep the storage map consistent it tracks
   * all storages ever registered with the namenode.
   * A descriptor corresponding to a specific storage id can be
   * <ul>
   * <li>added to the map if it is a new storage id;</li>
   * <li>updated with a new datanode started as a replacement for the old one
   * with the same storage id; and </li>
   * <li>removed if and only if an existing datanode is restarted to serve a
   * different storage id.</li>
   * </ul> <br>
   * The list of the {@link DatanodeDescriptor}s in the map is checkpointed
   * in the namespace image file. Only the {@link DatanodeInfo} part is
   * persistent, the list of blocks is restored from the datanode block
   * reports.
   * <p>
   * Mapping: StorageID -> DatanodeDescriptor
   */
  NavigableMap<String, DatanodeDescriptor> datanodeMap =
    new TreeMap<String, DatanodeDescriptor>();
  // Used e.g. to pick a random starting block in getBlocks().
  Random r = new Random();
  /**
   * Stores a set of DatanodeDescriptor objects.
   * This is a subset of {@link #datanodeMap}, containing nodes that are
   * considered alive.
   * The {@link HeartbeatMonitor} periodically checks for outdated entries,
   * and removes them from the list.
   */
  ArrayList<DatanodeDescriptor> heartbeats = new ArrayList<DatanodeDescriptor>();
  public LeaseManager leaseManager = new LeaseManager(this);
  //
  // Threaded object that checks to see if we have been
  // getting heartbeats from all clients.
  //
  Daemon hbthread = null;   // HeartbeatMonitor thread
  public Daemon lmthread = null;   // LeaseMonitor thread
  Daemon smmthread = null;  // SafeModeMonitor thread
  public Daemon replthread = null;  // Replication thread
  // Volatile so daemon threads observe shutdown promptly; set false in close().
  private volatile boolean fsRunning = true;
  // Wall-clock time (ms) at which this namesystem started; set in initialize().
  long systemStart = 0;
  // heartbeatRecheckInterval is how often namenode checks for expired datanodes
  private long heartbeatRecheckInterval;
  // heartbeatExpireInterval is how long namenode waits for datanode to report
  // heartbeat
  private long heartbeatExpireInterval;
  //replicationRecheckInterval is how often namenode checks for new replication work
  private long replicationRecheckInterval;
  // Server-side defaults (block size, checksum, etc.) advertised to clients.
  private FsServerDefaults serverDefaults;
  // allow appending to hdfs files
  private boolean supportAppends = true;
  private volatile SafeModeInfo safeMode;  // safe mode information
  private Host2NodesMap host2DataNodeMap = new Host2NodesMap();
  // datanode networktoplogy
  NetworkTopology clusterMap = new NetworkTopology();
  private DNSToSwitchMapping dnsToSwitchMapping;
  private HostsFileReader hostsReader;
  private Daemon dnthread = null;  // DecommissionManager monitor thread
  private long maxFsObjects = 0;          // maximum number of fs objects
  /**
   * The global generation stamp for this file system.
   */
  private final GenerationStamp generationStamp = new GenerationStamp();
  // Ask Datanode only up to this many blocks to delete.
  int blockInvalidateLimit = FSConstants.BLOCK_INVALIDATE_CHUNK;
  // precision of access times.
  private long accessTimePrecision = 0;
  // lock to protect FSNamesystem.
  private ReentrantReadWriteLock fsLock;
  /**
   * FSNamesystem constructor: builds and fully initializes the namesystem
   * from configuration, loading the on-disk image.
   *
   * @param conf cluster configuration
   * @throws IOException if initialization fails; partially-constructed
   *         state is torn down via {@link #close()} before rethrowing
   */
  FSNamesystem(Configuration conf) throws IOException {
    try {
      initialize(conf, null);
    } catch(IOException e) {
      LOG.error(getClass().getSimpleName() + " initialization failed.", e);
      close();
      throw e;
    }
  }
  /**
   * Initialize FSNamesystem.
   * When fsImage is null the image is loaded from the configured storage
   * directories; otherwise the supplied image (e.g. a BackupStorage) is
   * used and the system enters manual safe mode.
   *
   * @param conf cluster configuration
   * @param fsImage pre-built image, or null to load from disk
   */
  private void initialize(Configuration conf, FSImage fsImage)
      throws IOException {
    this.systemStart = now();
    this.blockManager = new BlockManager(this, conf);
    this.fsLock = new ReentrantReadWriteLock(true); // fair locking
    setConfigurationParameters(conf);
    dtSecretManager = createDelegationTokenSecretManager(conf);
    this.registerMBean(conf); // register the MBean for FSNamesystem status
    if(fsImage == null) {
      this.dir = new FSDirectory(this, conf);
      StartupOption startOpt = NameNode.getStartupOption(conf);
      this.dir.loadFSImage(getNamespaceDirs(conf),
                           getNamespaceEditsDirs(conf), startOpt);
      long timeTakenToLoadFSImage = now() - systemStart;
      LOG.info("Finished loading FSImage in " + timeTakenToLoadFSImage + " msecs");
      NameNode.getNameNodeMetrics().fsImageLoadTime.set(
          (int) timeTakenToLoadFSImage);
      this.safeMode = new SafeModeInfo(conf);
    } else {
      this.dir = new FSDirectory(fsImage, this, conf);
      this.safeMode = new SafeModeInfo(); // manual safe mode
    }
    this.hostsReader = new HostsFileReader(conf.get("dfs.hosts",""),
                                           conf.get("dfs.hosts.exclude",""));
    // Block token manager is only needed when tokens were enabled by
    // setConfigurationParameters() above.
    if (isBlockTokenEnabled) {
      blockTokenSecretManager = new BlockTokenSecretManager(true,
          blockKeyUpdateInterval, blockTokenLifetime);
    }
  }
void activateSecretManager() throws IOException {
if (dtSecretManager != null) {
dtSecretManager.startThreads();
}
}
  /**
   * Activate FSNamesystem daemons: heartbeat, lease, replication and
   * decommission monitors, plus the DNS-to-switch mapping and MXBean.
   * Called once after construction/initialization.
   */
  void activate(Configuration conf) throws IOException {
    setBlockTotal();
    blockManager.activate();
    // Create all monitors before starting any of them.
    this.hbthread = new Daemon(new HeartbeatMonitor());
    this.lmthread = new Daemon(leaseManager.new Monitor());
    this.replthread = new Daemon(new ReplicationMonitor());
    hbthread.start();
    lmthread.start();
    replthread.start();
    this.dnthread = new Daemon(new DecommissionManager(this).new Monitor(
        conf.getInt("dfs.namenode.decommission.interval", 30),
        conf.getInt("dfs.namenode.decommission.nodes.per.interval", 5)));
    dnthread.start();
    this.dnsToSwitchMapping = ReflectionUtils.newInstance(
        conf.getClass(DFSConfigKeys.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
            ScriptBasedMapping.class,
            DNSToSwitchMapping.class), conf);
    /* If the dns to switch mapping supports cache, resolve network
     * locations of those hosts in the include list,
     * and store the mapping in the cache; so future calls to resolve
     * will be fast.
     */
    if (dnsToSwitchMapping instanceof CachedDNSToSwitchMapping) {
      dnsToSwitchMapping.resolve(new ArrayList<String>(hostsReader.getHosts()));
    }
    registerMXBean();
  }
public static Collection<URI> getNamespaceDirs(Configuration conf) {
return getStorageDirs(conf, DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY);
}
  /**
   * Read a set of storage directories from configuration and return them
   * as URIs. Under the IMPORT startup option, directories that come from
   * the default resources are removed so only hdfs-site.xml entries remain
   * (an empty result is then legal: the checkpoint supplies the image).
   *
   * @param conf cluster configuration
   * @param propertyName configuration key listing the directories
   * @return the configured directories as URIs
   */
  public static Collection<URI> getStorageDirs(Configuration conf,
                                               String propertyName) {
    Collection<String> dirNames = conf.getTrimmedStringCollection(propertyName);
    StartupOption startOpt = NameNode.getStartupOption(conf);
    if(startOpt == StartupOption.IMPORT) {
      // In case of IMPORT this will get rid of default directories
      // but will retain directories specified in hdfs-site.xml
      // When importing image from a checkpoint, the name-node can
      // start with empty set of storage directories.
      Configuration cE = new HdfsConfiguration(false);
      cE.addResource("core-default.xml");
      cE.addResource("core-site.xml");
      cE.addResource("hdfs-default.xml");
      Collection<String> dirNames2 = cE.getTrimmedStringCollection(propertyName);
      dirNames.removeAll(dirNames2);
      if(dirNames.isEmpty())
        LOG.warn("!!! WARNING !!!" +
          "\n\tThe NameNode currently runs without persistent storage." +
          "\n\tAny changes to the file system meta-data may be lost." +
          "\n\tRecommended actions:" +
          "\n\t\t- shutdown and restart NameNode with configured \""
          + propertyName + "\" in hdfs-site.xml;" +
          "\n\t\t- use Backup Node as a persistent and up-to-date storage " +
          "of the file system meta-data.");
    } else if (dirNames.isEmpty())
      // No explicit configuration: fall back to the default local path.
      dirNames.add("file:///tmp/hadoop/dfs/name");
    return Util.stringCollectionAsURIs(dirNames);
  }
public static Collection<URI> getNamespaceEditsDirs(Configuration conf) {
return getStorageDirs(conf, DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY);
}
  // Utility methods to acquire and release the namespace-wide read and
  // write locks. All delegate to the fair ReentrantReadWriteLock fsLock.
  void readLock() {
    this.fsLock.readLock().lock();
  }
  void readUnlock() {
    this.fsLock.readLock().unlock();
  }
  void writeLock() {
    this.fsLock.writeLock().lock();
  }
  void writeUnlock() {
    this.fsLock.writeLock().unlock();
  }
  /** @return true iff the calling thread currently holds the write lock. */
  boolean hasWriteLock() {
    return this.fsLock.isWriteLockedByCurrentThread();
  }
  /**
   * Construct a namesystem around an already-built image.
   * Unlike the Configuration-only constructor, this does not load an
   * image from disk and does not enter safe mode.
   *
   * @param fsImage image holding the filesystem directory state
   * @param conf cluster configuration
   */
  FSNamesystem(FSImage fsImage, Configuration conf) throws IOException {
    this.fsLock = new ReentrantReadWriteLock(true);
    this.blockManager = new BlockManager(this, conf);
    setConfigurationParameters(conf);
    this.dir = new FSDirectory(fsImage, this, conf);
    dtSecretManager = createDelegationTokenSecretManager(conf);
  }
  /**
   * Create FSNamesystem for {@link BackupNode}.
   * Should do everything that would be done for the NameNode,
   * except for loading the image.
   *
   * @param bnImage {@link BackupStorage}
   * @param conf configuration
   * @throws IOException if initialization fails; partial state is closed
   *         before the exception is rethrown
   */
  FSNamesystem(Configuration conf, BackupStorage bnImage) throws IOException {
    try {
      initialize(conf, bnImage);
    } catch(IOException e) {
      LOG.error(getClass().getSimpleName() + " initialization failed.", e);
      close();
      throw e;
    }
  }
  /**
   * Initializes some of the members from configuration: ownership and
   * permission settings, heartbeat/replication intervals, server defaults,
   * object limits and block-token parameters.
   */
  private void setConfigurationParameters(Configuration conf)
      throws IOException {
    fsOwner = UserGroupInformation.getCurrentUser();
    LOG.info("fsOwner=" + fsOwner);
    this.supergroup = conf.get(DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY,
        DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
    this.isPermissionEnabled = conf.getBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY,
        DFSConfigKeys.DFS_PERMISSIONS_ENABLED_DEFAULT);
    LOG.info("supergroup=" + supergroup);
    LOG.info("isPermissionEnabled=" + isPermissionEnabled);
    short filePermission = (short)conf.getInt(DFSConfigKeys.DFS_NAMENODE_UPGRADE_PERMISSION_KEY,
        DFSConfigKeys.DFS_NAMENODE_UPGRADE_PERMISSION_DEFAULT);
    this.defaultPermission = PermissionStatus.createImmutable(
        fsOwner.getShortUserName(), supergroup, new FsPermission(filePermission));
    // Config value is in seconds; internal fields are milliseconds.
    long heartbeatInterval = conf.getLong(
        DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY,
        DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT) * 1000;
    this.heartbeatRecheckInterval = conf.getInt(
        DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY,
        DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT); // 5 minutes
    // A datanode is declared dead after missing ~10 heartbeats plus
    // two recheck periods.
    this.heartbeatExpireInterval = 2 * heartbeatRecheckInterval +
      10 * heartbeatInterval;
    this.replicationRecheckInterval =
      conf.getInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY,
                  DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_DEFAULT) * 1000L;
    this.serverDefaults = new FsServerDefaults(
        conf.getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE),
        conf.getInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, DEFAULT_BYTES_PER_CHECKSUM),
        conf.getInt(DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY, DEFAULT_WRITE_PACKET_SIZE),
        (short) conf.getInt("dfs.replication", DEFAULT_REPLICATION_FACTOR),
        conf.getInt("io.file.buffer.size", DEFAULT_FILE_BUFFER_SIZE));
    this.maxFsObjects = conf.getLong(DFSConfigKeys.DFS_NAMENODE_MAX_OBJECTS_KEY,
        DFSConfigKeys.DFS_NAMENODE_MAX_OBJECTS_DEFAULT);
    // Scale the per-datanode deletion batch with the heartbeat interval.
    this.blockInvalidateLimit = Math.max(this.blockInvalidateLimit,
        20*(int)(heartbeatInterval/1000));
    this.accessTimePrecision = conf.getLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, 0);
    this.supportAppends = conf.getBoolean(DFSConfigKeys.DFS_SUPPORT_APPEND_KEY,
        DFSConfigKeys.DFS_SUPPORT_APPEND_DEFAULT);
    this.isBlockTokenEnabled = conf.getBoolean(
        DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY,
        DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_DEFAULT);
    if (isBlockTokenEnabled) {
      // Config values are in minutes; convert to milliseconds.
      this.blockKeyUpdateInterval = conf.getLong(
          DFSConfigKeys.DFS_BLOCK_ACCESS_KEY_UPDATE_INTERVAL_KEY,
          DFSConfigKeys.DFS_BLOCK_ACCESS_KEY_UPDATE_INTERVAL_DEFAULT) * 60 * 1000L; // 10 hrs
      this.blockTokenLifetime = conf.getLong(
          DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_LIFETIME_KEY,
          DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_LIFETIME_DEFAULT) * 60 * 1000L; // 10 hrs
    }
    LOG.info("isBlockTokenEnabled=" + isBlockTokenEnabled
        + " blockKeyUpdateInterval=" + blockKeyUpdateInterval / (60 * 1000)
        + " min(s), blockTokenLifetime=" + blockTokenLifetime / (60 * 1000)
        + " min(s)");
  }
  /**
   * Return the default path permission when upgrading from releases with no
   * permissions (<=0.15) to releases with permissions (>=0.16)
   *
   * @return the immutable default permission built in
   *         setConfigurationParameters()
   */
  protected PermissionStatus getUpgradePermission() {
    return defaultPermission;
  }
  /**
   * Build a consistent snapshot of the namespace identity (namespace id,
   * creation time, distributed-upgrade version) under the read lock.
   */
  NamespaceInfo getNamespaceInfo() {
    readLock();
    try {
      return new NamespaceInfo(dir.fsImage.getNamespaceID(),
                               dir.fsImage.getCTime(), getDistributedUpgradeVersion());
    } finally {
      readUnlock();
    }
  }
  /**
   * Close down this file system manager.
   * Causes heartbeat and lease daemons to stop; waits briefly for
   * them to finish, but a short timeout returns control back to caller.
   */
  public void close() {
    // Signal daemon loops to exit; fsRunning is volatile so they see it.
    fsRunning = false;
    try {
      if (blockManager != null) blockManager.close();
      if (hbthread != null) hbthread.interrupt();
      if (replthread != null) replthread.interrupt();
      if (dnthread != null) dnthread.interrupt();
      if (smmthread != null) smmthread.interrupt();
      if (dtSecretManager != null) dtSecretManager.stopThreads();
    } catch (Exception e) {
      // Best-effort shutdown: log and continue to the lease daemon / dir close.
      LOG.warn("Exception shutting down FSNamesystem", e);
    } finally {
      // using finally to ensure we also wait for lease daemon
      try {
        if (lmthread != null) {
          lmthread.interrupt();
          lmthread.join(3000); // bounded wait so close() cannot hang
        }
        dir.close();
      } catch (InterruptedException ie) {
        // Deliberately ignored: shutdown proceeds even if the join is interrupted.
      } catch (IOException ie) {
        LOG.error("Error closing FSDirectory", ie);
        IOUtils.cleanup(LOG, dir);
      }
    }
  }
  /** Is this name system running? (volatile read; no lock required) */
  boolean isRunning() {
    return fsRunning;
  }
/**
* Dump all metadata into specified file
*/
void metaSave(String filename) throws IOException {
writeLock();
try {
checkSuperuserPrivilege();
File file = new File(System.getProperty("hadoop.log.dir"), filename);
PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file,
true)));
long totalInodes = this.dir.totalInodes();
long totalBlocks = this.getBlocksTotal();
ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
this.DFSNodesStatus(live, dead);
String str = totalInodes + " files and directories, " + totalBlocks
+ " blocks = " + (totalInodes + totalBlocks) + " total";
out.println(str);
out.println("Live Datanodes: "+live.size());
out.println("Dead Datanodes: "+dead.size());
blockManager.metaSave(out);
//
// Dump all datanodes
//
datanodeDump(out);
out.flush();
out.close();
} finally {
writeUnlock();
}
}
  /** @return the configured default block size for new files. */
  long getDefaultBlockSize() {
    return serverDefaults.getBlockSize();
  }
  /** @return server-side defaults (block size, checksum, packet size, ...). */
  FsServerDefaults getServerDefaults() {
    return serverDefaults;
  }
  /** @return access-time precision in ms; 0 disables access-time updates. */
  long getAccessTimePrecision() {
    return accessTimePrecision;
  }
  /** @return true iff access-time recording is enabled (precision > 0). */
  private boolean isAccessTimeSupported() {
    return accessTimePrecision > 0;
  }
  /////////////////////////////////////////////////////////
  //
  // These methods are called by secondary namenodes
  //
  /////////////////////////////////////////////////////////
  /**
   * return a list of blocks & their locations on <code>datanode</code> whose
   * total size is <code>size</code>
   *
   * @param datanode on which blocks are located
   * @param size total size of blocks
   */
  BlocksWithLocations getBlocks(DatanodeID datanode, long size)
      throws IOException {
    readLock();
    try {
      checkSuperuserPrivilege();
      DatanodeDescriptor node = getDatanode(datanode);
      if (node == null) {
        NameNode.stateChangeLog.warn("BLOCK* NameSystem.getBlocks: "
            + "Asking for blocks from an unrecorded node " + datanode.getName());
        throw new IllegalArgumentException(
            "Unexpected exception. Got getBlocks message for datanode " +
            datanode.getName() + ", but there is no info for it");
      }
      int numBlocks = node.numBlocks();
      if(numBlocks == 0) {
        return new BlocksWithLocations(new BlockWithLocations[0]);
      }
      Iterator<BlockInfo> iter = node.getBlockIterator();
      int startBlock = r.nextInt(numBlocks); // starting from a random block
      // skip blocks
      for(int i=0; i<startBlock; i++) {
        iter.next();
      }
      List<BlockWithLocations> results = new ArrayList<BlockWithLocations>();
      long totalSize = 0;
      BlockInfo curBlock;
      // First pass: accumulate complete blocks from the random start point
      // until the requested total size is reached or the iterator ends.
      while(totalSize<size && iter.hasNext()) {
        curBlock = iter.next();
        if(!curBlock.isComplete())  continue;
        totalSize += addBlock(curBlock, results);
      }
      // Second pass: if not enough, wrap around and scan the blocks that
      // were skipped before the random start point.
      if(totalSize<size) {
        iter = node.getBlockIterator(); // start from the beginning
        for(int i=0; i<startBlock&&totalSize<size; i++) {
          curBlock = iter.next();
          if(!curBlock.isComplete())  continue;
          totalSize += addBlock(curBlock, results);
        }
      }
      return new BlocksWithLocations(
          results.toArray(new BlockWithLocations[results.size()]));
    } finally {
      readUnlock();
    }
  }
/**
* Get access keys
*
* @return current access keys
*/
ExportedBlockKeys getBlockKeys() {
return isBlockTokenEnabled ? blockTokenSecretManager.exportKeys()
: ExportedBlockKeys.DUMMY_KEYS;
}
/**
* Get all valid locations of the block & add the block to results
* return the length of the added block; 0 if the block is not added
*/
private long addBlock(Block block, List<BlockWithLocations> results) {
ArrayList<String> machineSet = blockManager.getValidLocations(block);
if(machineSet.size() == 0) {
return 0;
} else {
results.add(new BlockWithLocations(block,
machineSet.toArray(new String[machineSet.size()])));
return block.getNumBytes();
}
}
  /////////////////////////////////////////////////////////
  //
  // These methods are called by HadoopFS clients
  //
  /////////////////////////////////////////////////////////
  /**
   * Set permissions for an existing file.
   * The caller must own the file; the edit is logged and synced, and an
   * audit event is emitted for external invocations.
   * @throws IOException
   */
  public void setPermission(String src, FsPermission permission)
      throws AccessControlException, FileNotFoundException, SafeModeException,
      UnresolvedLinkException, IOException {
    writeLock();
    try {
      if (isInSafeMode())
        throw new SafeModeException("Cannot set permission for " + src, safeMode);
      checkOwner(src);
      dir.setPermission(src, permission);
      getEditLog().logSync();
      if (auditLog.isInfoEnabled() && isExternalInvocation()) {
        final HdfsFileStatus stat = dir.getFileInfo(src, false);
        logAuditEvent(UserGroupInformation.getCurrentUser(),
                      Server.getRemoteIp(),
                      "setPermission", src, null, stat);
      }
    } finally {
      writeUnlock();
    }
  }
  /**
   * Set owner for an existing file.
   * A non-superuser may not change the owner, and may only set the group
   * to one it belongs to.
   * @throws IOException
   */
  public void setOwner(String src, String username, String group)
      throws AccessControlException, FileNotFoundException, SafeModeException,
      UnresolvedLinkException, IOException {
    writeLock();
    try {
      if (isInSafeMode())
        throw new SafeModeException("Cannot set owner for " + src, safeMode);
      FSPermissionChecker pc = checkOwner(src);
      if (!pc.isSuper) {
        if (username != null && !pc.user.equals(username)) {
          throw new AccessControlException("Non-super user cannot change owner.");
        }
        if (group != null && !pc.containsGroup(group)) {
          throw new AccessControlException("User does not belong to " + group
            + " .");
        }
      }
      dir.setOwner(src, username, group);
      getEditLog().logSync();
      if (auditLog.isInfoEnabled() && isExternalInvocation()) {
        final HdfsFileStatus stat = dir.getFileInfo(src, false);
        logAuditEvent(UserGroupInformation.getCurrentUser(),
                      Server.getRemoteIp(),
                      "setOwner", src, null, stat);
      }
    } finally {
      writeUnlock();
    }
  }
  /**
   * Get block locations within the specified range, with each block's
   * replica list sorted by network distance from the requesting client.
   * @see ClientProtocol#getBlockLocations(String, long, long)
   */
  LocatedBlocks getBlockLocations(String clientMachine, String src,
      long offset, long length) throws AccessControlException,
      FileNotFoundException, UnresolvedLinkException, IOException {
    LocatedBlocks blocks = getBlockLocations(src, offset, length, true, true);
    if (blocks != null) {
      //sort the blocks so the closest replicas to clientMachine come first
      DatanodeDescriptor client = host2DataNodeMap.getDatanodeByHost(
          clientMachine);
      for (LocatedBlock b : blocks.getLocatedBlocks()) {
        clusterMap.pseudoSortByDistance(client, b.getLocations());
      }
    }
    return blocks;
  }
  /**
   * Get block locations within the specified range.
   * Validates the requested range, checks READ access when permissions are
   * enabled, and emits an "open" audit event for external invocations.
   * @see ClientProtocol#getBlockLocations(String, long, long)
   * @throws FileNotFoundException
   */
  LocatedBlocks getBlockLocations(String src, long offset, long length,
      boolean doAccessTime, boolean needBlockToken) throws FileNotFoundException,
      UnresolvedLinkException, IOException {
    if (isPermissionEnabled) {
      checkPathAccess(src, FsAction.READ);
    }
    if (offset < 0) {
      throw new HadoopIllegalArgumentException(
          "Negative offset is not supported. File: " + src);
    }
    if (length < 0) {
      throw new HadoopIllegalArgumentException(
          "Negative length is not supported. File: " + src);
    }
    final LocatedBlocks ret = getBlockLocationsInternal(src,
        offset, length, doAccessTime, needBlockToken);
    if (auditLog.isInfoEnabled() && isExternalInvocation()) {
      logAuditEvent(UserGroupInformation.getCurrentUser(),
                    Server.getRemoteIp(),
                    "open", src, null, null);
    }
    return ret;
  }
  /**
   * Resolve a path to its located blocks, optionally updating the file's
   * access time. The first attempt runs under the read lock; if an
   * access-time update turns out to be needed, the whole operation is
   * retried under the write lock (required to mutate the inode).
   */
  private LocatedBlocks getBlockLocationsInternal(String src,
                                                  long offset,
                                                  long length,
                                                  boolean doAccessTime,
                                                  boolean needBlockToken)
      throws FileNotFoundException, UnresolvedLinkException, IOException {
    for (int attempt = 0; attempt < 2; attempt++) {
      if (attempt == 0) { // first attempt is with readlock
        readLock();
      }  else { // second attempt is with  write lock
        writeLock(); // writelock is needed to set accesstime
      }
      try {
        long now = now();
        INodeFile inode = dir.getFileINode(src);
        if (inode == null) {
          throw new FileNotFoundException("File does not exist: " + src);
        }
        assert !inode.isLink();
        if (doAccessTime && isAccessTimeSupported()) {
          if (now <= inode.getAccessTime() + getAccessTimePrecision()) {
            // if we have to set access time but we only have the readlock, then
            // restart this entire operation with the writeLock.
            if (attempt == 0) {
              continue;
            }
          }
          dir.setTimes(src, inode, -1, now, false);
        }
        return getBlockLocationsInternal(inode, offset, length, needBlockToken);
      } finally {
        if (attempt == 0) {
          readUnlock();
        } else {
          writeUnlock();
        }
      }
    }
    return null; // can never reach here
  }
  /**
   * Build the LocatedBlocks response for an already-resolved inode.
   * Distinguishes a complete last block (file length includes it) from an
   * under-construction last block, and attaches READ block tokens when
   * tokens are enabled and requested.
   */
  LocatedBlocks getBlockLocationsInternal(INodeFile inode,
      long offset, long length, boolean needBlockToken)
      throws IOException {
    readLock();
    try {
      final BlockInfo[] blocks = inode.getBlocks();
      if (LOG.isDebugEnabled()) {
        LOG.debug("blocks = " + java.util.Arrays.asList(blocks));
      }
      if (blocks == null) {
        return null;
      }
      if (blocks.length == 0) {
        // Empty file: no blocks to locate.
        return new LocatedBlocks(0, inode.isUnderConstruction(),
            Collections.<LocatedBlock>emptyList(), null, false);
      } else {
        final long n = inode.computeFileSize(false);
        final List<LocatedBlock> locatedblocks = blockManager.getBlockLocations(
            blocks, offset, length, Integer.MAX_VALUE);
        final BlockInfo last = inode.getLastBlock();
        if (LOG.isDebugEnabled()) {
          LOG.debug("last = " + last);
        }
        if(isBlockTokenEnabled && needBlockToken) {
          setBlockTokens(locatedblocks);
        }
        if (last.isComplete()) {
          // Complete last block: its start offset is n - last.getNumBytes().
          return new LocatedBlocks(n, inode.isUnderConstruction(), locatedblocks,
              blockManager.getBlockLocation(last, n-last.getNumBytes()), true);
        } else {
          // Last block still being written: it starts at the current length n.
          return new LocatedBlocks(n, inode.isUnderConstruction(), locatedblocks,
              blockManager.getBlockLocation(last, n), false);
        }
      }
    } finally {
      readUnlock();
    }
  }
/** Generate block tokens for the blocks to be returned. */
private void setBlockTokens(List<LocatedBlock> locatedBlocks) throws IOException {
for(LocatedBlock l : locatedBlocks) {
Token<BlockTokenIdentifier> token =
blockTokenSecretManager.generateToken(l.getBlock(),
EnumSet.of(BlockTokenSecretManager.AccessMode.READ));
l.setBlockToken(token);
}
}
/**
* Moves all the blocks from srcs and appends them to trg
* To avoid rollbacks we will verify validitity of ALL of the args
* before we start actual move.
* @param target
* @param srcs
* @throws IOException
*/
public void concat(String target, String [] srcs)
throws IOException, UnresolvedLinkException {
if(FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug("concat " + Arrays.toString(srcs) +
" to " + target);
}
// check safe mode
if (isInSafeMode()) {
throw new SafeModeException("concat: cannot concat " + target, safeMode);
}
// verify args
if(target.isEmpty()) {
throw new IllegalArgumentException("concat: trg file name is empty");
}
if(srcs == null || srcs.length == 0) {
throw new IllegalArgumentException("concat: srcs list is empty or null");
}
// currently we require all the files to be in the same dir
String trgParent =
target.substring(0, target.lastIndexOf(Path.SEPARATOR_CHAR));
for(String s : srcs) {
String srcParent = s.substring(0, s.lastIndexOf(Path.SEPARATOR_CHAR));
if(! srcParent.equals(trgParent)) {
throw new IllegalArgumentException
("concat: srcs and target shoould be in same dir");
}
}
writeLock();
try {
// write permission for the target
if (isPermissionEnabled) {
checkPathAccess(target, FsAction.WRITE);
// and srcs
for(String aSrc: srcs) {
checkPathAccess(aSrc, FsAction.READ); // read the file
checkParentAccess(aSrc, FsAction.WRITE); // for delete
}
}
// to make sure no two files are the same
Set<INode> si = new HashSet<INode>();
// we put the following prerequisite for the operation
// replication and blocks sizes should be the same for ALL the blocks
// check the target
INode inode = dir.getFileINode(target);
if(inode == null) {
throw new IllegalArgumentException("concat: trg file doesn't exist");
}
if(inode.isUnderConstruction()) {
throw new IllegalArgumentException("concat: trg file is uner construction");
}
INodeFile trgInode = (INodeFile) inode;
// per design trg shouldn't be empty and all the blocks same size
if(trgInode.blocks.length == 0) {
throw new IllegalArgumentException("concat: "+ target + " file is empty");
}
long blockSize = trgInode.getPreferredBlockSize();
// check the end block to be full
if(blockSize != trgInode.blocks[trgInode.blocks.length-1].getNumBytes()) {
throw new IllegalArgumentException(target + " blocks size should be the same");
}
si.add(trgInode);
short repl = trgInode.getReplication();
// now check the srcs
boolean endSrc = false; // final src file doesn't have to have full end block
for(int i=0; i<srcs.length; i++) {
String src = srcs[i];
if(i==srcs.length-1)
endSrc=true;
INodeFile srcInode = dir.getFileINode(src);
if(src.isEmpty()
|| srcInode == null
|| srcInode.isUnderConstruction()
|| srcInode.blocks.length == 0) {
throw new IllegalArgumentException("concat: file " + src +
" is invalid or empty or underConstruction");
}
// check replication and blocks size
if(repl != srcInode.getReplication()) {
throw new IllegalArgumentException(src + " and " + target + " " +
"should have same replication: "
+ repl + " vs. " + srcInode.getReplication());
}
//boolean endBlock=false;
// verify that all the blocks are of the same length as target
// should be enough to check the end blocks
int idx = srcInode.blocks.length-1;
if(endSrc)
idx = srcInode.blocks.length-2; // end block of endSrc is OK not to be full
if(idx >= 0 && srcInode.blocks[idx].getNumBytes() != blockSize) {
throw new IllegalArgumentException("concat: blocks sizes of " +
src + " and " + target + " should all be the same");
}
si.add(srcInode);
}
// make sure no two files are the same
if(si.size() < srcs.length+1) { // trg + srcs
// it means at least two files are the same
throw new IllegalArgumentException("at least two files are the same");
}
if(NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("DIR* NameSystem.concat: " +
Arrays.toString(srcs) + " to " + target);
}
dir.concatInternal(target,srcs);
} finally {
writeUnlock();
}
getEditLog().logSync();
if (auditLog.isInfoEnabled() && isExternalInvocation()) {
final HdfsFileStatus stat = dir.getFileInfo(target, false);
logAuditEvent(UserGroupInformation.getLoginUser(),
Server.getRemoteIp(),
"concat", Arrays.toString(srcs), target, stat);
}
}
  /**
   * stores the modification and access time for this inode.
   * The access time is precise upto an hour. The transaction, if needed, is
   * written to the edits log but is not flushed.
   */
  public void setTimes(String src, long mtime, long atime)
      throws IOException, UnresolvedLinkException {
    // Reject explicit atime updates when access times are globally disabled
    // (atime == -1 means "leave unchanged" and is always allowed).
    if (!isAccessTimeSupported() && atime != -1) {
      throw new IOException("Access time for hdfs is not configured. " +
                            " Please set dfs.support.accessTime configuration parameter.");
    }
    writeLock();
    try {
      // The caller needs to have write access to set access & modification
      // times.
      if (isPermissionEnabled) {
        checkPathAccess(src, FsAction.WRITE);
      }
      INodeFile inode = dir.getFileINode(src);
      if (inode != null) {
        dir.setTimes(src, inode, mtime, atime, true);
        if (auditLog.isInfoEnabled() && isExternalInvocation()) {
          final HdfsFileStatus stat = dir.getFileInfo(src, false);
          logAuditEvent(UserGroupInformation.getCurrentUser(),
                        Server.getRemoteIp(),
                        "setTimes", src, null, stat);
        }
      } else {
        throw new FileNotFoundException("File " + src + " does not exist.");
      }
    } finally {
      writeUnlock();
    }
  }
  /**
   * Create a symbolic link.
   * Verifies the parent directory (unless createParent), delegates to
   * createSymlinkInternal, then syncs the edit log and audits.
   */
  public void createSymlink(String target, String link,
      PermissionStatus dirPerms, boolean createParent)
      throws IOException, UnresolvedLinkException {
    writeLock();
    try {
      if (!createParent) {
        verifyParentDir(link);
      }
      createSymlinkInternal(target, link, dirPerms, createParent);
    } finally {
      writeUnlock();
    }
    getEditLog().logSync();
    if (auditLog.isInfoEnabled() && isExternalInvocation()) {
      final HdfsFileStatus stat = dir.getFileInfo(link, false);
      logAuditEvent(UserGroupInformation.getCurrentUser(),
                    Server.getRemoteIp(),
                    "createSymlink", link, target, stat);
    }
  }
  /**
   * Create a symbolic link.
   * Performs safe-mode, name-validity, existence, permission and quota
   * checks before adding the link to the namespace. Takes the write lock
   * (reentrant, so safe when called from createSymlink which already
   * holds it).
   */
  private void createSymlinkInternal(String target, String link,
      PermissionStatus dirPerms, boolean createParent)
      throws IOException, UnresolvedLinkException {
    writeLock();
    try {
      if (NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* NameSystem.createSymlink: target=" +
          target + " link=" + link);
      }
      if (isInSafeMode()) {
        throw new SafeModeException("Cannot create symlink " + link, safeMode);
      }
      if (!DFSUtil.isValidName(link)) {
        throw new InvalidPathException("Invalid file name: " + link);
      }
      if (!dir.isValidToCreate(link)) {
        throw new IOException("failed to create link " + link
            +" either because the filename is invalid or the file exists");
      }
      if (isPermissionEnabled) {
        checkAncestorAccess(link, FsAction.WRITE);
      }
      // validate that we have enough inodes.
      checkFsObjectLimit();
      // add symbolic link to namespace
      dir.addSymlink(link, target, dirPerms, createParent);
    } finally {
      writeUnlock();
    }
  }
  /**
   * Set replication for an existing file.
   *
   * The NameNode sets new replication and schedules either replication of
   * under-replicated data blocks or removal of the excessive block copies
   * if the blocks are over-replicated.
   *
   * @see ClientProtocol#setReplication(String, short)
   * @param src file name
   * @param replication new replication
   * @return true if successful;
   *         false if file does not exist or is a directory
   */
  public boolean setReplication(String src, short replication)
      throws IOException, UnresolvedLinkException {
    boolean status = setReplicationInternal(src, replication);
    getEditLog().logSync();
    // Audit only on success and only for external (RPC) invocations.
    if (status && auditLog.isInfoEnabled() && isExternalInvocation()) {
      logAuditEvent(UserGroupInformation.getCurrentUser(),
                    Server.getRemoteIp(),
                    "setReplication", src, null, null);
    }
    return status;
  }
private boolean setReplicationInternal(String src,
short replication) throws AccessControlException, QuotaExceededException,
SafeModeException, UnresolvedLinkException, IOException {
writeLock();
try {
if (isInSafeMode())
throw new SafeModeException("Cannot set replication for " + src, safeMode);
blockManager.verifyReplication(src, replication, null);
if (isPermissionEnabled) {
checkPathAccess(src, FsAction.WRITE);
}
int[] oldReplication = new int[1];
Block[] fileBlocks;
fileBlocks = dir.setReplication(src, replication, oldReplication);
if (fileBlocks == null) // file not found or is a directory
return false;
int oldRepl = oldReplication[0];
if (oldRepl == replication) // the same replication
return true;
// update needReplication priority queues
for(int idx = 0; idx < fileBlocks.length; idx++)
blockManager.updateNeededReplications(fileBlocks[idx], 0, replication-oldRepl);
if (oldRepl > replication) {
// old replication > the new one; need to remove copies
LOG.info("Reducing replication for file " + src
+ ". New replication is " + replication);
for(int idx = 0; idx < fileBlocks.length; idx++)
blockManager.processOverReplicatedBlock(fileBlocks[idx], replication, null, null);
} else { // replication factor is increased
LOG.info("Increasing replication for file " + src
+ ". New replication is " + replication);
}
return true;
} finally {
writeUnlock();
}
}
  /**
   * Look up the preferred block size of a file, checking traverse
   * permission on the path when permissions are enabled.
   */
  long getPreferredBlockSize(String filename)
      throws IOException, UnresolvedLinkException {
    if (isPermissionEnabled) {
      checkTraverse(filename);
    }
    return dir.getPreferredBlockSize(filename);
  }
/*
* Verify that parent directory of src exists.
*/
private void verifyParentDir(String src) throws FileNotFoundException,
ParentNotDirectoryException, UnresolvedLinkException {
Path parent = new Path(src).getParent();
if (parent != null) {
INode[] pathINodes = dir.getExistingPathINodes(parent.toString());
INode parentNode = pathINodes[pathINodes.length - 1];
if (parentNode == null) {
throw new FileNotFoundException("Parent directory doesn't exist: "
+ parent.toString());
} else if (!parentNode.isDirectory() && !parentNode.isLink()) {
throw new ParentNotDirectoryException("Parent path is not a directory: "
+ parent.toString());
}
}
}
  /**
   * Create a new file entry in the namespace.
   *
   * For description of parameters and exceptions thrown see
   * {@link ClientProtocol#create()}
   */
  void startFile(String src, PermissionStatus permissions, String holder,
      String clientMachine, EnumSet<CreateFlag> flag, boolean createParent,
      short replication, long blockSize) throws AccessControlException,
      SafeModeException, FileAlreadyExistsException, UnresolvedLinkException,
      FileNotFoundException, ParentNotDirectoryException, IOException {
    startFileInternal(src, permissions, holder, clientMachine, flag,
        createParent, replication, blockSize);
    getEditLog().logSync();
    if (auditLog.isInfoEnabled() && isExternalInvocation()) {
      final HdfsFileStatus stat = dir.getFileInfo(src, false);
      logAuditEvent(UserGroupInformation.getCurrentUser(),
                    Server.getRemoteIp(),
                    "create", src, null, stat);
    }
  }
  /**
   * Create new or open an existing file for append.<p>
   *
   * In case of opening the file for append, the method returns the last
   * block of the file if this is a partial block, which can still be used
   * for writing more data. The client uses the returned block locations
   * to form the data pipeline for this block.<br>
   * The method returns null if the last block is full or if this is a
   * new file. The client then allocates a new block with the next call
   * using {@link NameNode#addBlock()}.<p>
   *
   * For description of parameters and exceptions thrown see
   * {@link ClientProtocol#create()}
   *
   * @return the last block locations if the block is partial or null otherwise
   */
  private LocatedBlock startFileInternal(String src,
      PermissionStatus permissions, String holder, String clientMachine,
      EnumSet<CreateFlag> flag, boolean createParent, short replication,
      long blockSize) throws SafeModeException, FileAlreadyExistsException,
      AccessControlException, UnresolvedLinkException, FileNotFoundException,
      ParentNotDirectoryException, IOException {
    writeLock();
    try {
      boolean overwrite = flag.contains(CreateFlag.OVERWRITE);
      boolean append = flag.contains(CreateFlag.APPEND);
      boolean create = flag.contains(CreateFlag.CREATE);
      if (NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* NameSystem.startFile: src=" + src
            + ", holder=" + holder
            + ", clientMachine=" + clientMachine
            + ", createParent=" + createParent
            + ", replication=" + replication
            + ", overwrite=" + overwrite
            + ", append=" + append);
      }
      if (isInSafeMode())
        // NOTE(review): message renders as "Cannot create file<src>" -- a
        // space before src appears to be missing; confirm before changing
        // (tests/tools may match the current text).
        throw new SafeModeException("Cannot create file" + src, safeMode);
      if (!DFSUtil.isValidName(src)) {
        throw new InvalidPathException(src);
      }
      // Verify that the destination does not exist as a directory already.
      boolean pathExists = dir.exists(src);
      if (pathExists && dir.isDir(src)) {
        throw new FileAlreadyExistsException("Cannot create file "+ src + "; already exists as a directory.");
      }
      if (isPermissionEnabled) {
        // Overwriting or appending to an existing path needs WRITE on the
        // path itself; creating a new path needs WRITE on the ancestor.
        if (append || (overwrite && pathExists)) {
          checkPathAccess(src, FsAction.WRITE);
        }
        else {
          checkAncestorAccess(src, FsAction.WRITE);
        }
      }
      if (!createParent) {
        verifyParentDir(src);
      }
      try {
        INode myFile = dir.getFileINode(src);
        // Throws if another live client already holds the lease on src.
        recoverLeaseInternal(myFile, src, holder, clientMachine, false);
        try {
          blockManager.verifyReplication(src, replication, clientMachine);
        } catch(IOException e) {
          throw new IOException("failed to create "+e.getMessage());
        }
        if (append) {
          if (myFile == null) {
            if(!create)
              throw new FileNotFoundException("failed to append to non-existent file "
                + src + " on client " + clientMachine);
            else {
              //append & create a nonexist file equals to overwrite
              // Recursive call with OVERWRITE only; still holds writeLock
              // (re-entrant acquisition).
              return startFileInternal(src, permissions, holder, clientMachine,
                  EnumSet.of(CreateFlag.OVERWRITE), createParent, replication, blockSize);
            }
          } else if (myFile.isDirectory()) {
            throw new IOException("failed to append to directory " + src
                                  +" on client " + clientMachine);
          }
        } else if (!dir.isValidToCreate(src)) {
          if (overwrite) {
            delete(src, true);
          } else {
            throw new IOException("failed to create file " + src
                                  +" on client " + clientMachine
                                  +" either because the filename is invalid or the file exists");
          }
        }
        DatanodeDescriptor clientNode =
          host2DataNodeMap.getDatanodeByHost(clientMachine);
        if (append) {
          //
          // Replace current node with a INodeUnderConstruction.
          // Recreate in-memory lease record.
          //
          INodeFile node = (INodeFile) myFile;
          INodeFileUnderConstruction cons = new INodeFileUnderConstruction(
                                          node.getLocalNameBytes(),
                                          node.getReplication(),
                                          node.getModificationTime(),
                                          node.getPreferredBlockSize(),
                                          node.getBlocks(),
                                          node.getPermissionStatus(),
                                          holder,
                                          clientMachine,
                                          clientNode);
          dir.replaceNode(src, node, cons);
          leaseManager.addLease(cons.getClientName(), src);
          // convert last block to under-construction
          LocatedBlock lb =
            blockManager.convertLastBlockToUnderConstruction(cons);
          if (lb != null && isBlockTokenEnabled) {
            lb.setBlockToken(blockTokenSecretManager.generateToken(lb.getBlock(),
                EnumSet.of(BlockTokenSecretManager.AccessMode.WRITE)));
          }
          // add append file record to log, record lease, etc.
          getEditLog().logOpenFile(src, cons);
          return lb;
        } else {
          // Now we can add the name to the filesystem. This file has no
          // blocks associated with it.
          //
          checkFsObjectLimit();
          // increment global generation stamp
          long genstamp = nextGenerationStamp();
          INodeFileUnderConstruction newNode = dir.addFile(src, permissions,
              replication, blockSize, holder, clientMachine, clientNode, genstamp);
          if (newNode == null) {
            throw new IOException("DIR* NameSystem.startFile: " +
                                  "Unable to add file to namespace.");
          }
          leaseManager.addLease(newNode.getClientName(), src);
          // record file record in log, record new generation stamp
          getEditLog().logOpenFile(src, newNode);
          if (NameNode.stateChangeLog.isDebugEnabled()) {
            NameNode.stateChangeLog.debug("DIR* NameSystem.startFile: "
                                       +"add "+src+" to namespace for "+holder);
          }
        }
      } catch (IOException ie) {
        NameNode.stateChangeLog.warn("DIR* NameSystem.startFile: "
                                     +ie.getMessage());
        throw ie;
      }
    } finally {
      writeUnlock();
    }
    // Reached only on the new-file path: no partial last block to return.
    return null;
  }
  /**
   * Recover lease;
   * Immediately revoke the lease of the current lease holder and start lease
   * recovery so that the file can be forced to be closed.
   *
   * @param src the path of the file to start lease recovery
   * @param holder the lease holder's name
   * @param clientMachine the client machine's name
   * @return true if the file is already closed
   * @throws IOException if in safe mode, the path is invalid, the file does
   *         not exist, or the caller lacks WRITE access
   */
  synchronized boolean recoverLease(String src, String holder, String clientMachine)
  throws IOException {
    if (isInSafeMode()) {
      throw new SafeModeException(
          "Cannot recover the lease of " + src, safeMode);
    }
    if (!DFSUtil.isValidName(src)) {
      throw new IOException("Invalid file name: " + src);
    }
    INode inode = dir.getFileINode(src);
    if (inode == null) {
      throw new FileNotFoundException("File not found " + src);
    }
    if (!inode.isUnderConstruction()) {
      // Nothing to recover: the file is already closed.
      return true;
    }
    if (isPermissionEnabled) {
      checkPathAccess(src, FsAction.WRITE);
    }
    // force=true: revoke immediately rather than waiting for the soft limit.
    recoverLeaseInternal(inode, src, holder, clientMachine, true);
    return false;
  }
  /**
   * Check and, if necessary, recover the lease on a file that is under
   * construction.
   *
   * A no-op when {@code fileInode} is null or not under construction.
   * Otherwise, when {@code force} is true the lease is released
   * immediately; when false, recovery starts only once the holder's soft
   * limit has expired, and otherwise an exception describing the
   * conflicting writer is thrown.
   *
   * @param fileInode inode of the target file (may be null)
   * @param src path of the file
   * @param holder client requesting the create/recovery
   * @param clientMachine requesting client's machine, used in messages
   * @param force true to revoke the current lease unconditionally
   * @throws AlreadyBeingCreatedException if another writer (or the same
   *         holder) still legitimately owns the file
   * @throws RecoveryInProgressException if block recovery is already running
   * @throws IOException on other errors from lease release
   */
  private void recoverLeaseInternal(INode fileInode,
      String src, String holder, String clientMachine, boolean force)
  throws IOException {
    if (fileInode != null && fileInode.isUnderConstruction()) {
      INodeFileUnderConstruction pendingFile = (INodeFileUnderConstruction) fileInode;
      //
      // If the file is under construction , then it must be in our
      // leases. Find the appropriate lease record.
      //
      Lease lease = leaseManager.getLease(holder);
      //
      // We found the lease for this file. And surprisingly the original
      // holder is trying to recreate this file. This should never occur.
      //
      if (!force && lease != null) {
        Lease leaseFile = leaseManager.getLeaseByPath(src);
        if ((leaseFile != null && leaseFile.equals(lease)) ||
            lease.getHolder().equals(holder)) {
          throw new AlreadyBeingCreatedException(
            "failed to create file " + src + " for " + holder +
            " on client " + clientMachine +
            " because current leaseholder is trying to recreate file.");
        }
      }
      //
      // Find the original holder.
      //
      lease = leaseManager.getLease(pendingFile.getClientName());
      if (lease == null) {
        throw new AlreadyBeingCreatedException(
          "failed to create file " + src + " for " + holder +
          " on client " + clientMachine +
          " because pendingCreates is non-null but no leases found.");
      }
      if (force) {
        // close now: no need to wait for soft lease expiration and
        // close only the file src
        LOG.info("recoverLease: recover lease " + lease + ", src=" + src +
          " from client " + pendingFile.getClientName());
        internalReleaseLease(lease, src, holder);
      } else {
        assert lease.getHolder().equals(pendingFile.getClientName()) :
          "Current lease holder " + lease.getHolder() +
          " does not match file creator " + pendingFile.getClientName();
        //
        // If the original holder has not renewed in the last SOFTLIMIT
        // period, then start lease recovery.
        //
        if (lease.expiredSoftLimit()) {
          LOG.info("startFile: recover lease " + lease + ", src=" + src +
              " from client " + pendingFile.getClientName());
          boolean isClosed = internalReleaseLease(lease, src, null);
          if(!isClosed)
            throw new RecoveryInProgressException(
                "Failed to close file " + src +
                ". Lease recovery is in progress. Try again later.");
        } else {
          // Soft limit not yet expired: report why the create must fail.
          BlockInfoUnderConstruction lastBlock=pendingFile.getLastBlock();
          if(lastBlock != null && lastBlock.getBlockUCState() ==
            BlockUCState.UNDER_RECOVERY) {
            throw new RecoveryInProgressException(
              "Recovery in progress, file [" + src + "], " +
              "lease owner [" + lease.getHolder() + "]");
            } else {
              throw new AlreadyBeingCreatedException(
                "Failed to create file [" + src + "] for [" + holder +
                "] on client [" + clientMachine +
                "], because this file is already being created by [" +
                pendingFile.getClientName() + "] on [" +
                pendingFile.getClientMachine() + "]");
            }
        }
      }
    }
  }
/**
* Append to an existing file in the namespace.
*/
LocatedBlock appendFile(String src, String holder, String clientMachine)
throws AccessControlException, SafeModeException,
FileAlreadyExistsException, FileNotFoundException,
ParentNotDirectoryException, IOException {
if (supportAppends == false) {
throw new UnsupportedOperationException("Append to hdfs not supported." +
" Please refer to dfs.support.append configuration parameter.");
}
LocatedBlock lb =
startFileInternal(src, null, holder, clientMachine,
EnumSet.of(CreateFlag.APPEND),
false, (short)blockManager.maxReplication, (long)0);
getEditLog().logSync();
if (lb != null) {
if (NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("DIR* NameSystem.appendFile: file "
+src+" for "+holder+" at "+clientMachine
+" block " + lb.getBlock()
+" block size " + lb.getBlock().getNumBytes());
}
}
if (auditLog.isInfoEnabled() && isExternalInvocation()) {
logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"append", src, null, null);
}
return lb;
}
  /**
   * The client would like to obtain an additional block for the indicated
   * filename (which is being written-to). Return an array that consists
   * of the block, plus a set of machines. The first on this list should
   * be where the client writes data. Subsequent items in the list must
   * be provided in the connection to the first datanode.
   *
   * Make sure the previous blocks have been reported by datanodes and
   * are replicated. Will return an empty 2-elt array if we want the
   * client to "try again later".
   *
   * Note the locking protocol: the write lock is released while target
   * datanodes are chosen (an expensive operation) and reacquired for the
   * actual allocation, so the lease and file progress are re-validated
   * in the second locked section.
   */
  public LocatedBlock getAdditionalBlock(String src,
                                         String clientName,
                                         Block previous,
                                         HashMap<Node, Node> excludedNodes
                                         )
      throws LeaseExpiredException, NotReplicatedYetException,
      QuotaExceededException, SafeModeException, UnresolvedLinkException,
      IOException {
    long fileLength, blockSize;
    int replication;
    DatanodeDescriptor clientNode = null;
    Block newBlock = null;
    if(NameNode.stateChangeLog.isDebugEnabled()) {
      NameNode.stateChangeLog.debug(
          "BLOCK* NameSystem.getAdditionalBlock: file "
          +src+" for "+clientName);
    }
    writeLock();
    try {
      if (isInSafeMode()) {
        throw new SafeModeException("Cannot add block to " + src, safeMode);
      }
      // have we exceeded the configured limit of fs objects.
      checkFsObjectLimit();
      INodeFileUnderConstruction pendingFile = checkLease(src, clientName);
      // commit the last block and complete it if it has minimum replicas
      blockManager.commitOrCompleteLastBlock(pendingFile, previous);
      //
      // If we fail this, bad things happen!
      //
      if (!checkFileProgress(pendingFile, false)) {
        throw new NotReplicatedYetException("Not replicated yet:" + src);
      }
      // Snapshot the values needed for target selection outside the lock.
      fileLength = pendingFile.computeContentSummary().getLength();
      blockSize = pendingFile.getPreferredBlockSize();
      clientNode = pendingFile.getClientNode();
      replication = (int)pendingFile.getReplication();
    } finally {
      writeUnlock();
    }
    // choose targets for the new block to be allocated.
    // (Done without the lock; placement is expensive.)
    DatanodeDescriptor targets[] = blockManager.replicator.chooseTarget(
        src, replication, clientNode, excludedNodes, blockSize);
    if (targets.length < blockManager.minReplication) {
      throw new IOException("File " + src + " could only be replicated to " +
                            targets.length + " nodes, instead of " +
                            blockManager.minReplication);
    }
    // Allocate a new block and record it in the INode.
    writeLock();
    try {
      // Re-validate: the lease or file may have changed while unlocked.
      INode[] pathINodes = dir.getExistingPathINodes(src);
      int inodesLen = pathINodes.length;
      checkLease(src, clientName, pathINodes[inodesLen-1]);
      INodeFileUnderConstruction pendingFile = (INodeFileUnderConstruction)
                                                pathINodes[inodesLen - 1];
      if (!checkFileProgress(pendingFile, false)) {
        throw new NotReplicatedYetException("Not replicated yet:" + src);
      }
      // allocate new block record block locations in INode.
      newBlock = allocateBlock(src, pathINodes, targets);
      for (DatanodeDescriptor dn : targets) {
        dn.incBlocksScheduled();
      }
    } finally {
      writeUnlock();
    }
    // Create next block
    LocatedBlock b = new LocatedBlock(newBlock, targets, fileLength);
    if (isBlockTokenEnabled) {
      b.setBlockToken(blockTokenSecretManager.generateToken(b.getBlock(),
          EnumSet.of(BlockTokenSecretManager.AccessMode.WRITE)));
    }
    return b;
  }
/**
* The client would like to let go of the given block
*/
public boolean abandonBlock(Block b, String src, String holder)
throws LeaseExpiredException, FileNotFoundException,
UnresolvedLinkException, IOException {
writeLock();
try {
if (isInSafeMode()) {
throw new SafeModeException("Cannot addbandon block " + b +
" for file "+src, safeMode);
}
//
// Remove the block from the pending creates list
//
if(NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("BLOCK* NameSystem.abandonBlock: "
+b+"of file "+src);
}
INodeFileUnderConstruction file = checkLease(src, holder);
dir.removeBlock(src, file, b);
if(NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("BLOCK* NameSystem.abandonBlock: "
+ b
+ " is removed from pendingCreates");
}
return true;
} finally {
writeUnlock();
}
}
// make sure that we still have the lease on this file.
private INodeFileUnderConstruction checkLease(String src, String holder)
throws LeaseExpiredException, UnresolvedLinkException {
INodeFile file = dir.getFileINode(src);
checkLease(src, holder, file);
return (INodeFileUnderConstruction)file;
}
private void checkLease(String src, String holder, INode file)
throws LeaseExpiredException {
if (file == null || file.isDirectory()) {
Lease lease = leaseManager.getLease(holder);
throw new LeaseExpiredException("No lease on " + src +
" File does not exist. " +
(lease != null ? lease.toString() :
"Holder " + holder +
" does not have any open files."));
}
if (!file.isUnderConstruction()) {
Lease lease = leaseManager.getLease(holder);
throw new LeaseExpiredException("No lease on " + src +
" File is not open for writing. " +
(lease != null ? lease.toString() :
"Holder " + holder +
" does not have any open files."));
}
INodeFileUnderConstruction pendingFile = (INodeFileUnderConstruction)file;
if (holder != null && !pendingFile.getClientName().equals(holder)) {
throw new LeaseExpiredException("Lease mismatch on " + src + " owned by "
+ pendingFile.getClientName() + " but is accessed by " + holder);
}
}
/**
* Complete in-progress write to the given file.
* @return true if successful, false if the client should continue to retry
* (e.g if not all blocks have reached minimum replication yet)
* @throws IOException on error (eg lease mismatch, file not open, file deleted)
*/
public boolean completeFile(String src, String holder, Block last)
throws SafeModeException, UnresolvedLinkException, IOException {
boolean success = completeFileInternal(src, holder, last);
getEditLog().logSync();
return success ;
}
  /**
   * Do the work of closing a file under construction: commit its last
   * block, verify all blocks are complete, and finalize the inode.
   *
   * @param src path of the file being closed
   * @param holder lease holder; must still own the lease
   * @param last the client's view of the file's last block, to be committed
   * @return true if the file was closed; false if blocks are still below
   *         minimum replication and the client should retry
   */
  private boolean completeFileInternal(String src,
      String holder, Block last) throws SafeModeException,
      UnresolvedLinkException, IOException {
    writeLock();
    try {
      if(NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* NameSystem.completeFile: " +
            src + " for " + holder);
      }
      if (isInSafeMode())
        throw new SafeModeException("Cannot complete file " + src, safeMode);
      INodeFileUnderConstruction pendingFile = checkLease(src, holder);
      // commit the last block and complete it if it has minimum replicas
      blockManager.commitOrCompleteLastBlock(pendingFile, last);
      if (!checkFileProgress(pendingFile, true)) {
        return false;
      }
      finalizeINodeFileUnderConstruction(src, pendingFile);
      NameNode.stateChangeLog.info("DIR* NameSystem.completeFile: file " + src
                                    + " is closed by " + holder);
      return true;
    } finally {
      writeUnlock();
    }
  }
/**
* Check all blocks of a file. If any blocks are lower than their intended
* replication factor, then insert them into neededReplication
*/
private void checkReplicationFactor(INodeFile file) {
int numExpectedReplicas = file.getReplication();
Block[] pendingBlocks = file.getBlocks();
int nrBlocks = pendingBlocks.length;
for (int i = 0; i < nrBlocks; i++) {
blockManager.checkReplication(pendingBlocks[i], numExpectedReplicas);
}
}
  /** Source of candidate block ids; collisions are re-drawn in allocateBlock. */
  static Random randBlockId = new Random();
/**
* Allocate a block at the given pending filename
*
* @param src path to the file
* @param inodes INode representing each of the components of src.
* <code>inodes[inodes.length-1]</code> is the INode for the file.
*
* @throws QuotaExceededException If addition of block exceeds space quota
*/
private Block allocateBlock(String src, INode[] inodes,
DatanodeDescriptor targets[]) throws QuotaExceededException {
Block b = new Block(FSNamesystem.randBlockId.nextLong(), 0, 0);
while(isValidBlock(b)) {
b.setBlockId(FSNamesystem.randBlockId.nextLong());
}
b.setGenerationStamp(getGenerationStamp());
b = dir.addBlock(src, inodes, b, targets);
NameNode.stateChangeLog.info("BLOCK* NameSystem.allocateBlock: "
+src+ ". "+b);
return b;
}
/**
* Check that the indicated file's blocks are present and
* replicated. If not, return false. If checkall is true, then check
* all blocks, otherwise check only penultimate block.
*/
boolean checkFileProgress(INodeFile v, boolean checkall) {
writeLock();
try {
if (checkall) {
//
// check all blocks of the file.
//
for (BlockInfo block: v.getBlocks()) {
if (!block.isComplete()) {
LOG.info("BLOCK* NameSystem.checkFileProgress: "
+ "block " + block + " has not reached minimal replication "
+ blockManager.minReplication);
return false;
}
}
} else {
//
// check the penultimate block of this file
//
BlockInfo b = v.getPenultimateBlock();
if (b != null && !b.isComplete()) {
LOG.info("BLOCK* NameSystem.checkFileProgress: "
+ "block " + b + " has not reached minimal replication "
+ blockManager.minReplication);
return false;
}
}
return true;
} finally {
writeUnlock();
}
}
  /**
   * Mark the block belonging to datanode as corrupt
   * @param blk Block to be marked as corrupt
   * @param dn Datanode which holds the corrupt replica
   * @throws IOException propagated from the block manager
   */
  public void markBlockAsCorrupt(Block blk, DatanodeInfo dn)
    throws IOException {
    writeLock();
    try {
      // Delegates entirely to the block manager under the namesystem lock.
      blockManager.findAndMarkBlockAsCorrupt(blk, dn);
    } finally {
      writeUnlock();
    }
  }
////////////////////////////////////////////////////////////////
// Here's how to handle block-copy failure during client write:
// -- As usual, the client's write should result in a streaming
// backup write to a k-machine sequence.
// -- If one of the backup machines fails, no worries. Fail silently.
// -- Before client is allowed to close and finalize file, make sure
// that the blocks are backed up. Namenode may have to issue specific backup
// commands to make up for earlier datanode failures. Once all copies
// are made, edit namespace and return to client.
////////////////////////////////////////////////////////////////
/**
* Change the indicated filename.
* @deprecated Use {@link #renameTo(String, String, Options.Rename...)} instead.
*/
@Deprecated
boolean renameTo(String src, String dst)
throws IOException, UnresolvedLinkException {
boolean status = renameToInternal(src, dst);
getEditLog().logSync();
if (status && auditLog.isInfoEnabled() && isExternalInvocation()) {
final HdfsFileStatus stat = dir.getFileInfo(dst, false);
logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"rename", src, dst, stat);
}
return status;
}
  /** @deprecated See {@link #renameTo(String, String)} */
  @Deprecated
  private boolean renameToInternal(String src, String dst)
    throws IOException, UnresolvedLinkException {
    writeLock();
    try {
      if(NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* NameSystem.renameTo: " + src +
            " to " + dst);
      }
      if (isInSafeMode())
        throw new SafeModeException("Cannot rename " + src, safeMode);
      if (!DFSUtil.isValidName(dst)) {
        throw new IOException("Invalid name: " + dst);
      }
      if (isPermissionEnabled) {
        //We should not be doing this.  This is move() not renameTo().
        //but for now,
        // If dst is an existing directory the source is moved INTO it, so
        // the access check targets dst/<basename-of-src>.
        String actualdst = dir.isDir(dst)?
            dst + Path.SEPARATOR + new Path(src).getName(): dst;
        checkParentAccess(src, FsAction.WRITE);
        checkAncestorAccess(actualdst, FsAction.WRITE);
      }
      // Capture dst's pre-rename status: changeLease needs it to decide
      // how to rewrite lease paths.
      HdfsFileStatus dinfo = dir.getFileInfo(dst, false);
      if (dir.renameTo(src, dst)) {
        changeLease(src, dst, dinfo);     // update lease with new filename
        return true;
      }
      return false;
    } finally {
      writeUnlock();
    }
  }
/** Rename src to dst */
void renameTo(String src, String dst, Options.Rename... options)
throws IOException, UnresolvedLinkException {
renameToInternal(src, dst, options);
getEditLog().logSync();
if (auditLog.isInfoEnabled() && isExternalInvocation()) {
StringBuilder cmd = new StringBuilder("rename options=");
for (Rename option : options) {
cmd.append(option.value()).append(" ");
}
final HdfsFileStatus stat = dir.getFileInfo(dst, false);
logAuditEvent(UserGroupInformation.getCurrentUser(), Server.getRemoteIp(),
cmd.toString(), src, dst, stat);
}
}
  /**
   * Locked implementation of rename-with-options. Unlike the deprecated
   * variant, a failed rename surfaces as an exception from
   * {@code dir.renameTo} rather than a boolean.
   */
  private void renameToInternal(String src, String dst,
      Options.Rename... options) throws IOException {
    writeLock();
    try {
      if (NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* NameSystem.renameTo: with options - "
            + src + " to " + dst);
      }
      if (isInSafeMode()) {
        throw new SafeModeException("Cannot rename " + src, safeMode);
      }
      if (!DFSUtil.isValidName(dst)) {
        throw new InvalidPathException("Invalid name: " + dst);
      }
      if (isPermissionEnabled) {
        checkParentAccess(src, FsAction.WRITE);
        checkAncestorAccess(dst, FsAction.WRITE);
      }
      // Capture dst's pre-rename status; changeLease uses it to decide how
      // to rewrite lease paths.
      HdfsFileStatus dinfo = dir.getFileInfo(dst, false);
      dir.renameTo(src, dst, options);
      changeLease(src, dst, dinfo); // update lease with new filename
    } finally {
      writeUnlock();
    }
  }
  /**
   * Remove the indicated file from namespace.
   *
   * @see ClientProtocol#delete(String, boolean) for detailed description and
   * description of exceptions
   */
  public boolean delete(String src, boolean recursive)
      throws AccessControlException, SafeModeException,
             UnresolvedLinkException, IOException {
    // NOTE(review): this non-recursive emptiness check runs before the
    // write lock is taken in deleteInternal -- looks racy with concurrent
    // creates under src; confirm whether that window is acceptable here.
    if ((!recursive) && (!dir.isDirEmpty(src))) {
      throw new IOException(src + " is non empty");
    }
    if (NameNode.stateChangeLog.isDebugEnabled()) {
      NameNode.stateChangeLog.debug("DIR* NameSystem.delete: " + src);
    }
    boolean status = deleteInternal(src, true);
    if (status && auditLog.isInfoEnabled() && isExternalInvocation()) {
      logAuditEvent(UserGroupInformation.getCurrentUser(),
                    Server.getRemoteIp(),
                    "delete", src, null, null);
    }
    return status;
  }
  /**
   * Remove a file/directory from the namespace.
   * <p>
   * For large directories, deletion is incremental. The blocks under
   * the directory are collected and deleted a small number at a time holding
   * the {@link FSNamesystem} lock.
   * <p>
   * For small directory or file the deletion is done in one shot.
   *
   * @param src path to delete
   * @param enforcePermission whether to apply the permission check
   * @return true if the target was unlinked (blocks may still be deleted
   *         incrementally after return)
   * @see ClientProtocol#delete(String, boolean) for description of exceptions
   */
  private boolean deleteInternal(String src, boolean enforcePermission)
      throws AccessControlException, SafeModeException,
             UnresolvedLinkException, IOException{
    boolean deleteNow = false;
    ArrayList<Block> collectedBlocks = new ArrayList<Block>();
    writeLock();
    try {
      if (isInSafeMode()) {
        throw new SafeModeException("Cannot delete " + src, safeMode);
      }
      if (enforcePermission && isPermissionEnabled) {
        checkPermission(src, false, null, FsAction.WRITE, null, FsAction.ALL);
      }
      // Unlink the target directory from directory tree
      if (!dir.delete(src, collectedBlocks)) {
        return false;
      }
      deleteNow = collectedBlocks.size() <= BLOCK_DELETION_INCREMENT;
      if (deleteNow) { // Perform small deletes right away
        removeBlocks(collectedBlocks);
      }
    } finally {
      writeUnlock();
    }
    // Log directory deletion to editlog
    getEditLog().logSync();
    if (!deleteNow) {
      // Large delete: remove blocks in increments, reacquiring the lock
      // per batch so other operations can interleave.
      removeBlocks(collectedBlocks); // Incremental deletion of blocks
    }
    collectedBlocks.clear();
    if (NameNode.stateChangeLog.isDebugEnabled()) {
      NameNode.stateChangeLog.debug("DIR* Namesystem.delete: "
        + src +" is removed");
    }
    return true;
  }
/** From the given list, incrementally remove the blocks from blockManager */
private void removeBlocks(List<Block> blocks) {
int start = 0;
int end = 0;
while (start < blocks.size()) {
end = BLOCK_DELETION_INCREMENT + start;
end = end > blocks.size() ? blocks.size() : end;
writeLock();
try {
for (int i=start; i<end; i++) {
blockManager.removeBlock(blocks.get(i));
}
} finally {
writeUnlock();
}
start = end;
}
}
void removePathAndBlocks(String src, List<Block> blocks) {
leaseManager.removeLeaseWithPrefixPath(src);
if (blocks == null) {
return;
}
for(Block b : blocks) {
blockManager.removeBlock(b);
}
}
  /**
   * Get the file info for a specific file.
   *
   * @param src The string representation of the path to the file
   * @param resolveLink whether to throw UnresolvedLinkException
   *        if src refers to a symlink
   *
   * @throws AccessControlException if access is denied
   * @throws UnresolvedLinkException if a symlink is encountered.
   *
   * @return object containing information regarding the file
   *         or null if file not found
   */
  HdfsFileStatus getFileInfo(String src, boolean resolveLink)
    throws AccessControlException, UnresolvedLinkException {
    if (!DFSUtil.isValidName(src)) {
      throw new InvalidPathException("Invalid file name: " + src);
    }
    if (isPermissionEnabled) {
      // Only traverse (execute) permission is required to stat a path.
      checkTraverse(src);
    }
    return dir.getFileInfo(src, resolveLink);
  }
/**
* Create all the necessary directories
*/
public boolean mkdirs(String src, PermissionStatus permissions,
boolean createParent) throws IOException, UnresolvedLinkException {
boolean status = mkdirsInternal(src, permissions, createParent);
getEditLog().logSync();
if (status && auditLog.isInfoEnabled() && isExternalInvocation()) {
final HdfsFileStatus stat = dir.getFileInfo(src, false);
logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"mkdirs", src, null, stat);
}
return status;
}
  /**
   * Create all the necessary directories.
   *
   * @param src directory path to create
   * @param permissions permissions to apply to created directories
   * @param createParent if false, the immediate parent must already exist
   * @return true — including when the directory already exists (callers
   *         historically expect success in that case)
   */
  private boolean mkdirsInternal(String src,
      PermissionStatus permissions, boolean createParent)
      throws IOException, UnresolvedLinkException {
    writeLock();
    try {
      if(NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* NameSystem.mkdirs: " + src);
      }
      // Traverse check happens first so an existing-directory probe below
      // cannot leak information about paths the caller may not traverse.
      if (isPermissionEnabled) {
        checkTraverse(src);
      }
      if (dir.isDir(src)) {
        // all the users of mkdirs() are used to expect 'true' even if
        // a new directory is not created.
        return true;
      }
      if (isInSafeMode())
        throw new SafeModeException("Cannot create directory " + src, safeMode);
      if (!DFSUtil.isValidName(src)) {
        throw new InvalidPathException(src);
      }
      if (isPermissionEnabled) {
        checkAncestorAccess(src, FsAction.WRITE);
      }
      if (!createParent) {
        verifyParentDir(src);
      }
      // validate that we have enough inodes. This is, at best, a
      // heuristic because the mkdirs() operation migth need to
      // create multiple inodes.
      checkFsObjectLimit();
      if (!dir.mkdirs(src, permissions, false, now())) {
        throw new IOException("Failed to create directory: " + src);
      }
      return true;
    } finally {
      writeUnlock();
    }
  }
  /**
   * Get the content summary (file/dir/quota counts, space consumed) for
   * {@code src}, enforcing READ_EXECUTE on the subtree when permissions
   * are enabled.
   */
  ContentSummary getContentSummary(String src) throws AccessControlException,
      FileNotFoundException, UnresolvedLinkException {
    if (isPermissionEnabled) {
      checkPermission(src, false, null, null, null, FsAction.READ_EXECUTE);
    }
    return dir.getContentSummary(src);
  }
  /**
   * Set the namespace quota and diskspace quota for a directory.
   * See {@link ClientProtocol#setQuota(String, long, long)} for the
   * contract.
   */
  void setQuota(String path, long nsQuota, long dsQuota)
      throws IOException, UnresolvedLinkException {
    writeLock();
    try {
      if (isInSafeMode())
        throw new SafeModeException("Cannot set quota on " + path, safeMode);
      // Note: the superuser check is skipped entirely when permissions are
      // disabled, matching the rest of this class's permission handling.
      if (isPermissionEnabled) {
        checkSuperuserPrivilege();
      }
      dir.setQuota(path, nsQuota, dsQuota);
    } finally {
      writeUnlock();
    }
    // Sync outside the lock: quota change is already applied in memory.
    getEditLog().logSync();
  }
  /** Persist all metadata about this file.
   * Verifies the caller still holds the lease, then persists the file's
   * block list and syncs the edit log.
   * @param src The string representation of the path
   * @param clientName The string representation of the client
   * @throws IOException if path does not exist
   */
  void fsync(String src, String clientName)
      throws IOException, UnresolvedLinkException {
    NameNode.stateChangeLog.info("BLOCK* NameSystem.fsync: file "
                                  + src + " for " + clientName);
    writeLock();
    try {
      if (isInSafeMode()) {
        throw new SafeModeException("Cannot fsync file " + src, safeMode);
      }
      INodeFileUnderConstruction pendingFile = checkLease(src, clientName);
      dir.persistBlocks(src, pendingFile);
    } finally {
      writeUnlock();
    }
    // Force the persisted block list to disk outside the lock.
    getEditLog().logSync();
  }
  /**
   * Move a file that is being written to be immutable.
   * @param src The filename
   * @param lease The lease for the client creating the file
   * @param recoveryLeaseHolder reassign lease to this holder if the last block
   *        needs recovery; keep current holder if null.
   * @throws AlreadyBeingCreatedException if file is waiting to achieve minimal
   *         replication;<br>
   *         RecoveryInProgressException if lease recovery is in progress.<br>
   *         IOException in case of an error.
   * @return true if file has been successfully finalized and closed or
   *         false if block recovery has been initiated
   */
  boolean internalReleaseLease(Lease lease, String src,
      String recoveryLeaseHolder) throws AlreadyBeingCreatedException,
      IOException, UnresolvedLinkException {
    LOG.info("Recovering lease=" + lease + ", src=" + src);
    INodeFile iFile = dir.getFileINode(src);
    if (iFile == null) {
      final String message = "DIR* NameSystem.internalReleaseLease: "
        + "attempt to release a create lock on "
        + src + " file does not exist.";
      NameNode.stateChangeLog.warn(message);
      throw new IOException(message);
    }
    if (!iFile.isUnderConstruction()) {
      final String message = "DIR* NameSystem.internalReleaseLease: "
        + "attempt to release a create lock on "
        + src + " but file is already closed.";
      NameNode.stateChangeLog.warn(message);
      throw new IOException(message);
    }
    INodeFileUnderConstruction pendingFile = (INodeFileUnderConstruction) iFile;
    int nrBlocks = pendingFile.numBlocks();
    BlockInfo[] blocks = pendingFile.getBlocks();
    // Count the leading run of COMPLETE blocks; curBlock ends up holding
    // the first incomplete block (if any).
    int nrCompleteBlocks;
    BlockInfo curBlock = null;
    for(nrCompleteBlocks = 0; nrCompleteBlocks < nrBlocks; nrCompleteBlocks++) {
      curBlock = blocks[nrCompleteBlocks];
      if(!curBlock.isComplete())
        break;
      assert blockManager.checkMinReplication(curBlock) :
        "A COMPLETE block is not minimally replicated in " + src;
    }
    // If there are no incomplete blocks associated with this file,
    // then reap lease immediately and close the file.
    if(nrCompleteBlocks == nrBlocks) {
      finalizeINodeFileUnderConstruction(src, pendingFile);
      NameNode.stateChangeLog.warn("BLOCK*"
        + " internalReleaseLease: All existing blocks are COMPLETE,"
        + " lease removed, file closed.");
      return true;  // closed!
    }
    // Only the last and the penultimate blocks may be in non COMPLETE state.
    // If the penultimate block is not COMPLETE, then it must be COMMITTED.
    if(nrCompleteBlocks < nrBlocks - 2 ||
       nrCompleteBlocks == nrBlocks - 2 &&
         curBlock.getBlockUCState() != BlockUCState.COMMITTED) {
      // NOTE(review): this message appears copy-pasted from the
      // already-closed case above; presumably it should describe the
      // invalid block-state situation instead -- confirm upstream.
      final String message = "DIR* NameSystem.internalReleaseLease: "
        + "attempt to release a create lock on "
        + src + " but file is already closed.";
      NameNode.stateChangeLog.warn(message);
      throw new IOException(message);
    }
    // now we know that the last block is not COMPLETE, and
    // that the penultimate block if exists is either COMPLETE or COMMITTED
    BlockInfoUnderConstruction lastBlock = pendingFile.getLastBlock();
    BlockUCState lastBlockState = lastBlock.getBlockUCState();
    BlockInfo penultimateBlock = pendingFile.getPenultimateBlock();
    boolean penultimateBlockMinReplication;
    BlockUCState penultimateBlockState;
    if (penultimateBlock == null) {
      penultimateBlockState = BlockUCState.COMPLETE;
      // If penultimate block doesn't exist then its minReplication is met
      penultimateBlockMinReplication = true;
    } else {
      // NOTE(review): the state is assumed COMMITTED rather than read from
      // the block; the check above only guarantees this when the block is
      // not COMPLETE. Confirm the assumption is intended.
      penultimateBlockState = BlockUCState.COMMITTED;
      penultimateBlockMinReplication =
        blockManager.checkMinReplication(penultimateBlock);
    }
    assert penultimateBlockState == BlockUCState.COMPLETE ||
           penultimateBlockState == BlockUCState.COMMITTED :
           "Unexpected state of penultimate block in " + src;
    switch(lastBlockState) {
    case COMPLETE:
      assert false : "Already checked that the last block is incomplete";
      break;
    case COMMITTED:
      // Close file if committed blocks are minimally replicated
      if(penultimateBlockMinReplication &&
          blockManager.checkMinReplication(lastBlock)) {
        finalizeINodeFileUnderConstruction(src, pendingFile);
        NameNode.stateChangeLog.warn("BLOCK*"
          + " internalReleaseLease: Committed blocks are minimally replicated,"
          + " lease removed, file closed.");
        return true;  // closed!
      }
      // Cannot close file right now, since some blocks 
      // are not yet minimally replicated.
      // This may potentially cause infinite loop in lease recovery
      // if there are no valid replicas on data-nodes.
      String message = "DIR* NameSystem.internalReleaseLease: " +
          "Failed to release lease for file " + src +
          ". Committed blocks are waiting to be minimally replicated." +
          " Try again later.";
      NameNode.stateChangeLog.warn(message);
      throw new AlreadyBeingCreatedException(message);
    case UNDER_CONSTRUCTION:
    case UNDER_RECOVERY:
      // setup the last block locations from the blockManager if not known
      if(lastBlock.getNumExpectedLocations() == 0)
        lastBlock.setExpectedLocations(blockManager.getNodes(lastBlock));
      // start recovery of the last block for this file
      long blockRecoveryId = nextGenerationStamp();
      lease = reassignLease(lease, src, recoveryLeaseHolder, pendingFile);
      lastBlock.initializeBlockRecovery(blockRecoveryId);
      leaseManager.renewLease(lease);
      // Cannot close file right now, since the last block requires recovery.
      // This may potentially cause infinite loop in lease recovery
      // if there are no valid replicas on data-nodes.
      NameNode.stateChangeLog.warn(
                "DIR* NameSystem.internalReleaseLease: " +
                "File " + src + " has not been closed." +
               " Lease recovery is in progress. " +
                "RecoveryId = " + blockRecoveryId + " for block " + lastBlock);
      break;
    }
    return false;
  }
Lease reassignLease(Lease lease, String src, String newHolder,
INodeFileUnderConstruction pendingFile) {
if(newHolder == null)
return lease;
pendingFile.setClientName(newHolder);
return leaseManager.reassignLease(lease, src, newHolder);
}
  /**
   * Convert an under-construction file into a finalized one: release the
   * writer's lease, swap the pending INode for a permanent one in the
   * directory tree, and persist the block list.
   * Note: the lease is removed first, then the INode is replaced.
   */
  private void finalizeINodeFileUnderConstruction(String src,
      INodeFileUnderConstruction pendingFile)
      throws IOException, UnresolvedLinkException {
    leaseManager.removeLease(pendingFile.getClientName(), src);
    // The file is no longer pending.
    // Create permanent INode, update blocks
    INodeFile newFile = pendingFile.convertToInodeFile();
    dir.replaceNode(src, pendingFile, newFile);
    // close file and persist block allocations for this file
    dir.closeFile(src, newFile);
    checkReplicationFactor(newFile);
  }
void commitBlockSynchronization(Block lastblock,
long newgenerationstamp, long newlength,
boolean closeFile, boolean deleteblock, DatanodeID[] newtargets)
throws IOException, UnresolvedLinkException {
String src = "";
writeLock();
try {
LOG.info("commitBlockSynchronization(lastblock=" + lastblock
+ ", newgenerationstamp=" + newgenerationstamp
+ ", newlength=" + newlength
+ ", newtargets=" + Arrays.asList(newtargets)
+ ", closeFile=" + closeFile
+ ", deleteBlock=" + deleteblock
+ ")");
if (isInSafeMode()) {
throw new SafeModeException("Cannot commitBlockSynchronization "
+ lastblock, safeMode);
}
final BlockInfo storedBlock = blockManager.getStoredBlock(lastblock);
if (storedBlock == null) {
throw new IOException("Block (=" + lastblock + ") not found");
}
INodeFile iFile = storedBlock.getINode();
if (!iFile.isUnderConstruction() || storedBlock.isComplete()) {
throw new IOException("Unexpected block (=" + lastblock
+ ") since the file (=" + iFile.getLocalName()
+ ") is not under construction");
}
long recoveryId =
((BlockInfoUnderConstruction)storedBlock).getBlockRecoveryId();
if(recoveryId != newgenerationstamp) {
throw new IOException("The recovery id " + newgenerationstamp
+ " does not match current recovery id "
+ recoveryId + " for block " + lastblock);
}
INodeFileUnderConstruction pendingFile = (INodeFileUnderConstruction)iFile;
if (deleteblock) {
pendingFile.removeLastBlock(lastblock);
blockManager.removeBlockFromMap(storedBlock);
}
else {
// update last block
storedBlock.setGenerationStamp(newgenerationstamp);
storedBlock.setNumBytes(newlength);
// find the DatanodeDescriptor objects
// There should be no locations in the blockManager till now because the
// file is underConstruction
DatanodeDescriptor[] descriptors = null;
if (newtargets.length > 0) {
descriptors = new DatanodeDescriptor[newtargets.length];
for(int i = 0; i < newtargets.length; i++) {
descriptors[i] = getDatanode(newtargets[i]);
}
}
if (closeFile) {
// the file is getting closed. Insert block locations into blockManager.
// Otherwise fsck will report these blocks as MISSING, especially if the
// blocksReceived from Datanodes take a long time to arrive.
for (int i = 0; i < descriptors.length; i++) {
descriptors[i].addBlock(storedBlock);
}
}
// add pipeline locations into the INodeUnderConstruction
pendingFile.setLastBlock(storedBlock, descriptors);
}
// If this commit does not want to close the file, persist
// blocks only if append is supported and return
src = leaseManager.findPath(pendingFile);
if (!closeFile) {
if (supportAppends) {
dir.persistBlocks(src, pendingFile);
getEditLog().logSync();
}
LOG.info("commitBlockSynchronization(" + lastblock + ") successful");
return;
}
// commit the last block and complete it if it has minimum replicas
blockManager.commitOrCompleteLastBlock(pendingFile, storedBlock);
//remove lease, close file
finalizeINodeFileUnderConstruction(src, pendingFile);
} finally {
writeUnlock();
}
getEditLog().logSync();
LOG.info("commitBlockSynchronization(newblock=" + lastblock
+ ", file=" + src
+ ", newgenerationstamp=" + newgenerationstamp
+ ", newlength=" + newlength
+ ", newtargets=" + Arrays.asList(newtargets) + ") successful");
}
/**
* Renew the lease(s) held by the given client
*/
void renewLease(String holder) throws IOException {
writeLock();
try {
if (isInSafeMode())
throw new SafeModeException("Cannot renew lease for " + holder, safeMode);
leaseManager.renewLease(holder);
} finally {
writeUnlock();
}
}
/**
* Get a partial listing of the indicated directory
*
* @param src the directory name
* @param startAfter the name to start after
* @param needLocation if blockLocations need to be returned
* @return a partial listing starting after startAfter
*
* @throws AccessControlException if access is denied
* @throws UnresolvedLinkException if symbolic link is encountered
* @throws IOException if other I/O error occurred
*/
public DirectoryListing getListing(String src, byte[] startAfter,
boolean needLocation)
throws AccessControlException, UnresolvedLinkException, IOException {
if (isPermissionEnabled) {
if (dir.isDir(src)) {
checkPathAccess(src, FsAction.READ_EXECUTE);
}
else {
checkTraverse(src);
}
}
if (auditLog.isInfoEnabled() && isExternalInvocation()) {
logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"listStatus", src, null, null);
}
return dir.getListing(src, startAfter, needLocation);
}
/////////////////////////////////////////////////////////
//
// These methods are called by datanodes
//
/////////////////////////////////////////////////////////
  /**
   * Register Datanode.
   * <p>
   * The purpose of registration is to identify whether the new datanode
   * serves a new data storage, and will report new data block copies,
   * which the namenode was not aware of; or the datanode is a replacement
   * node for the data storage that was previously served by a different
   * or the same (in terms of host:port) datanode.
   * The data storages are distinguished by their storageIDs. When a new
   * data storage is reported the namenode issues a new unique storageID.
   * <p>
   * Finally, the namenode returns its namespaceID as the registrationID
   * for the datanodes.
   * namespaceID is a persistent attribute of the name space.
   * The registrationID is checked every time the datanode is communicating
   * with the namenode.
   * Datanodes with inappropriate registrationID are rejected.
   * If the namenode stops, and then restarts it can restore its
   * namespaceID and will continue serving the datanodes that has previously
   * registered with the namenode without restarting the whole cluster.
   *
   * @see org.apache.hadoop.hdfs.server.datanode.DataNode#register()
   */
  public void registerDatanode(DatanodeRegistration nodeReg
                               ) throws IOException {
    writeLock();
    try {
      String dnAddress = Server.getRemoteAddress();
      if (dnAddress == null) {
        // Mostly called inside an RPC.
        // But if not, use address passed by the data-node.
        dnAddress = nodeReg.getHost();
      }
      // check if the datanode is allowed to be connect to the namenode
      if (!verifyNodeRegistration(nodeReg, dnAddress)) {
        throw new DisallowedDatanodeException(nodeReg);
      }
      String hostName = nodeReg.getHost();
      // update the datanode's name with ip:port
      DatanodeID dnReg = new DatanodeID(dnAddress + ":" + nodeReg.getPort(),
                                        nodeReg.getStorageID(),
                                        nodeReg.getInfoPort(),
                                        nodeReg.getIpcPort());
      nodeReg.updateRegInfo(dnReg);
      nodeReg.exportedKeys = getBlockKeys();
      NameNode.stateChangeLog.info(
          "BLOCK* NameSystem.registerDatanode: "
          + "node registration from " + nodeReg.getName()
          + " storage " + nodeReg.getStorageID());
      // nodeS: existing descriptor with the same storage ID.
      // nodeN: existing descriptor with the same host:port name.
      DatanodeDescriptor nodeS = datanodeMap.get(nodeReg.getStorageID());
      DatanodeDescriptor nodeN = host2DataNodeMap.getDatanodeByName(nodeReg.getName());
      if (nodeN != null && nodeN != nodeS) {
        NameNode.LOG.info("BLOCK* NameSystem.registerDatanode: "
                          + "node from name: " + nodeN.getName());
        // nodeN previously served a different data storage,
        // which is not served by anybody anymore.
        removeDatanode(nodeN);
        // physically remove node from datanodeMap
        wipeDatanode(nodeN);
        nodeN = null;
      }
      if (nodeS != null) {
        if (nodeN == nodeS) {
          // The same datanode has been just restarted to serve the same data
          // storage. We do not need to remove old data blocks, the delta will
          // be calculated on the next block report from the datanode
          if(NameNode.stateChangeLog.isDebugEnabled()) {
            NameNode.stateChangeLog.debug("BLOCK* NameSystem.registerDatanode: "
                                          + "node restarted.");
          }
        } else {
          // nodeS is found
          /* The registering datanode is a replacement node for the existing
            data storage, which from now on will be served by a new node.
            If this message repeats, both nodes might have same storageID
            by (insanely rare) random chance. User needs to restart one of the
            nodes with its data cleared (or user can just remove the StorageID
            value in "VERSION" file under the data directory of the datanode,
            but this is might not work if VERSION file format has changed
          */
          NameNode.stateChangeLog.info( "BLOCK* NameSystem.registerDatanode: "
                                        + "node " + nodeS.getName()
                                        + " is replaced by " + nodeReg.getName() +
                                        " with the same storageID " +
                                        nodeReg.getStorageID());
        }
        // update cluster map
        clusterMap.remove(nodeS);
        nodeS.updateRegInfo(nodeReg);
        nodeS.setHostName(hostName);
        // resolve network location
        resolveNetworkLocation(nodeS);
        clusterMap.add(nodeS);
        // also treat the registration message as a heartbeat
        synchronized(heartbeats) {
          if( !heartbeats.contains(nodeS)) {
            heartbeats.add(nodeS);
            //update its timestamp
            nodeS.updateHeartbeat(0L, 0L, 0L, 0, 0);
            nodeS.isAlive = true;
          }
        }
        return;
      }
      // this is a new datanode serving a new data storage
      if (nodeReg.getStorageID().equals("")) {
        // this data storage has never been registered
        // it is either empty or was created by pre-storageID version of DFS
        nodeReg.storageID = newStorageID();
        if(NameNode.stateChangeLog.isDebugEnabled()) {
          NameNode.stateChangeLog.debug(
              "BLOCK* NameSystem.registerDatanode: "
              + "new storageID " + nodeReg.getStorageID() + " assigned.");
        }
      }
      // register new datanode
      DatanodeDescriptor nodeDescr
        = new DatanodeDescriptor(nodeReg, NetworkTopology.DEFAULT_RACK, hostName);
      resolveNetworkLocation(nodeDescr);
      unprotectedAddDatanode(nodeDescr);
      clusterMap.add(nodeDescr);
      // also treat the registration message as a heartbeat
      synchronized(heartbeats) {
        heartbeats.add(nodeDescr);
        nodeDescr.isAlive = true;
        // no need to update its timestamp
        // because its is done when the descriptor is created
      }
      // A new node may change whether safe-mode conditions are met.
      if (safeMode != null) {
        safeMode.checkMode();
      }
      return;
    } finally {
      writeUnlock();
    }
  }
/* Resolve a node's network location */
private void resolveNetworkLocation (DatanodeDescriptor node) {
List<String> names = new ArrayList<String>(1);
if (dnsToSwitchMapping instanceof CachedDNSToSwitchMapping) {
// get the node's IP address
names.add(node.getHost());
} else {
// get the node's host name
String hostName = node.getHostName();
int colon = hostName.indexOf(":");
hostName = (colon==-1)?hostName:hostName.substring(0,colon);
names.add(hostName);
}
// resolve its network location
List<String> rName = dnsToSwitchMapping.resolve(names);
String networkLocation;
if (rName == null) {
LOG.error("The resolve call returned null! Using " +
NetworkTopology.DEFAULT_RACK + " for host " + names);
networkLocation = NetworkTopology.DEFAULT_RACK;
} else {
networkLocation = rName.get(0);
}
node.setNetworkLocation(networkLocation);
}
  /**
   * Get registrationID for datanodes based on the namespaceID.
   *
   * @see #registerDatanode(DatanodeRegistration)
   * @see FSImage#newNamespaceID()
   * @return registration ID
   */
  public String getRegistrationID() {
    // Derived from the current fsimage's storage info by the Storage helper.
    return Storage.getRegistrationID(dir.fsImage);
  }
/**
* Generate new storage ID.
*
* @return unique storage ID
*
* Note: that collisions are still possible if somebody will try
* to bring in a data storage from a different cluster.
*/
private String newStorageID() {
String newID = null;
while(newID == null) {
newID = "DS" + Integer.toString(r.nextInt());
if (datanodeMap.get(newID) != null)
newID = null;
}
return newID;
}
private boolean isDatanodeDead(DatanodeDescriptor node) {
return (node.getLastUpdate() <
(now() - heartbeatExpireInterval));
}
  /** Force the node to look dead by zeroing its last-heartbeat timestamp,
   *  so that {@link #isDatanodeDead} reports it as expired. */
  private void setDatanodeDead(DatanodeDescriptor node) throws IOException {
    node.setLastUpdate(0);
  }
  /**
   * The given node has reported in. This method should:
   * 1) Record the heartbeat, so the datanode isn't timed out
   * 2) Adjust usage stats for future block allocation
   *
   * If a substantial amount of time passed since the last datanode
   * heartbeat then request an immediate block report.
   *
   * @return an array of datanode commands
   * @throws IOException
   */
  DatanodeCommand[] handleHeartbeat(DatanodeRegistration nodeReg,
      long capacity, long dfsUsed, long remaining,
      int xceiverCount, int xmitsInProgress, int failedVolumes)
      throws IOException {
    DatanodeCommand cmd = null;
    // Lock order: heartbeats before datanodeMap (same as heartbeatCheck).
    synchronized (heartbeats) {
      synchronized (datanodeMap) {
        DatanodeDescriptor nodeinfo = null;
        try {
          nodeinfo = getDatanode(nodeReg);
        } catch(UnregisteredNodeException e) {
          // Unknown node: instruct it to (re-)register.
          return new DatanodeCommand[]{DatanodeCommand.REGISTER};
        }
        // Check if this datanode should actually be shutdown instead.
        if (nodeinfo != null && shouldNodeShutdown(nodeinfo)) {
          setDatanodeDead(nodeinfo);
          throw new DisallowedDatanodeException(nodeinfo);
        }
        if (nodeinfo == null || !nodeinfo.isAlive) {
          return new DatanodeCommand[]{DatanodeCommand.REGISTER};
        }
        // Back the node's old figures out of the cluster aggregates, apply
        // the fresh heartbeat values, then add the new figures back in.
        updateStats(nodeinfo, false);
        nodeinfo.updateHeartbeat(capacity, dfsUsed, remaining, xceiverCount,
            failedVolumes);
        updateStats(nodeinfo, true);
        //check lease recovery; it preempts every other command
        cmd = nodeinfo.getLeaseRecoveryCommand(Integer.MAX_VALUE);
        if (cmd != null) {
          return new DatanodeCommand[] {cmd};
        }
        ArrayList<DatanodeCommand> cmds = new ArrayList<DatanodeCommand>(3);
        //check pending replication
        cmd = nodeinfo.getReplicationCommand(
            blockManager.maxReplicationStreams - xmitsInProgress);
        if (cmd != null) {
          cmds.add(cmd);
        }
        //check block invalidation
        cmd = nodeinfo.getInvalidateBlocks(blockInvalidateLimit);
        if (cmd != null) {
          cmds.add(cmd);
        }
        // check access key update
        if (isBlockTokenEnabled && nodeinfo.needKeyUpdate) {
          cmds.add(new KeyUpdateCommand(blockTokenSecretManager.exportKeys()));
          nodeinfo.needKeyUpdate = false;
        }
        if (!cmds.isEmpty()) {
          return cmds.toArray(new DatanodeCommand[cmds.size()]);
        }
      }
    }
    //check distributed upgrade (outside the locks; only when no other work)
    cmd = getDistributedUpgradeCommand();
    if (cmd != null) {
      return new DatanodeCommand[] {cmd};
    }
    return null;
  }
private void updateStats(DatanodeDescriptor node, boolean isAdded) {
//
// The statistics are protected by the heartbeat lock
//
assert(Thread.holdsLock(heartbeats));
if (isAdded) {
capacityTotal += node.getCapacity();
capacityUsed += node.getDfsUsed();
capacityRemaining += node.getRemaining();
totalLoad += node.getXceiverCount();
} else {
capacityTotal -= node.getCapacity();
capacityUsed -= node.getDfsUsed();
capacityRemaining -= node.getRemaining();
totalLoad -= node.getXceiverCount();
}
}
/**
* Update access keys.
*/
void updateBlockKey() throws IOException {
this.blockTokenSecretManager.updateKeys();
synchronized (heartbeats) {
for (DatanodeDescriptor nodeInfo : heartbeats) {
nodeInfo.needKeyUpdate = true;
}
}
}
  /**
   * Periodically calls heartbeatCheck() and updateBlockKey()
   */
  class HeartbeatMonitor implements Runnable {
    // Timestamps (ms) of the last heartbeat-expiry scan and key rollover.
    private long lastHeartbeatCheck;
    private long lastBlockKeyUpdate;
    /**
     */
    public void run() {
      while (fsRunning) {
        try {
          long now = now();
          if (lastHeartbeatCheck + heartbeatRecheckInterval < now) {
            heartbeatCheck();
            lastHeartbeatCheck = now;
          }
          if (isBlockTokenEnabled && (lastBlockKeyUpdate + blockKeyUpdateInterval < now)) {
            updateBlockKey();
            lastBlockKeyUpdate = now;
          }
        } catch (Exception e) {
          // Keep the monitor alive: log and retry on the next cycle.
          FSNamesystem.LOG.error(StringUtils.stringifyException(e));
        }
        try {
          Thread.sleep(5000); // 5 seconds
        } catch (InterruptedException ie) {
          // Ignored deliberately: the loop terminates via fsRunning.
        }
      }
    }
  }
  /**
   * Periodically calls computeReplicationWork().
   */
  class ReplicationMonitor implements Runnable {
    // Percentage of nodes scanned for invalidation work per iteration.
    static final int INVALIDATE_WORK_PCT_PER_ITERATION = 32;
    // Replication batch size per iteration = live nodes * this multiplier.
    static final float REPLICATION_WORK_MULTIPLIER_PER_ITERATION = 2;
    public void run() {
      while (fsRunning) {
        try {
          computeDatanodeWork();
          blockManager.processPendingReplications();
          Thread.sleep(replicationRecheckInterval);
        } catch (InterruptedException ie) {
          // Interrupt is the shutdown signal for this thread.
          LOG.warn("ReplicationMonitor thread received InterruptedException." + ie);
          break;
        } catch (IOException ie) {
          LOG.warn("ReplicationMonitor thread received exception. " + ie);
        } catch (Throwable t) {
          // Any other error is considered fatal to the namenode process.
          LOG.warn("ReplicationMonitor thread received Runtime exception. " + t);
          Runtime.getRuntime().exit(-1);
        }
      }
    }
  }
/////////////////////////////////////////////////////////
//
// These methods are called by the Namenode system, to see
// if there is any work for registered datanodes.
//
/////////////////////////////////////////////////////////
  /**
   * Compute block replication and block invalidation work
   * that can be scheduled on data-nodes.
   * The datanode will be informed of this work at the next heartbeat.
   *
   * @return number of blocks scheduled for replication or removal.
   * @throws IOException
   */
  public int computeDatanodeWork() throws IOException {
    int workFound = 0;
    int blocksToProcess = 0;
    int nodesToProcess = 0;
    // blocks should not be replicated or removed if safe mode is on
    if (isInSafeMode())
      return workFound;
    // Batch sizes scale with the number of live (heartbeating) nodes.
    synchronized(heartbeats) {
      blocksToProcess = (int)(heartbeats.size()
          * ReplicationMonitor.REPLICATION_WORK_MULTIPLIER_PER_ITERATION);
      nodesToProcess = (int)Math.ceil((double)heartbeats.size()
          * ReplicationMonitor.INVALIDATE_WORK_PCT_PER_ITERATION / 100);
    }
    workFound = blockManager.computeReplicationWork(blocksToProcess);
    // Update FSNamesystemMetrics counters
    writeLock();
    try {
      blockManager.updateState();
      blockManager.scheduledReplicationBlocksCount = workFound;
    } finally {
      writeUnlock();
    }
    workFound += blockManager.computeInvalidateWork(nodesToProcess);
    return workFound;
  }
  /** Set the maximum number of concurrent replication streams per datanode. */
  public void setNodeReplicationLimit(int limit) {
    blockManager.maxReplicationStreams = limit;
  }
/**
* Remove a datanode descriptor.
* @param nodeID datanode ID.
* @throws IOException
*/
public void removeDatanode(DatanodeID nodeID)
throws IOException {
writeLock();
try {
DatanodeDescriptor nodeInfo = getDatanode(nodeID);
if (nodeInfo != null) {
removeDatanode(nodeInfo);
} else {
NameNode.stateChangeLog.warn("BLOCK* NameSystem.removeDatanode: "
+ nodeID.getName() + " does not exist");
}
} finally {
writeUnlock();
}
}
  /**
   * Remove a datanode descriptor.
   * @param nodeInfo datanode descriptor.
   */
  private void removeDatanode(DatanodeDescriptor nodeInfo) {
    // First take the node out of the heartbeat set and back its figures
    // out of the cluster-wide aggregates.
    synchronized (heartbeats) {
      if (nodeInfo.isAlive) {
        updateStats(nodeInfo, false);
        heartbeats.remove(nodeInfo);
        nodeInfo.isAlive = false;
      }
    }
    // Forget every replica this node was known to hold.
    Iterator<? extends Block> it = nodeInfo.getBlockIterator();
    while(it.hasNext()) {
      blockManager.removeStoredBlock(it.next(), nodeInfo);
    }
    unprotectedRemoveDatanode(nodeInfo);
    clusterMap.remove(nodeInfo);
    // Losing a node may change whether safe-mode conditions are met.
    if (safeMode != null) {
      safeMode.checkMode();
    }
  }
  /** Clear the node's block list and pending invalidations. "Unprotected"
   *  = no locking is done here; callers in this file hold the write lock. */
  void unprotectedRemoveDatanode(DatanodeDescriptor nodeDescr) {
    nodeDescr.resetBlocks();
    blockManager.removeFromInvalidates(nodeDescr.getStorageID());
    if(NameNode.stateChangeLog.isDebugEnabled()) {
      NameNode.stateChangeLog.debug(
          "BLOCK* NameSystem.unprotectedRemoveDatanode: "
          + nodeDescr.getName() + " is out of service now.");
    }
  }
  /** Add the node to datanodeMap and host2DataNodeMap, keeping the two maps
   *  consistent. "Unprotected" = no locking is done here; the caller
   *  (registerDatanode) holds the write lock. */
  void unprotectedAddDatanode(DatanodeDescriptor nodeDescr) {
    /* To keep host2DataNodeMap consistent with datanodeMap,
      remove from host2DataNodeMap the datanodeDescriptor removed
      from datanodeMap before adding nodeDescr to host2DataNodeMap.
    */
    // Note: datanodeMap.put returns the descriptor it displaced (or null),
    // which is exactly the entry that must leave host2DataNodeMap.
    host2DataNodeMap.remove(
        datanodeMap.put(nodeDescr.getStorageID(), nodeDescr));
    host2DataNodeMap.add(nodeDescr);
    if(NameNode.stateChangeLog.isDebugEnabled()) {
      NameNode.stateChangeLog.debug(
          "BLOCK* NameSystem.unprotectedAddDatanode: "
          + "node " + nodeDescr.getName() + " is added to datanodeMap.");
    }
  }
/**
* Physically remove node from datanodeMap.
*
* @param nodeID node
* @throws IOException
*/
void wipeDatanode(DatanodeID nodeID) throws IOException {
String key = nodeID.getStorageID();
host2DataNodeMap.remove(datanodeMap.remove(key));
if(NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug(
"BLOCK* NameSystem.wipeDatanode: "
+ nodeID.getName() + " storage " + key
+ " is removed from datanodeMap.");
}
}
  /** @return the FSImage backing this namesystem's directory. */
  FSImage getFSImage() {
    return dir.fsImage;
  }
  /** @return the edit log owned by the current FSImage. */
  FSEditLog getEditLog() {
    return getFSImage().getEditLog();
  }
  /**
   * Check if there are any expired heartbeats, and if so,
   * whether any blocks have to be re-replicated.
   * While removing dead datanodes, make sure that only one datanode is marked
   * dead at a time within the synchronized section. Otherwise, a cascading
   * effect causes more datanodes to be declared dead.
   */
  void heartbeatCheck() {
    boolean allAlive = false;
    while (!allAlive) {
      boolean foundDead = false;
      DatanodeID nodeID = null;
      // locate the first dead node.
      synchronized(heartbeats) {
        for (Iterator<DatanodeDescriptor> it = heartbeats.iterator();
             it.hasNext();) {
          DatanodeDescriptor nodeInfo = it.next();
          if (isDatanodeDead(nodeInfo)) {
            myFSMetrics.numExpiredHeartbeats.inc();
            foundDead = true;
            nodeID = nodeInfo;
            break;
          }
        }
      }
      // acquire the fsnamesystem lock, and then remove the dead node.
      if (foundDead) {
        writeLock();
        try {
          synchronized(heartbeats) {
            synchronized (datanodeMap) {
              DatanodeDescriptor nodeInfo = null;
              try {
                nodeInfo = getDatanode(nodeID);
              } catch (IOException e) {
                nodeInfo = null;
              }
              // Re-check under the lock: the node may have heartbeated
              // between the scan above and acquiring the write lock.
              if (nodeInfo != null && isDatanodeDead(nodeInfo)) {
                NameNode.stateChangeLog.info("BLOCK* NameSystem.heartbeatCheck: "
                    + "lost heartbeat from " + nodeInfo.getName());
                removeDatanode(nodeInfo);
              }
            }
          }
        } finally {
          writeUnlock();
        }
      }
      allAlive = !foundDead;
    }
  }
  /**
   * The given node is reporting all its blocks. Use this info to
   * update the (machine-->blocklist) and (block-->machinelist) tables.
   */
  public void processReport(DatanodeID nodeID,
                            BlockListAsLongs newReport
                            ) throws IOException {
    writeLock();
    try {
      // startTime is used below to record report latency into the metrics.
      long startTime = now();
      if (NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("BLOCK* NameSystem.processReport: "
            + "from " + nodeID.getName()+" " +
            newReport.getNumberOfBlocks()+" blocks");
      }
      DatanodeDescriptor node = getDatanode(nodeID);
      if (node == null || !node.isAlive) {
        throw new IOException("ProcessReport from dead or unregisterted node: "
                              + nodeID.getName());
      }
      // Check if this datanode should actually be shutdown instead.
      if (shouldNodeShutdown(node)) {
        setDatanodeDead(node);
        throw new DisallowedDatanodeException(node);
      }
      blockManager.processReport(node, newReport);
      NameNode.getNameNodeMetrics().blockReport.inc((int) (now() - startTime));
    } finally {
      writeUnlock();
    }
  }
  /**
   * We want "replication" replicates for the block, but we now have too many.
   * In this method, copy enough nodes from 'srcNodes' into 'dstNodes' such that:
   *
   * srcNodes.size() - dstNodes.size() == replication
   *
   * We pick node that make sure that replicas are spread across racks and
   * also try hard to pick one with least free space.
   * The algorithm is first to pick a node with least free space from nodes
   * that are on a rack holding more than one replicas of the block.
   * So removing such a replica won't remove a rack.
   * If no such a node is available,
   * then pick a node with least free space
   */
  void chooseExcessReplicates(Collection<DatanodeDescriptor> nonExcess,
                              Block b, short replication,
                              DatanodeDescriptor addedNode,
                              DatanodeDescriptor delNodeHint,
                              BlockPlacementPolicy replicator) {
    // first form a rack to datanodes map and
    INodeFile inode = blockManager.getINode(b);
    HashMap<String, ArrayList<DatanodeDescriptor>> rackMap =
      new HashMap<String, ArrayList<DatanodeDescriptor>>();
    for (Iterator<DatanodeDescriptor> iter = nonExcess.iterator();
         iter.hasNext();) {
      DatanodeDescriptor node = iter.next();
      String rackName = node.getNetworkLocation();
      ArrayList<DatanodeDescriptor> datanodeList = rackMap.get(rackName);
      if(datanodeList==null) {
        datanodeList = new ArrayList<DatanodeDescriptor>();
      }
      datanodeList.add(node);
      rackMap.put(rackName, datanodeList);
    }
    // split nodes into two sets
    // priSet contains nodes on rack with more than one replica
    // remains contains the remaining nodes
    ArrayList<DatanodeDescriptor> priSet = new ArrayList<DatanodeDescriptor>();
    ArrayList<DatanodeDescriptor> remains = new ArrayList<DatanodeDescriptor>();
    for( Iterator<Entry<String, ArrayList<DatanodeDescriptor>>> iter =
          rackMap.entrySet().iterator(); iter.hasNext(); ) {
      Entry<String, ArrayList<DatanodeDescriptor>> rackEntry = iter.next();
      ArrayList<DatanodeDescriptor> datanodeList = rackEntry.getValue();
      if( datanodeList.size() == 1 ) {
        remains.add(datanodeList.get(0));
      } else {
        priSet.addAll(datanodeList);
      }
    }
    // pick one node to delete that favors the delete hint
    // otherwise pick one with least space from priSet if it is not empty
    // otherwise one node with least space from remains
    boolean firstOne = true;
    while (nonExcess.size() - replication > 0) {
      DatanodeInfo cur = null;
      // check if we can del delNodeHint
      // (only on the first iteration, and only if deleting the hinted node
      // keeps rack coverage: it is on a multi-replica rack, or the newly
      // added node is not)
      if (firstOne && delNodeHint !=null && nonExcess.contains(delNodeHint) &&
          (priSet.contains(delNodeHint) || (addedNode != null && !priSet.contains(addedNode))) ) {
        cur = delNodeHint;
      } else { // regular excessive replica removal
        cur = replicator.chooseReplicaToDelete(inode, b, replication, priSet, remains);
      }
      firstOne = false;
      // adjust rackmap, priSet, and remains
      String rack = cur.getNetworkLocation();
      ArrayList<DatanodeDescriptor> datanodes = rackMap.get(rack);
      datanodes.remove(cur);
      if(datanodes.isEmpty()) {
        rackMap.remove(rack);
      }
      if( priSet.remove(cur) ) {
        // If the rack now holds a single replica, its surviving node moves
        // from the multi-replica set to the singleton set.
        if (datanodes.size() == 1) {
          priSet.remove(datanodes.get(0));
          remains.add(datanodes.get(0));
        }
      } else {
        remains.remove(cur);
      }
      nonExcess.remove(cur);
      blockManager.addToExcessReplicate(cur, b);
      //
      // The 'excessblocks' tracks blocks until we get confirmation
      // that the datanode has deleted them; the only way we remove them
      // is when we get a "removeBlock" message.
      //
      // The 'invalidate' list is used to inform the datanode the block
      // should be deleted. Items are removed from the invalidate list
      // upon giving instructions to the namenode.
      //
      blockManager.addToInvalidates(b, cur);
      NameNode.stateChangeLog.info("BLOCK* NameSystem.chooseExcessReplicates: "
          +"("+cur.getName()+", "+b+") is added to recentInvalidateSets");
    }
  }
/**
* The given node is reporting that it received a certain block.
*/
public void blockReceived(DatanodeID nodeID,
Block block,
String delHint
) throws IOException {
writeLock();
try {
DatanodeDescriptor node = getDatanode(nodeID);
if (node == null || !node.isAlive) {
NameNode.stateChangeLog.warn("BLOCK* NameSystem.blockReceived: " + block
+ " is received from dead or unregistered node " + nodeID.getName());
throw new IOException(
"Got blockReceived message from unregistered or dead node " + block);
}
if (NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("BLOCK* NameSystem.blockReceived: "
+block+" is received from " + nodeID.getName());
}
// Check if this datanode should actually be shutdown instead.
if (shouldNodeShutdown(node)) {
setDatanodeDead(node);
throw new DisallowedDatanodeException(node);
}
blockManager.addBlock(node, block, delHint);
} finally {
writeUnlock();
}
}
  /** @return the number of missing blocks; read without taking any lock. */
  public long getMissingBlocksCount() {
    // not locking
    return blockManager.getMissingBlocksCount();
  }
  /**
   * @return a consistent snapshot (under the heartbeats lock) of
   * {capacityTotal, capacityUsed, capacityRemaining, underReplicated,
   *  corruptReplicas, missingBlocks}; the capacity getters below index
   * into this array.
   */
  long[] getStats() {
    synchronized(heartbeats) {
      return new long[] {this.capacityTotal, this.capacityUsed,
                         this.capacityRemaining,
                         getUnderReplicatedBlocks(),
                         getCorruptReplicaBlocks(),
                         getMissingBlocksCount()};
    }
  }
  /**
   * Total raw bytes including non-dfs used space.
   */
  public long getCapacityTotal() {
    // Index 0 of getStats() is capacityTotal.
    return getStats()[0];
  }
  /**
   * Total used space by data nodes
   */
  public long getCapacityUsed() {
    // Index 1 of getStats() is capacityUsed.
    return getStats()[1];
  }
/**
* Total used space by data nodes as percentage of total capacity
*/
public float getCapacityUsedPercent() {
synchronized(heartbeats){
if (capacityTotal <= 0) {
return 100;
}
return ((float)capacityUsed * 100.0f)/(float)capacityTotal;
}
}
/**
* Total used space by data nodes for non DFS purposes such
* as storing temporary files on the local file system
*/
public long getCapacityUsedNonDFS() {
long nonDFSUsed = 0;
synchronized(heartbeats){
nonDFSUsed = capacityTotal - capacityRemaining - capacityUsed;
}
return nonDFSUsed < 0 ? 0 : nonDFSUsed;
}
  /**
   * Total non-used raw bytes.
   */
  public long getCapacityRemaining() {
    // Index 2 of getStats() is capacityRemaining.
    return getStats()[2];
  }
/**
* Total remaining space by data nodes as percentage of total capacity
*/
public float getCapacityRemainingPercent() {
synchronized(heartbeats){
if (capacityTotal <= 0) {
return 0;
}
return ((float)capacityRemaining * 100.0f)/(float)capacityTotal;
}
}
  /**
   * Total number of connections.
   */
  public int getTotalLoad() {
    // Aggregate xceiver count across live datanodes (see updateStats).
    synchronized (heartbeats) {
      return this.totalLoad;
    }
  }
  /** @return the number of datanodes matching the given report type. */
  int getNumberOfDatanodes(DatanodeReportType type) {
    return getDatanodeListForReport(type).size();
  }
  /**
   * Build the datanode list for a report of the given type. For DEAD (or
   * ALL) reports, hosts listed in the include/exclude files that never
   * registered are reported as dead placeholder entries.
   */
  private ArrayList<DatanodeDescriptor> getDatanodeListForReport(
      DatanodeReportType type) {
    boolean listLiveNodes = type == DatanodeReportType.ALL ||
                            type == DatanodeReportType.LIVE;
    boolean listDeadNodes = type == DatanodeReportType.ALL ||
                            type == DatanodeReportType.DEAD;
    // mustList: hosts that must appear in the report even if unregistered.
    HashMap<String, String> mustList = new HashMap<String, String>();
    readLock();
    try {
      if (listDeadNodes) {
        //first load all the nodes listed in include and exclude files.
        for (Iterator<String> it = hostsReader.getHosts().iterator();
             it.hasNext();) {
          mustList.put(it.next(), "");
        }
        for (Iterator<String> it = hostsReader.getExcludedHosts().iterator();
             it.hasNext();) {
          mustList.put(it.next(), "");
        }
      }
      ArrayList<DatanodeDescriptor> nodes = null;
      synchronized (datanodeMap) {
        nodes = new ArrayList<DatanodeDescriptor>(datanodeMap.size() +
                                                  mustList.size());
        for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator();
            it.hasNext();) {
          DatanodeDescriptor dn = it.next();
          boolean isDead = isDatanodeDead(dn);
          if ( (isDead && listDeadNodes) || (!isDead && listLiveNodes) ) {
            nodes.add(dn);
          }
          //Remove any form of the this datanode in include/exclude lists.
          mustList.remove(dn.getName());
          mustList.remove(dn.getHost());
          mustList.remove(dn.getHostName());
        }
      }
      if (listDeadNodes) {
        // Whatever remains in mustList never registered: report it as a
        // dead node with a zeroed heartbeat timestamp.
        for (Iterator<String> it = mustList.keySet().iterator(); it.hasNext();) {
          DatanodeDescriptor dn =
              new DatanodeDescriptor(new DatanodeID(it.next()));
          dn.setLastUpdate(0);
          nodes.add(dn);
        }
      }
      return nodes;
    } finally {
      readUnlock();
    }
  }
public DatanodeInfo[] datanodeReport( DatanodeReportType type
) throws AccessControlException {
readLock();
try {
checkSuperuserPrivilege();
ArrayList<DatanodeDescriptor> results = getDatanodeListForReport(type);
DatanodeInfo[] arr = new DatanodeInfo[results.size()];
for (int i=0; i<arr.length; i++) {
arr[i] = new DatanodeInfo(results.get(i));
}
return arr;
} finally {
readUnlock();
}
}
  /**
   * Save namespace image.
   * This will save current namespace into fsimage file and empty edits file.
   * Requires superuser privilege and safe mode.
   *
   * @throws AccessControlException if superuser privilege is violated.
   * @throws IOException if the name node is not in safe mode, or if the
   *         image cannot be saved.
   */
  void saveNamespace() throws AccessControlException, IOException {
    writeLock();
    try {
      checkSuperuserPrivilege();
      // Saving is only safe while the namespace is frozen, i.e. in safe mode.
      if(!isInSafeMode()) {
        throw new IOException("Safe mode should be turned ON " +
                              "in order to create namespace image.");
      }
      getFSImage().saveNamespace(true);
      LOG.info("New namespace image has been created.");
    } finally {
      writeUnlock();
    }
  }
/**
* Enables/Disables/Checks restoring failed storage replicas if the storage becomes available again.
* Requires superuser privilege.
*
* @throws AccessControlException if superuser privilege is violated.
*/
boolean restoreFailedStorage(String arg) throws AccessControlException {
writeLock();
try {
checkSuperuserPrivilege();
// if it is disabled - enable it and vice versa.
if(arg.equals("check"))
return getFSImage().getRestoreFailedStorage();
boolean val = arg.equals("true"); // false if not
getFSImage().setRestoreFailedStorage(val);
return val;
} finally {
writeUnlock();
}
}
/**
*/
public void DFSNodesStatus(ArrayList<DatanodeDescriptor> live,
ArrayList<DatanodeDescriptor> dead) {
readLock();
try {
ArrayList<DatanodeDescriptor> results =
getDatanodeListForReport(DatanodeReportType.ALL);
for(Iterator<DatanodeDescriptor> it = results.iterator(); it.hasNext();) {
DatanodeDescriptor node = it.next();
if (isDatanodeDead(node))
dead.add(node);
else
live.add(node);
}
} finally {
readUnlock();
}
}
/**
* Prints information about all datanodes.
*/
private void datanodeDump(PrintWriter out) {
readLock();
try {
synchronized (datanodeMap) {
out.println("Metasave: Number of datanodes: " + datanodeMap.size());
for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator(); it.hasNext();) {
DatanodeDescriptor node = it.next();
out.println(node.dumpDatanode());
}
}
} finally {
readUnlock();
}
}
/**
* Start decommissioning the specified datanode.
*/
private void startDecommission (DatanodeDescriptor node)
throws IOException {
if (!node.isDecommissionInProgress() && !node.isDecommissioned()) {
LOG.info("Start Decommissioning node " + node.getName());
node.startDecommission();
node.decommissioningStatus.setStartTime(now());
//
// all the blocks that reside on this node have to be
// replicated.
checkDecommissionStateInternal(node);
}
}
  /**
   * Stop decommissioning the specified datanode, e.g. when it has been
   * removed from the exclude list again.
   */
  public void stopDecommission (DatanodeDescriptor node)
    throws IOException {
    LOG.info("Stop Decommissioning node " + node.getName());
    node.stopDecommission();
  }
  /**
   * Look up a datanode by the key of {@code datanodeMap}
   * (the storage ID — see {@link #getDatanode}).
   * @return the datanode, or null if not found
   */
  public DatanodeInfo getDataNodeInfo(String name) {
    return datanodeMap.get(name);
  }
  /** @return the time this namesystem was started ({@code systemStart}) */
  public Date getStartTime() {
    return new Date(systemStart);
  }
  /** @return the configured maximum replication factor */
  short getMaxReplication() { return (short)blockManager.maxReplication; }
  /** @return the configured minimum replication factor */
  short getMinReplication() { return (short)blockManager.minReplication; }
  /** @return the default replication factor for new files */
  short getDefaultReplication() { return (short)blockManager.defaultReplication; }
/**
* Clamp the specified replication between the minimum and maximum
* replication levels for this namesystem.
*/
short adjustReplication(short replication) {
short minReplication = getMinReplication();
if (replication < minReplication) {
replication = minReplication;
}
short maxReplication = getMaxReplication();
if (replication > maxReplication) {
replication = maxReplication;
}
return replication;
}
/**
* A immutable object that stores the number of live replicas and
* the number of decommissined Replicas.
*/
static class NumberReplicas {
private int liveReplicas;
int decommissionedReplicas;
private int corruptReplicas;
private int excessReplicas;
NumberReplicas() {
initialize(0, 0, 0, 0);
}
NumberReplicas(int live, int decommissioned, int corrupt, int excess) {
initialize(live, decommissioned, corrupt, excess);
}
void initialize(int live, int decommissioned, int corrupt, int excess) {
liveReplicas = live;
decommissionedReplicas = decommissioned;
corruptReplicas = corrupt;
excessReplicas = excess;
}
int liveReplicas() {
return liveReplicas;
}
int decommissionedReplicas() {
return decommissionedReplicas;
}
int corruptReplicas() {
return corruptReplicas;
}
int excessReplicas() {
return excessReplicas;
}
}
/**
* Change, if appropriate, the admin state of a datanode to
* decommission completed. Return true if decommission is complete.
*/
boolean checkDecommissionStateInternal(DatanodeDescriptor node) {
//
// Check to see if all blocks in this decommissioned
// node has reached their target replication factor.
//
if (node.isDecommissionInProgress()) {
if (!blockManager.isReplicationInProgress(node)) {
node.setDecommissioned();
LOG.info("Decommission complete for node " + node.getName());
}
}
if (node.isDecommissioned()) {
return true;
}
return false;
}
/**
* Keeps track of which datanodes/ipaddress are allowed to connect to the namenode.
*/
private boolean inHostsList(DatanodeID node, String ipAddr) {
Set<String> hostsList = hostsReader.getHosts();
return (hostsList.isEmpty() ||
(ipAddr != null && hostsList.contains(ipAddr)) ||
hostsList.contains(node.getHost()) ||
hostsList.contains(node.getName()) ||
((node instanceof DatanodeInfo) &&
hostsList.contains(((DatanodeInfo)node).getHostName())));
}
private boolean inExcludedHostsList(DatanodeID node, String ipAddr) {
Set<String> excludeList = hostsReader.getExcludedHosts();
return ((ipAddr != null && excludeList.contains(ipAddr)) ||
excludeList.contains(node.getHost()) ||
excludeList.contains(node.getName()) ||
((node instanceof DatanodeInfo) &&
excludeList.contains(((DatanodeInfo)node).getHostName())));
}
  /**
   * Rereads the config to get hosts and exclude list file names.
   * Rereads the files to update the hosts and exclude lists. It
   * checks if any of the hosts have changed states:
   * 1. Added to hosts --> no further work needed here.
   * 2. Removed from hosts --> mark AdminState as decommissioned.
   * 3. Added to exclude --> start decommission.
   * 4. Removed from exclude --> stop decommission.
   *
   * @param conf configuration to read the list file names from;
   *             a fresh HdfsConfiguration is used when null
   * @throws IOException if refreshing the lists or changing a node's
   *         decommission state fails
   */
  public void refreshNodes(Configuration conf) throws IOException {
    checkSuperuserPrivilege();
    // Reread the config to get dfs.hosts and dfs.hosts.exclude filenames.
    // Update the file names and refresh internal includes and excludes list
    if (conf == null)
      conf = new HdfsConfiguration();
    hostsReader.updateFileNames(conf.get("dfs.hosts",""),
                                conf.get("dfs.hosts.exclude", ""));
    hostsReader.refresh();
    writeLock();
    try {
      // Walk every known datanode and reconcile its admin state with
      // the freshly loaded include/exclude lists.
      for (Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator();
           it.hasNext();) {
        DatanodeDescriptor node = it.next();
        // Check if not include.
        if (!inHostsList(node, null)) {
          node.setDecommissioned();  // case 2.
        } else {
          if (inExcludedHostsList(node, null)) {
            if (!node.isDecommissionInProgress() &&
                !node.isDecommissioned()) {
              startDecommission(node);   // case 3.
            }
          } else {
            if (node.isDecommissionInProgress() ||
                node.isDecommissioned()) {
              stopDecommission(node);   // case 4.
            }
          }
        }
      }
    } finally {
      writeUnlock();
    }
  }
  /**
   * Finalize an upgrade via the FSImage. Requires superuser privilege.
   */
  void finalizeUpgrade() throws IOException {
    checkSuperuserPrivilege();
    getFSImage().finalizeUpgrade();
  }
  /**
   * Checks if the node is not on the hosts list. If it is not, then
   * it will be ignored. If the node is in the hosts list, but is also
   * on the exclude list, then it will be decommissioned.
   * Returns FALSE if node is rejected for registration.
   * Returns TRUE if node is registered (including when it is on the
   * exclude list and is being decommissioned).
   *
   * @throws IOException if the node is excluded but not present in
   *         {@code datanodeMap}
   */
  private boolean verifyNodeRegistration(DatanodeID nodeReg, String ipAddr)
    throws IOException {
    assert (hasWriteLock());
    // not in the include list at all: reject outright
    if (!inHostsList(nodeReg, ipAddr)) {
      return false;
    }
    // included but also excluded: accept the registration, but make
    // sure decommissioning is underway
    if (inExcludedHostsList(nodeReg, ipAddr)) {
      DatanodeDescriptor node = getDatanode(nodeReg);
      if (node == null) {
        throw new IOException("verifyNodeRegistration: unknown datanode " +
                              nodeReg.getName());
      }
      if (!checkDecommissionStateInternal(node)) {
        startDecommission(node);
      }
    }
    return true;
  }
/**
* Checks if the Admin state bit is DECOMMISSIONED. If so, then
* we should shut it down.
*
* Returns true if the node should be shutdown.
*/
private boolean shouldNodeShutdown(DatanodeDescriptor node) {
return (node.isDecommissioned());
}
/**
* Get data node by storage ID.
*
* @param nodeID
* @return DatanodeDescriptor or null if the node is not found.
* @throws IOException
*/
public DatanodeDescriptor getDatanode(DatanodeID nodeID) throws IOException {
UnregisteredNodeException e = null;
DatanodeDescriptor node = datanodeMap.get(nodeID.getStorageID());
if (node == null)
return null;
if (!node.getName().equals(nodeID.getName())) {
e = new UnregisteredNodeException(nodeID, node);
NameNode.stateChangeLog.fatal("BLOCK* NameSystem.getDatanode: "
+ e.getLocalizedMessage());
throw e;
}
return node;
}
  /** Choose a random datanode from the cluster topology.
   *
   * @return a randomly chosen datanode
   */
  DatanodeDescriptor getRandomDatanode() {
    return (DatanodeDescriptor)clusterMap.chooseRandom(NodeBase.ROOT);
  }
  /**
   * SafeModeInfo contains information related to the safe mode.
   * <p>
   * An instance of {@link SafeModeInfo} is created when the name node
   * enters safe mode.
   * <p>
   * During name node startup {@link SafeModeInfo} counts the number of
   * <em>safe blocks</em>, those that have at least the minimal number of
   * replicas, and calculates the ratio of safe blocks to the total number
   * of blocks in the system, which is the size of blocks in
   * {@link FSNamesystem#blockManager}. When the ratio reaches the
   * {@link #threshold} it starts the {@link SafeModeMonitor} daemon in order
   * to monitor whether the safe mode {@link #extension} is passed.
   * Then it leaves safe mode and destroys itself.
   * <p>
   * If safe mode is turned on manually then the number of safe blocks is
   * not tracked because the name node is not intended to leave safe mode
   * automatically in the case.
   *
   * @see ClientProtocol#setSafeMode(FSConstants.SafeModeAction)
   * @see SafeModeMonitor
   */
  class SafeModeInfo {
    // configuration fields
    /** Safe mode threshold condition %.*/
    private double threshold;
    /** Safe mode minimum number of datanodes alive */
    private int datanodeThreshold;
    /** Safe mode extension after the threshold. */
    private int extension;
    /** Min replication required by safe mode. */
    private int safeReplication;
    // internal fields
    /** Time when threshold was reached.
     *
     * <br>-1 safe mode is off
     * <br> 0 safe mode is on, but threshold is not reached yet
     * <br>&gt;0 time (ms) at which the threshold was reached
     */
    private long reached = -1;
    /** Total number of blocks. */
    int blockTotal;
    /** Number of safe blocks. */
    private int blockSafe;
    /** Number of blocks needed to satisfy safe mode threshold condition */
    private int blockThreshold;
    /** time of the last status printout */
    private long lastStatusReport = 0;
    /**
     * Creates SafeModeInfo when the name node enters
     * automatic safe mode at startup.
     *
     * @param conf configuration
     */
    SafeModeInfo(Configuration conf) {
      this.threshold = conf.getFloat(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_DEFAULT);
      // A threshold > 1 can never be satisfied; warn but keep the value.
      if(threshold > 1.0) {
        LOG.warn("The threshold value should't be greater than 1, threshold: " + threshold);
      }
      this.datanodeThreshold = conf.getInt(
        DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_KEY,
        DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_DEFAULT);
      this.extension = conf.getInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_EXTENSION_KEY, 0);
      this.safeReplication = conf.getInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY,
                                         DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_DEFAULT);
      this.blockTotal = 0;
      this.blockSafe = 0;
    }
    /**
     * Creates SafeModeInfo when safe mode is entered manually.
     *
     * The {@link #threshold} is set to 1.5 so that it could never be reached.
     * {@link #blockTotal} is set to -1 to indicate that safe mode is manual.
     *
     * @see SafeModeInfo
     */
    private SafeModeInfo() {
      this.threshold = 1.5f; // this threshold can never be reached
      this.datanodeThreshold = Integer.MAX_VALUE;
      this.extension = Integer.MAX_VALUE; // also serves as the isManual() marker
      this.safeReplication = Short.MAX_VALUE + 1; // more than maxReplication
      this.blockTotal = -1;
      this.blockSafe = -1;
      this.reached = -1;
      enter();
      reportStatus("STATE* Safe mode is ON.", true);
    }
    /**
     * Check if safe mode is on.
     * @return true if in safe mode
     */
    synchronized boolean isOn() {
      try {
        // consistency check is costly, so it runs under assert only
        assert isConsistent() : " SafeMode: Inconsistent filesystem state: "
          + "Total num of blocks, active blocks, or "
          + "total safe blocks don't match.";
      } catch(IOException e) {
        System.err.print(StringUtils.stringifyException(e));
      }
      return this.reached >= 0;
    }
    /**
     * Enter safe mode.
     */
    void enter() {
      this.reached = 0;
    }
    /**
     * Leave safe mode.
     * <p>
     * Switch to manual safe mode if distributed upgrade is required.<br>
     * Check for invalid, under- & over-replicated blocks in the end of startup.
     */
    synchronized void leave(boolean checkForUpgrades) {
      if(checkForUpgrades) {
        // verify whether a distributed upgrade needs to be started
        boolean needUpgrade = false;
        try {
          needUpgrade = startDistributedUpgradeIfNeeded();
        } catch(IOException e) {
          FSNamesystem.LOG.error(StringUtils.stringifyException(e));
        }
        if(needUpgrade) {
          // switch to manual safe mode: stay in safe mode for the upgrade
          safeMode = new SafeModeInfo();
          return;
        }
      }
      // verify blocks replications
      blockManager.processMisReplicatedBlocks();
      long timeInSafemode = now() - systemStart;
      NameNode.stateChangeLog.info("STATE* Leaving safe mode after "
                                   + timeInSafemode/1000 + " secs.");
      NameNode.getNameNodeMetrics().safeModeTime.set((int) timeInSafemode);
      if (reached >= 0) {
        NameNode.stateChangeLog.info("STATE* Safe mode is OFF.");
      }
      // mark safe mode off; the outer safeMode reference is dropped too
      reached = -1;
      safeMode = null;
      NameNode.stateChangeLog.info("STATE* Network topology has "
                                   +clusterMap.getNumOfRacks()+" racks and "
                                   +clusterMap.getNumOfLeaves()+ " datanodes");
      NameNode.stateChangeLog.info("STATE* UnderReplicatedBlocks has "
                                   +blockManager.neededReplications.size()+" blocks");
    }
    /**
     * Safe mode can be turned off iff
     * the threshold is reached and
     * the extension time have passed.
     * @return true if can leave or false otherwise.
     */
    synchronized boolean canLeave() {
      // reached == 0 means the block threshold has not been met yet
      if (reached == 0)
        return false;
      // still inside the extension window after reaching the threshold
      if (now() - reached < extension) {
        reportStatus("STATE* Safe mode ON.", false);
        return false;
      }
      return !needEnter();
    }
    /**
     * There is no need to enter safe mode
     * if DFS is empty or {@link #threshold} == 0
     */
    boolean needEnter() {
      return (threshold != 0 && blockSafe < blockThreshold) ||
        (getNumLiveDataNodes() < datanodeThreshold);
    }
    /**
     * Check and trigger safe mode if needed.
     */
    private void checkMode() {
      if (needEnter()) {
        enter();
        reportStatus("STATE* Safe mode ON.", false);
        return;
      }
      // the threshold is reached
      if (!isOn() ||                           // safe mode is off
          extension <= 0 || threshold <= 0) {  // don't need to wait
        this.leave(true); // leave safe mode
        return;
      }
      if (reached > 0) {  // threshold has already been reached before
        reportStatus("STATE* Safe mode ON.", false);
        return;
      }
      // start monitor: wait out the extension before leaving
      reached = now();
      smmthread = new Daemon(new SafeModeMonitor());
      smmthread.start();
      reportStatus("STATE* Safe mode extension entered.", true);
    }
    /**
     * Set total number of blocks.
     */
    synchronized void setBlockTotal(int total) {
      this.blockTotal = total;
      this.blockThreshold = (int) (blockTotal * threshold);
      checkMode();
    }
    /**
     * Increment number of safe blocks if current block has
     * reached minimal replication.
     * @param replication current replication
     */
    synchronized void incrementSafeBlockCount(short replication) {
      // count only the transition onto exactly safeReplication replicas
      if ((int)replication == safeReplication)
        this.blockSafe++;
      checkMode();
    }
    /**
     * Decrement number of safe blocks if current block has
     * fallen below minimal replication.
     * @param replication current replication
     */
    synchronized void decrementSafeBlockCount(short replication) {
      // count only the transition just below safeReplication replicas
      if (replication == safeReplication-1)
        this.blockSafe--;
      checkMode();
    }
    /**
     * Check if safe mode was entered manually or at startup.
     * NOTE(review): uses extension == Integer.MAX_VALUE as the manual
     * marker; a configured startup extension of MAX_VALUE would be
     * misread as manual — assumed not to occur in practice.
     */
    boolean isManual() {
      return extension == Integer.MAX_VALUE;
    }
    /**
     * Set manual safe mode.
     */
    synchronized void setManual() {
      extension = Integer.MAX_VALUE;
    }
    /**
     * A tip on how safe mode is to be turned off: manually or automatically.
     */
    String getTurnOffTip() {
      if(reached < 0)
        return "Safe mode is OFF.";
      String leaveMsg = "Safe mode will be turned off automatically";
      if(isManual()) {
        if(getDistributedUpgradeState())
          return leaveMsg + " upon completion of " +
            "the distributed upgrade: upgrade progress = " +
            getDistributedUpgradeStatus() + "%";
        leaveMsg = "Use \"hdfs dfsadmin -safemode leave\" to turn safe mode off";
      }
      // blockTotal < 0 means manual safe mode: no block stats to report
      if(blockTotal < 0)
        return leaveMsg + ".";
      int numLive = getNumLiveDataNodes();
      String msg = "";
      if (reached == 0) {
        // threshold not reached yet: report what is still missing
        if (blockSafe < blockThreshold) {
          msg += String.format(
            "The reported blocks %d needs additional %d"
            + " blocks to reach the threshold %.4f of total blocks %d.",
            blockSafe, (blockThreshold - blockSafe) + 1, threshold, blockTotal);
        }
        if (numLive < datanodeThreshold) {
          if (!"".equals(msg)) {
            msg += "\n";
          }
          msg += String.format(
            "The number of live datanodes %d needs an additional %d live "
            + "datanodes to reach the minimum number %d.",
            numLive, (datanodeThreshold - numLive) + 1 , datanodeThreshold);
        }
        msg += " " + leaveMsg;
      } else {
        // threshold reached: report success figures
        msg = String.format("The reported blocks %d has reached the threshold"
            + " %.4f of total blocks %d.", blockSafe, threshold,
            blockTotal);
        if (datanodeThreshold > 0) {
          msg += String.format(" The number of live datanodes %d has reached "
                               + "the minimum number %d.",
                               numLive, datanodeThreshold);
        }
        msg += " " + leaveMsg;
      }
      if(reached == 0 || isManual()) {  // threshold is not reached or manual
        return msg + ".";
      }
      // extension period is in progress
      return msg + " in " + Math.abs(reached + extension - now()) / 1000
        + " seconds.";
    }
    /**
     * Print status every 20 seconds.
     */
    private void reportStatus(String msg, boolean rightNow) {
      long curTime = now();
      // throttle to one report per 20s unless forced
      if(!rightNow && (curTime - lastStatusReport < 20 * 1000))
        return;
      NameNode.stateChangeLog.info(msg + " \n" + getTurnOffTip());
      lastStatusReport = curTime;
    }
    /**
     * Returns printable state of the class.
     */
    public String toString() {
      String resText = "Current safe blocks = "
        + blockSafe
        + ". Target blocks = " + blockThreshold + " for threshold = %" + threshold
        + ". Minimal replication = " + safeReplication + ".";
      if (reached > 0)
        resText += " Threshold was reached " + new Date(reached) + ".";
      return resText;
    }
    /**
     * Checks consistency of the class state.
     * This is costly and currently called only in assert.
     */
    boolean isConsistent() throws IOException {
      if (blockTotal == -1 && blockSafe == -1) {
        return true; // manual safe mode
      }
      int activeBlocks = blockManager.getActiveBlockCount();
      return (blockTotal == activeBlocks) ||
        (blockSafe >= 0 && blockSafe <= blockTotal);
    }
  }
  /**
   * Periodically check whether it is time to leave safe mode.
   * This thread starts when the threshold level is reached.
   *
   */
  class SafeModeMonitor implements Runnable {
    /** interval in msec for checking safe mode: {@value} */
    private static final long recheckInterval = 1000;
    /**
     * Poll until the namesystem shuts down or safe mode may be left,
     * then leave safe mode (unless shutting down) and clear the
     * monitor-thread reference.
     */
    public void run() {
      while (fsRunning && (safeMode != null && !safeMode.canLeave())) {
        try {
          Thread.sleep(recheckInterval);
        } catch (InterruptedException ie) {
          // ignored: loop re-checks fsRunning/safeMode on wakeup
          // NOTE(review): interrupt status is not restored here
        }
      }
      if (!fsRunning) {
        LOG.info("NameNode is being shutdown, exit SafeModeMonitor thread. ");
      } else {
        // leave safe mode and stop the monitor
        try {
          leaveSafeMode(true);
        } catch(SafeModeException es) { // should never happen
          String msg = "SafeModeMonitor may not run during distributed upgrade.";
          assert false : msg;
          throw new RuntimeException(msg, es);
        }
      }
      smmthread = null;
    }
  }
boolean setSafeMode(SafeModeAction action) throws IOException {
if (action != SafeModeAction.SAFEMODE_GET) {
checkSuperuserPrivilege();
switch(action) {
case SAFEMODE_LEAVE: // leave safe mode
leaveSafeMode(false);
break;
case SAFEMODE_ENTER: // enter safe mode
enterSafeMode();
break;
}
}
return isInSafeMode();
}
/**
* Check whether the name node is in safe mode.
* @return true if safe mode is ON, false otherwise
*/
boolean isInSafeMode() {
if (safeMode == null)
return false;
return safeMode.isOn();
}
  /**
   * Increment number of blocks that reached minimal replication.
   * No-op when not in safe mode.
   * @param replication current replication
   */
  void incrementSafeBlockCount(int replication) {
    if (safeMode == null)
      return;
    safeMode.incrementSafeBlockCount((short)replication);
  }
  /**
   * Decrement number of blocks that reached minimal replication.
   * No-op when not in safe mode.
   */
  void decrementSafeBlockCount(Block b) {
    if (safeMode == null) // mostly true
      return;
    safeMode.decrementSafeBlockCount((short)blockManager.countNodes(b).liveReplicas());
  }
  /**
   * Set the total number of blocks in the system
   * (COMPLETE blocks only — see {@link #getCompleteBlocksTotal}).
   * No-op when not in safe mode.
   */
  void setBlockTotal() {
    if (safeMode == null)
      return;
    safeMode.setBlockTotal((int)getCompleteBlocksTotal());
  }
  /**
   * Get the total number of blocks in the system.
   */
  public long getBlocksTotal() {
    return blockManager.getTotalBlocks();
  }
/**
* Get the total number of COMPLETE blocks in the system.
* For safe mode only complete blocks are counted.
*/
long getCompleteBlocksTotal() {
// Calculate number of blocks under construction
long numUCBlocks = 0;
for (Lease lease : leaseManager.getSortedLeases()) {
for (String path : lease.getPaths()) {
INode node;
try {
node = dir.getFileINode(path);
} catch (UnresolvedLinkException e) {
throw new AssertionError("Lease files should reside on this FS");
}
assert node != null : "Found a lease for nonexisting file.";
assert node.isUnderConstruction() :
"Found a lease for file that is not under construction.";
INodeFileUnderConstruction cons = (INodeFileUnderConstruction) node;
BlockInfo[] blocks = cons.getBlocks();
if(blocks == null)
continue;
for(BlockInfo b : blocks) {
if(!b.isComplete())
numUCBlocks++;
}
}
}
LOG.info("Number of blocks under construction: " + numUCBlocks);
return getBlocksTotal() - numUCBlocks;
}
  /**
   * Enter safe mode manually.
   * @throws IOException
   */
  void enterSafeMode() throws IOException {
    writeLock();
    try {
      // Ensure that any concurrent operations have been fully synced
      // before entering safe mode. This ensures that the FSImage
      // is entirely stable on disk as soon as we're in safe mode.
      getEditLog().logSyncAll();
      if (!isInSafeMode()) {
        // the no-arg constructor marks the instance as manual safe mode
        safeMode = new SafeModeInfo();
        return;
      }
      // already in (automatic) safe mode: pin it as manual so the
      // monitor won't leave it, and sync again after the transition
      safeMode.setManual();
      getEditLog().logSyncAll();
      NameNode.stateChangeLog.info("STATE* Safe mode is ON. "
                                   + safeMode.getTurnOffTip());
    } finally {
      writeUnlock();
    }
  }
/**
* Leave safe mode.
* @throws IOException
*/
void leaveSafeMode(boolean checkForUpgrades) throws SafeModeException {
writeLock();
try {
if (!isInSafeMode()) {
NameNode.stateChangeLog.info("STATE* Safe mode is already OFF.");
return;
}
if(getDistributedUpgradeState())
throw new SafeModeException("Distributed upgrade is in progress",
safeMode);
safeMode.leave(checkForUpgrades);
} finally {
writeUnlock();
}
}
String getSafeModeTip() {
readLock();
try {
if (!isInSafeMode())
return "";
return safeMode.getTurnOffTip();
} finally {
readUnlock();
}
}
  /** @return the current size of the edit log */
  long getEditLogSize() throws IOException {
    return getEditLog().getEditLogSize();
  }
  /**
   * Roll the edit log (close edits and open edits.new).
   *
   * @return the checkpoint signature of the resulting state
   * @throws SafeModeException when in safe mode — no checkpointing then
   */
  CheckpointSignature rollEditLog() throws IOException {
    writeLock();
    try {
      if (isInSafeMode()) {
        throw new SafeModeException("Checkpoint not created",
                                    safeMode);
      }
      LOG.info("Roll Edit Log from " + Server.getRemoteAddress());
      return getFSImage().rollEditLog();
    } finally {
      writeUnlock();
    }
  }
  /**
   * Moves fsimage.ckpt to fsImage and edits.new to edits
   * Reopens the new edits file.
   *
   * @param sig the signature of this checkpoint (old image)
   * @throws SafeModeException when in safe mode — no checkpointing then
   */
  void rollFSImage(CheckpointSignature sig) throws IOException {
    writeLock();
    try {
      if (isInSafeMode()) {
        throw new SafeModeException("Checkpoint not created",
                                    safeMode);
      }
      LOG.info("Roll FSImage from " + Server.getRemoteAddress());
      getFSImage().rollFSImage(sig, true);
    } finally {
      writeUnlock();
    }
  }
  /**
   * Start a checkpoint on behalf of a backup node.
   *
   * @param bnReg registration of the requesting backup node
   * @param nnReg registration of this (active) name-node
   * @return the command the backup node should execute
   */
  NamenodeCommand startCheckpoint(
                                  NamenodeRegistration bnReg, // backup node
                                  NamenodeRegistration nnReg) // active name-node
    throws IOException {
    writeLock();
    try {
      LOG.info("Start checkpoint for " + bnReg.getAddress());
      NamenodeCommand cmd = getFSImage().startCheckpoint(bnReg, nnReg);
      // persist the log roll performed as part of starting the checkpoint
      getEditLog().logSync();
      return cmd;
    } finally {
      writeUnlock();
    }
  }
  /**
   * Complete a checkpoint previously started by the given node.
   *
   * @param registration the checkpointing node
   * @param sig signature of the checkpoint being finished
   */
  void endCheckpoint(NamenodeRegistration registration,
                     CheckpointSignature sig) throws IOException {
    writeLock();
    try {
      LOG.info("End checkpoint for " + registration.getAddress());
      getFSImage().endCheckpoint(sig, registration.getRole());
    } finally {
      writeUnlock();
    }
  }
/**
* Returns whether the given block is one pointed-to by a file.
*/
private boolean isValidBlock(Block b) {
return (blockManager.getINode(b) != null);
}
  // Distributed upgrade manager; the methods below are thin delegates to it.
  final UpgradeManagerNamenode upgradeManager = new UpgradeManagerNamenode(this);
  /** @return progress report for the given distributed-upgrade action */
  UpgradeStatusReport distributedUpgradeProgress(UpgradeAction action
                                                 ) throws IOException {
    return upgradeManager.distributedUpgradeProgress(action);
  }
  /** Forward a datanode's upgrade command to the upgrade manager. */
  UpgradeCommand processDistributedUpgradeCommand(UpgradeCommand comm) throws IOException {
    return upgradeManager.processUpgradeCommand(comm);
  }
  /** @return the layout version of the upgrade in progress */
  int getDistributedUpgradeVersion() {
    return upgradeManager.getUpgradeVersion();
  }
  /** @return the command to broadcast to datanodes for the upgrade */
  UpgradeCommand getDistributedUpgradeCommand() throws IOException {
    return upgradeManager.getBroadcastCommand();
  }
  /** @return true if a distributed upgrade is in progress */
  boolean getDistributedUpgradeState() {
    return upgradeManager.getUpgradeState();
  }
  /** @return upgrade progress as reported by the upgrade manager */
  short getDistributedUpgradeStatus() {
    return upgradeManager.getUpgradeStatus();
  }
  /** Start a distributed upgrade if one is required; @return whether started */
  boolean startDistributedUpgradeIfNeeded() throws IOException {
    return upgradeManager.startUpgrade();
  }
  /** Build a PermissionStatus owned by the NN user/supergroup with the given mode. */
  PermissionStatus createFsOwnerPermissions(FsPermission permission) {
    return new PermissionStatus(fsOwner.getShortUserName(), supergroup, permission);
  }
  // The helpers below all funnel into checkPermission(path, doCheckOwner,
  // ancestorAccess, parentAccess, access, subAccess); each one fills in
  // exactly one of the access slots.
  /** Require the caller to be the owner of the path. */
  private FSPermissionChecker checkOwner(String path
                                         ) throws AccessControlException, UnresolvedLinkException {
    return checkPermission(path, true, null, null, null, null);
  }
  /** Require {@code access} on the path itself. */
  private FSPermissionChecker checkPathAccess(String path, FsAction access
                                              ) throws AccessControlException, UnresolvedLinkException {
    return checkPermission(path, false, null, null, access, null);
  }
  /** Require {@code access} on the path's parent. */
  private FSPermissionChecker checkParentAccess(String path, FsAction access
                                                ) throws AccessControlException, UnresolvedLinkException {
    return checkPermission(path, false, null, access, null, null);
  }
  /** Require {@code access} as ancestor access for the path. */
  private FSPermissionChecker checkAncestorAccess(String path, FsAction access
                                                  ) throws AccessControlException, UnresolvedLinkException {
    return checkPermission(path, false, access, null, null, null);
  }
  /** Require only traversal of the path (no specific access bits). */
  private FSPermissionChecker checkTraverse(String path
                                            ) throws AccessControlException, UnresolvedLinkException {
    return checkPermission(path, false, null, null, null, null);
  }
  /** Throws unless permission checking is disabled or the caller is a superuser. */
  private void checkSuperuserPrivilege() throws AccessControlException {
    if (isPermissionEnabled) {
      FSPermissionChecker.checkSuperuserPrivilege(fsOwner, supergroup);
    }
  }
  /**
   * Check whether current user have permissions to access the path.
   * For more details of the parameters, see
   * {@link FSPermissionChecker#checkPermission(String, INodeDirectory, boolean, FsAction, FsAction, FsAction, FsAction)}.
   *
   * @param path the path being accessed
   * @param doCheckOwner whether the caller must own the path
   * @param ancestorAccess access required on the ancestor, or null
   * @param parentAccess access required on the parent, or null
   * @param access access required on the path itself, or null
   * @param subAccess access required on the whole subtree, or null
   * @return the permission checker, so callers can reuse its identity info
   * @throws AccessControlException if any requested access is denied
   */
  private FSPermissionChecker checkPermission(String path, boolean doCheckOwner,
      FsAction ancestorAccess, FsAction parentAccess, FsAction access,
      FsAction subAccess) throws AccessControlException, UnresolvedLinkException {
    FSPermissionChecker pc = new FSPermissionChecker(
        fsOwner.getShortUserName(), supergroup);
    // superusers bypass all checks
    if (!pc.isSuper) {
      dir.waitForReady();
      readLock();
      try {
        pc.checkPermission(path, dir.rootDir, doCheckOwner,
            ancestorAccess, parentAccess, access, subAccess);
      } finally {
        readUnlock();
      }
    }
    return pc;
  }
/**
* Check to see if we have exceeded the limit on the number
* of inodes.
*/
void checkFsObjectLimit() throws IOException {
if (maxFsObjects != 0 &&
maxFsObjects <= dir.totalInodes() + getBlocksTotal()) {
throw new IOException("Exceeded the configured number of objects " +
maxFsObjects + " in the filesystem.");
}
}
  /**
   * Get the configured maximum number of objects (inodes + blocks);
   * 0 means unlimited.
   */
  long getMaxObjects() {
    return maxFsObjects;
  }
  /** @return total number of inodes in the namespace */
  public long getFilesTotal() {
    return this.dir.totalInodes();
  }
  /** @return number of blocks with pending replication requests */
  public long getPendingReplicationBlocks() {
    return blockManager.pendingReplicationBlocksCount;
  }
  /** @return number of under-replicated blocks */
  public long getUnderReplicatedBlocks() {
    return blockManager.underReplicatedBlocksCount;
  }
  /** Returns number of blocks with corrupt replicas */
  public long getCorruptReplicaBlocks() {
    return blockManager.corruptReplicaBlocksCount;
  }
  /** @return number of blocks scheduled for replication */
  public long getScheduledReplicationBlocks() {
    return blockManager.scheduledReplicationBlocksCount;
  }
  /** @return number of blocks pending deletion */
  public long getPendingDeletionBlocks() {
    return blockManager.pendingDeletionBlocksCount;
  }
  /** @return number of excess (over-replicated) blocks */
  public long getExcessBlocks() {
    return blockManager.excessBlocksCount;
  }
  /** @return capacity of the block manager's block map */
  public int getBlockCapacity() {
    return blockManager.getCapacity();
  }
  /** @return "safeMode" while in safe mode, otherwise "Operational" */
  public String getFSState() {
    return isInSafeMode() ? "safeMode" : "Operational";
  }
private ObjectName mbeanName;
/**
* Register the FSNamesystem MBean using the name
* "hadoop:service=NameNode,name=FSNamesystemState"
*/
void registerMBean(Configuration conf) {
// We wrap to bypass standard mbean naming convention.
// This wraping can be removed in java 6 as it is more flexible in
// package naming for mbeans and their impl.
StandardMBean bean;
try {
myFSMetrics = new FSNamesystemMetrics(this, conf);
bean = new StandardMBean(this,FSNamesystemMBean.class);
mbeanName = MBeanUtil.registerMBean("NameNode", "FSNamesystemState", bean);
} catch (NotCompliantMBeanException e) {
e.printStackTrace();
}
LOG.info("Registered FSNamesystemStatusMBean");
}
  /**
   * get FSNamesystemMetrics
   */
  public FSNamesystemMetrics getFSNamesystemMetrics() {
    return myFSMetrics;
  }
  /**
   * shutdown FSNamesystem: currently only unregisters the MBean
   * (if registration succeeded earlier).
   */
  public void shutdown() {
    if (mbeanName != null)
      MBeanUtil.unregisterMBean(mbeanName);
  }
/**
* Number of live data nodes
* @return Number of live data nodes
*/
public int getNumLiveDataNodes() {
int numLive = 0;
synchronized (datanodeMap) {
for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator();
it.hasNext();) {
DatanodeDescriptor dn = it.next();
if (!isDatanodeDead(dn) ) {
numLive++;
}
}
}
return numLive;
}
/**
* Number of dead data nodes
* @return Number of dead data nodes
*/
public int getNumDeadDataNodes() {
int numDead = 0;
synchronized (datanodeMap) {
for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator();
it.hasNext();) {
DatanodeDescriptor dn = it.next();
if (isDatanodeDead(dn) ) {
numDead++;
}
}
}
return numDead;
}
  /**
   * Sets the generation stamp for this filesystem
   * (e.g. when loading a stored image).
   */
  public void setGenerationStamp(long stamp) {
    generationStamp.setStamp(stamp);
  }
  /**
   * Gets the generation stamp for this filesystem
   */
  public long getGenerationStamp() {
    return generationStamp.getStamp();
  }
  /**
   * Increments, logs and then returns the stamp
   *
   * @throws SafeModeException if in safe mode — stamps may not advance then
   */
  private long nextGenerationStamp() throws IOException {
    if (isInSafeMode()) {
      throw new SafeModeException(
          "Cannot get next generation stamp", safeMode);
    }
    long gs = generationStamp.nextStamp();
    getEditLog().logGenerationStamp(gs);
    // NB: callers sync the log
    return gs;
  }
  /**
   * Validate that {@code block} is an under-construction block of a file
   * whose lease is held by {@code clientName}, and return that file's inode.
   *
   * @throws SafeModeException if in safe mode
   * @throws IOException if the block is unknown or not under construction,
   *         or its file is missing or not under construction
   * @throws LeaseExpiredException if the client does not hold the lease
   */
  private INodeFileUnderConstruction checkUCBlock(Block block, String clientName)
    throws IOException {
    // check safe mode
    if (isInSafeMode())
      throw new SafeModeException("Cannot get a new generation stamp and an " +
                                  "access token for block " + block, safeMode);
    // check stored block state
    BlockInfo storedBlock = blockManager.getStoredBlock(block);
    if (storedBlock == null ||
        storedBlock.getBlockUCState() != BlockUCState.UNDER_CONSTRUCTION) {
      throw new IOException(block +
                            " does not exist or is not under Construction" + storedBlock);
    }
    // check file inode
    INodeFile file = storedBlock.getINode();
    if (file==null || !file.isUnderConstruction()) {
      throw new IOException("The file " + storedBlock +
                            " is belonged to does not exist or it is not under construction.");
    }
    // check lease: only the lease holder may touch an UC block
    INodeFileUnderConstruction pendingFile = (INodeFileUnderConstruction)file;
    if (clientName == null || !clientName.equals(pendingFile.getClientName())) {
      throw new LeaseExpiredException("Lease mismatch: " + block +
                                      " is accessed by a non lease holder " + clientName);
    }
    return pendingFile;
  }
  /**
   * Get a new generation stamp together with an access token for
   * a block under construction
   *
   * This method is called for recovering a failed pipeline or setting up
   * a pipeline to append to a block.
   *
   * @param block a block
   * @param clientName the name of a client
   * @return a located block with a new generation stamp and an access token
   * @throws IOException if any error occurs
   */
  LocatedBlock updateBlockForPipeline(Block block,
      String clientName) throws IOException {
    LocatedBlock locatedBlock;
    writeLock();
    try {
      // check validity of parameters
      checkUCBlock(block, clientName);
      // get a new generation stamp and an access token
      block.setGenerationStamp(nextGenerationStamp());
      // empty datanode list: the client rebuilds the pipeline itself
      locatedBlock = new LocatedBlock(block, new DatanodeInfo[0]);
      if (isBlockTokenEnabled) {
        locatedBlock.setBlockToken(blockTokenSecretManager.generateToken(
            block, EnumSet.of(BlockTokenSecretManager.AccessMode.WRITE)));
      }
    } finally {
      writeUnlock();
    }
    // sync outside the lock; nextGenerationStamp only logged the edit
    getEditLog().logSync();
    return locatedBlock;
  }
/**
 * Update a pipeline for a block under construction.
 *
 * @param clientName the name of the client holding the lease
 * @param oldBlock the old block (must be the file's last block)
 * @param newBlock a new block with a new generation stamp and length
 * @param newNodes datanodes in the new pipeline
 * @throws IOException if validation fails or the update is stale
 */
void updatePipeline(String clientName, Block oldBlock,
Block newBlock, DatanodeID[] newNodes)
throws IOException {
writeLock();
try {
assert newBlock.getBlockId()==oldBlock.getBlockId() : newBlock + " and "
+ oldBlock + " has different block identifier";
LOG.info("updatePipeline(block=" + oldBlock
+ ", newGenerationStamp=" + newBlock.getGenerationStamp()
+ ", newLength=" + newBlock.getNumBytes()
+ ", newNodes=" + Arrays.asList(newNodes)
+ ", clientName=" + clientName
+ ")");
// check the validity of the block and lease holder name
final INodeFileUnderConstruction pendingFile =
checkUCBlock(oldBlock, clientName);
final BlockInfoUnderConstruction blockinfo = pendingFile.getLastBlock();
// check new GS & length: a non-increasing generation stamp or a
// shrinking length would roll the block back to an older state
if (newBlock.getGenerationStamp() <= blockinfo.getGenerationStamp() ||
newBlock.getNumBytes() < blockinfo.getNumBytes()) {
String msg = "Update " + oldBlock + " (len = " +
blockinfo.getNumBytes() + ") to an older state: " + newBlock +
" (len = " + newBlock.getNumBytes() +")";
LOG.warn(msg);
throw new IOException(msg);
}
// Update old block with the new generation stamp and new length
blockinfo.setGenerationStamp(newBlock.getGenerationStamp());
blockinfo.setNumBytes(newBlock.getNumBytes());
// find the DatanodeDescriptor objects for the new pipeline members
DatanodeDescriptor[] descriptors = null;
if (newNodes.length > 0) {
descriptors = new DatanodeDescriptor[newNodes.length];
for(int i = 0; i < newNodes.length; i++) {
descriptors[i] = getDatanode(newNodes[i]);
}
}
blockinfo.setExpectedLocations(descriptors);
// persist blocks only if append is supported
String src = leaseManager.findPath(pendingFile);
if (supportAppends) {
dir.persistBlocks(src, pendingFile);
// NOTE(review): logSync is called while still holding the write
// lock here, unlike most other methods in this class — confirm
// this is intentional.
getEditLog().logSync();
}
LOG.info("updatePipeline(" + oldBlock + ") successfully to " + newBlock);
return;
} finally {
writeUnlock();
}
}
/**
 * Called after a successful rename. If any part of the renamed subtree
 * had files being written to, update their leases with the new filename.
 */
void changeLease(String src, String dst, HdfsFileStatus dinfo) {
  final boolean destIsDir = (dinfo != null) && dinfo.isDir();
  final String overwrite;
  final String replaceBy;
  if (destIsDir) {
    // Renamed into an existing directory: rewrite the parent-path
    // prefix of each affected lease path.
    overwrite = new Path(src).getParent().toString() + Path.SEPARATOR;
    replaceBy = dst + Path.SEPARATOR;
  } else {
    // Destination absent or a file: swap the full source path.
    overwrite = src;
    replaceBy = dst;
  }
  leaseManager.changeLease(src, dst, overwrite, replaceBy);
}
/**
 * Serializes leases: writes the count of lease paths followed by one
 * INodeFileUnderConstruction record per leased path. Used when saving
 * the FSImage.
 *
 * @param out stream receiving the serialized lease records
 * @throws IOException if a leased path is missing from the namespace or
 *         is not under construction, or on write failure
 */
void saveFilesUnderConstruction(DataOutputStream out) throws IOException {
// Lock the lease manager so the set of leases is stable while writing.
synchronized (leaseManager) {
out.writeInt(leaseManager.countPath()); // write the size
for (Lease lease : leaseManager.getSortedLeases()) {
for(String path : lease.getPaths()) {
// verify that path exists in namespace
INode node;
try {
node = dir.getFileINode(path);
} catch (UnresolvedLinkException e) {
throw new AssertionError("Lease files should reside on this FS");
}
if (node == null) {
throw new IOException("saveLeases found path " + path +
" but no matching entry in namespace.");
}
if (!node.isUnderConstruction()) {
throw new IOException("saveLeases found path " + path +
" but is not under construction.");
}
INodeFileUnderConstruction cons = (INodeFileUnderConstruction) node;
FSImageSerialization.writeINodeUnderConstruction(out, cons, path);
}
}
}
}
/**
 * Register a name-node.
 * <p>
 * Registration is allowed if there is no ongoing streaming to
 * another backup node.
 * We currently allow only one backup node, but multiple checkpointers
 * if there are no backups.
 *
 * @param registration the registering node's identity and role
 * @throws IOException if namespace IDs differ or a backup is already
 *         registered
 */
void registerBackupNode(NamenodeRegistration registration)
throws IOException {
writeLock();
try {
// Reject nodes from a different namespace.
if(getFSImage().getNamespaceID() != registration.getNamespaceID())
throw new IOException("Incompatible namespaceIDs: "
+ " Namenode namespaceID = " + getFSImage().getNamespaceID()
+ "; " + registration.getRole() +
" node namespaceID = " + registration.getNamespaceID());
boolean regAllowed = getEditLog().checkBackupRegistration(registration);
if(!regAllowed)
throw new IOException("Registration is not allowed. " +
"Another node is registered as a backup.");
} finally {
writeUnlock();
}
}
/**
 * Release (unregister) backup node.
 * <p>
 * Find and remove the backup stream corresponding to the node.
 *
 * @param registration identity of the backup node being released
 * @throws IOException if the namespace IDs do not match
 */
void releaseBackupNode(NamenodeRegistration registration)
throws IOException {
writeLock();
try {
// Same namespace check as registerBackupNode.
if(getFSImage().getNamespaceID() != registration.getNamespaceID())
throw new IOException("Incompatible namespaceIDs: "
+ " Namenode namespaceID = " + getFSImage().getNamespaceID()
+ "; " + registration.getRole() +
" node namespaceID = " + registration.getNamespaceID())
;
getEditLog().releaseBackupStream(registration);
} finally {
writeUnlock();
}
}
/** @return the number of corrupt replicas recorded for the given block */
public int numCorruptReplicas(Block blk) {
return blockManager.numCorruptReplicas(blk);
}
/** Get a datanode descriptor given corresponding storageID */
DatanodeDescriptor getDatanode(String nodeID) {
return datanodeMap.get(nodeID);
}
/**
 * Return a range of corrupt replica block ids. Up to numExpectedBlocks
 * blocks starting at the next block after startingBlockId are returned
 * (fewer if numExpectedBlocks blocks are unavailable). If startingBlockId
 * is null, up to numExpectedBlocks blocks are returned from the beginning.
 * If startingBlockId cannot be found, null is returned.
 *
 * @param numExpectedBlocks Number of block ids to return.
 *  0 <= numExpectedBlocks <= 100
 * @param startingBlockId Block id from which to start. If null, start at
 *  beginning.
 * @return Up to numExpectedBlocks blocks from startingBlockId if it exists
 *
 */
long[] getCorruptReplicaBlockIds(int numExpectedBlocks,
Long startingBlockId) {
// Straight delegation; blockManager owns the corrupt-replica map.
return blockManager.getCorruptReplicaBlockIds(numExpectedBlocks,
startingBlockId);
}
/**
 * Pairing of a corrupt block and the full path of the file containing it,
 * as returned by listCorruptFileBlocks.
 */
static class CorruptFileBlockInfo {
  // Fields made final: they are only assigned in the constructor.
  final String path;
  final Block block;

  public CorruptFileBlockInfo(String p, Block b) {
    path = p;
    block = b;
  }

  @Override
  public String toString() {
    // Tab-separated "<blockName>\t<path>" line, suitable for reports.
    return block.getBlockName() + "\t" + path;
  }
}
/**
 * @param path Restrict corrupt files to this portion of namespace.
 * @param startBlockAfter Support for continuation; the set of files we return
 * back is ordered by blockid; startBlockAfter tells where to start from
 * @return a list in which each entry describes a corrupt file/block
 * @throws AccessControlException if the caller is not a superuser
 * @throws IOException on other errors
 */
Collection<CorruptFileBlockInfo> listCorruptFileBlocks(String path,
String startBlockAfter) throws AccessControlException, IOException {
readLock();
try {
checkSuperuserPrivilege();
long startBlockId = 0;
// print a limited # of corrupt files per call
int count = 0;
ArrayList<CorruptFileBlockInfo> corruptFiles = new ArrayList<CorruptFileBlockInfo>();
if (startBlockAfter != null) {
startBlockId = Block.filename2id(startBlockAfter);
}
BlockIterator blkIterator = blockManager.getCorruptReplicaBlockIterator();
while (blkIterator.hasNext()) {
Block blk = blkIterator.next();
INode inode = blockManager.getINode(blk);
// Only report blocks with zero live replicas (truly corrupt files).
if (inode != null && blockManager.countNodes(blk).liveReplicas() == 0) {
String src = FSDirectory.getFullPathName(inode);
// Apply the continuation cursor and the path-prefix filter.
if (((startBlockAfter == null) || (blk.getBlockId() > startBlockId))
&& (src.startsWith(path))) {
corruptFiles.add(new CorruptFileBlockInfo(src, blk));
count++;
if (count >= DEFAULT_MAX_CORRUPT_FILEBLOCKS_RETURNED)
break;
}
}
}
LOG.info("list corrupt file blocks returned: " + count);
return corruptFiles;
} finally {
readUnlock();
}
}
/**
 * Returns the datanodes that are currently being decommissioned.
 * Decommissioning nodes still appear in the LIVE report, so this filters
 * the live-node list.
 *
 * @return list of datanodes with decommissioning in progress
 */
public ArrayList<DatanodeDescriptor> getDecommissioningNodes() {
  readLock();
  try {
    ArrayList<DatanodeDescriptor> decommissioningNodes =
        new ArrayList<DatanodeDescriptor>();
    // Idiom fix: enhanced for-loop instead of an explicit Iterator loop.
    for (DatanodeDescriptor node :
        getDatanodeListForReport(DatanodeReportType.LIVE)) {
      if (node.isDecommissionInProgress()) {
        decommissioningNodes.add(node);
      }
    }
    return decommissioningNodes;
  } finally {
    readUnlock();
  }
}
/*
 * Delegation Token
 */
/**
 * Builds the delegation token secret manager from configuration:
 * key-update interval, max token lifetime, and renew interval, plus the
 * fixed token-remover scan interval.
 */
private DelegationTokenSecretManager createDelegationTokenSecretManager(
Configuration conf) {
return new DelegationTokenSecretManager(conf.getLong(
DFSConfigKeys.DFS_NAMENODE_DELEGATION_KEY_UPDATE_INTERVAL_KEY,
DFSConfigKeys.DFS_NAMENODE_DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT),
conf.getLong(
DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY,
DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT),
conf.getLong(
DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_KEY,
DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT),
DELEGATION_TOKEN_REMOVER_SCAN_INTERVAL, this);
}
/**
 * Returns the DelegationTokenSecretManager instance in the namesystem.
 * @return delegation token secret manager object
 */
public DelegationTokenSecretManager getDelegationTokenSecretManager() {
return dtSecretManager;
}
/**
 * Issues a new delegation token for the current user.
 *
 * @param renewer principal allowed to renew the token
 * @return the new token, or null if the secret manager is not running
 * @throws IOException if in safe mode or the connection is not
 *         Kerberos/certificate authenticated
 */
public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
throws IOException {
if (isInSafeMode()) {
throw new SafeModeException("Cannot issue delegation token", safeMode);
}
// Tokens may not be issued over a connection that itself authenticated
// with a delegation token.
if (!isAllowedDelegationTokenOp()) {
throw new IOException(
"Delegation Token can be issued only with kerberos or web authentication");
}
if(dtSecretManager == null || !dtSecretManager.isRunning()) {
LOG.warn("trying to get DT with no secret manager running");
return null;
}
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
String user = ugi.getUserName();
Text owner = new Text(user);
Text realUser = null;
// For proxy users, record the real (authenticating) user in the token.
if (ugi.getRealUser() != null) {
realUser = new Text(ugi.getRealUser().getUserName());
}
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(owner,
renewer, realUser);
Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
dtId, dtSecretManager);
long expiryTime = dtSecretManager.getTokenExpiryTime(dtId);
// Persist the issuance in the edit log so tokens survive restarts.
logGetDelegationToken(dtId, expiryTime);
return token;
}
/**
 * Renews an existing delegation token on behalf of the current user.
 *
 * @param token the token to renew
 * @return New expiryTime of the token
 * @throws InvalidToken if the token is invalid
 * @throws IOException if in safe mode or the connection is not
 *         Kerberos/certificate authenticated
 */
public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
throws InvalidToken, IOException {
if (isInSafeMode()) {
throw new SafeModeException("Cannot renew delegation token", safeMode);
}
if (!isAllowedDelegationTokenOp()) {
throw new IOException(
"Delegation Token can be renewed only with kerberos or web authentication");
}
String renewer = UserGroupInformation.getCurrentUser().getShortUserName();
long expiryTime = dtSecretManager.renewToken(token, renewer);
// Decode the identifier from the token bytes for edit-log persistence.
DelegationTokenIdentifier id = new DelegationTokenIdentifier();
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
id.readFields(in);
logRenewDelegationToken(id, expiryTime);
return expiryTime;
}
/**
 * Cancels a delegation token and records the cancellation in the edit log.
 *
 * @param token the token to cancel
 * @throws IOException if in safe mode or the token cannot be cancelled
 */
public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
throws IOException {
if (isInSafeMode()) {
throw new SafeModeException("Cannot cancel delegation token", safeMode);
}
// NOTE(review): unlike get/renew, cancel does not call
// isAllowedDelegationTokenOp() — confirm this is intentional.
String canceller = UserGroupInformation.getCurrentUser().getUserName();
DelegationTokenIdentifier id = dtSecretManager
.cancelToken(token, canceller);
logCancelDelegationToken(id);
}
/**
 * Serializes the delegation-token secret manager state (used when saving
 * the FSImage).
 * @param out stream receiving the secret manager state
 */
void saveSecretManagerState(DataOutputStream out) throws IOException {
dtSecretManager.saveSecretManagerState(out);
}
/**
 * Restores the delegation-token secret manager state (used when loading
 * the FSImage).
 * @param in stream to load the state of the secret manager from
 */
void loadSecretManagerState(DataInputStream in) throws IOException {
dtSecretManager.loadSecretManagerState(in);
}
/**
 * Log the getDelegationToken operation to edit logs.
 * The log entry is written under the write lock; the sync happens after
 * the lock is released (same pattern for all log* methods below).
 *
 * @param id identifier of the new delegation token
 * @param expiryTime when delegation token expires
 */
private void logGetDelegationToken(DelegationTokenIdentifier id,
long expiryTime) throws IOException {
writeLock();
try {
getEditLog().logGetDelegationToken(id, expiryTime);
} finally {
writeUnlock();
}
getEditLog().logSync();
}
/**
 * Log the renewDelegationToken operation to edit logs
 *
 * @param id identifier of the delegation token being renewed
 * @param expiryTime when delegation token expires
 */
private void logRenewDelegationToken(DelegationTokenIdentifier id,
long expiryTime) throws IOException {
writeLock();
try {
getEditLog().logRenewDelegationToken(id, expiryTime);
} finally {
writeUnlock();
}
getEditLog().logSync();
}
/**
 * Log the cancelDelegationToken operation to edit logs
 *
 * @param id identifier of the delegation token being cancelled
 */
private void logCancelDelegationToken(DelegationTokenIdentifier id)
throws IOException {
writeLock();
try {
getEditLog().logCancelDelegationToken(id);
} finally {
writeUnlock();
}
getEditLog().logSync();
}
/**
 * Log the updateMasterKey operation to edit logs
 *
 * @param key new delegation key.
 */
public void logUpdateMasterKey(DelegationKey key) throws IOException {
writeLock();
try {
getEditLog().logUpdateMasterKey(key);
} finally {
writeUnlock();
}
getEditLog().logSync();
}
/**
 * Whether a delegation-token operation is permitted over the current
 * connection. With security enabled, token operations are only allowed
 * over Kerberos, Kerberos-SSL, or certificate-authenticated connections
 * (i.e. never over a connection that itself used a delegation token).
 *
 * @return true if delegation token operation is allowed
 * @throws IOException if the current user cannot be determined
 */
private boolean isAllowedDelegationTokenOp() throws IOException {
  // Determine the method first (may throw), matching original behavior.
  AuthenticationMethod authMethod = getConnectionAuthenticationMethod();
  // Idiom fix: single boolean expression instead of
  // if(...) return false; return true;
  return !UserGroupInformation.isSecurityEnabled()
      || authMethod == AuthenticationMethod.KERBEROS
      || authMethod == AuthenticationMethod.KERBEROS_SSL
      || authMethod == AuthenticationMethod.CERTIFICATE;
}
/**
 * Returns authentication method used to establish the connection.
 * For proxy users, the real (authenticating) user's method is returned.
 *
 * @return AuthenticationMethod used to establish connection
 * @throws IOException if the current user cannot be determined
 */
private AuthenticationMethod getConnectionAuthenticationMethod()
throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
AuthenticationMethod authMethod = ugi.getAuthenticationMethod();
if (authMethod == AuthenticationMethod.PROXY) {
authMethod = ugi.getRealUser().getAuthenticationMethod();
}
return authMethod;
}
/**
 * If the remote IP for namenode method invocation is null, then the
 * invocation is internal to the namenode. Client invoked methods are invoked
 * over RPC and always have address != null.
 */
private boolean isExternalInvocation() {
return Server.getRemoteIp() != null;
}
/**
 * Log fsck event in the audit log (only when audit logging is enabled).
 *
 * @param src path being checked by fsck
 * @param remoteAddress address of the fsck client
 */
void logFsckEvent(String src, InetAddress remoteAddress) throws IOException {
if (auditLog.isInfoEnabled()) {
logAuditEvent(UserGroupInformation.getCurrentUser(),
remoteAddress,
"fsck", src, null, null);
}
}
/**
 * Register NameNodeMXBean with the platform MBean server. Registration
 * failures are logged but never fatal.
 */
private void registerMXBean() {
// register MXBean
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
try {
ObjectName mxbeanName = new ObjectName("HadoopInfo:type=NameNodeInfo");
mbs.registerMBean(this, mxbeanName);
} catch ( javax.management.InstanceAlreadyExistsException iaee ) {
// in unit tests, we may run and restart the NN within the same JVM
LOG.info("NameNode MXBean already registered");
} catch ( javax.management.JMException e ) {
LOG.warn("Failed to register NameNodeMXBean", e);
}
}
/*
 * NameNodeMXBean implementation: the methods below expose namenode state
 * for JMX interfaces. Each is a thin delegate to an existing accessor.
 */
@Override // NameNodeMXBean
public String getVersion() {
return VersionInfo.getVersion();
}
@Override // NameNodeMXBean
public long getUsed() {
return this.getCapacityUsed();
}
@Override // NameNodeMXBean
public long getFree() {
return this.getCapacityRemaining();
}
@Override // NameNodeMXBean
public long getTotal() {
return this.getCapacityTotal();
}
@Override // NameNodeMXBean
public String getSafemode() {
// Empty string means safe mode is off.
if (!this.isInSafeMode())
return "";
return "Safe mode is ON." + this.getSafeModeTip();
}
@Override // NameNodeMXBean
public boolean isUpgradeFinalized() {
return this.getFSImage().isUpgradeFinalized();
}
@Override // NameNodeMXBean
public long getNonDfsUsedSpace() {
return getCapacityUsedNonDFS();
}
@Override // NameNodeMXBean
public float getPercentUsed() {
return getCapacityUsedPercent();
}
@Override // NameNodeMXBean
public float getPercentRemaining() {
return getCapacityRemainingPercent();
}
@Override // NameNodeMXBean
public long getTotalBlocks() {
return getBlocksTotal();
}
@Override // NameNodeMXBean
public long getTotalFiles() {
return getFilesTotal();
}
@Override // NameNodeMXBean
public int getThreads() {
return ManagementFactory.getThreadMXBean().getThreadCount();
}
/**
 * Returned information is a JSON representation of map with host name as the
 * key and value is a map of live node attribute keys to its values
 * (currently: lastContact in seconds, usedSpace in bytes).
 */
@Override // NameNodeMXBean
public String getLiveNodes() {
final Map<String, Object> info = new HashMap<String, Object>();
final ArrayList<DatanodeDescriptor> aliveNodeList =
this.getDatanodeListForReport(DatanodeReportType.LIVE);
for (DatanodeDescriptor node : aliveNodeList) {
final Map<String, Object> innerinfo = new HashMap<String, Object>();
innerinfo.put("lastContact", getLastContact(node));
innerinfo.put("usedSpace", getDfsUsed(node));
info.put(node.getHostName(), innerinfo);
}
return JSON.toString(info);
}
/**
 * Returned information is a JSON representation of map with host name as the
 * key and value is a map of dead node attribute keys to its values
 * (currently only lastContact in seconds).
 */
@Override // NameNodeMXBean
public String getDeadNodes() {
final Map<String, Object> info = new HashMap<String, Object>();
final ArrayList<DatanodeDescriptor> deadNodeList =
this.getDatanodeListForReport(DatanodeReportType.DEAD);
for (DatanodeDescriptor node : deadNodeList) {
final Map<String, Object> innerinfo = new HashMap<String, Object>();
innerinfo.put("lastContact", getLastContact(node));
info.put(node.getHostName(), innerinfo);
}
return JSON.toString(info);
}
/**
 * Returned information is a JSON representation of map with host name as the
 * key and value is a map of decommissioning node attribute keys to its
 * values (replication counters from the node's decommissioning status).
 */
@Override // NameNodeMXBean
public String getDecomNodes() {
final Map<String, Object> info = new HashMap<String, Object>();
final ArrayList<DatanodeDescriptor> decomNodeList =
this.getDecommissioningNodes();
for (DatanodeDescriptor node : decomNodeList) {
final Map<String, Object> innerinfo = new HashMap<String, Object>();
innerinfo.put("underReplicatedBlocks", node.decommissioningStatus
.getUnderReplicatedBlocks());
innerinfo.put("decommissionOnlyReplicas", node.decommissioningStatus
.getDecommissionOnlyReplicas());
innerinfo.put("underReplicateInOpenFiles", node.decommissioningStatus
.getUnderReplicatedInOpenFiles());
info.put(node.getHostName(), innerinfo);
}
return JSON.toString(info);
}
/** Seconds since the datanode's last heartbeat. */
private long getLastContact(DatanodeDescriptor alivenode) {
return (System.currentTimeMillis() - alivenode.getLastUpdate())/1000;
}
/** Bytes of DFS storage used on the datanode. */
private long getDfsUsed(DatanodeDescriptor alivenode) {
return alivenode.getDfsUsed();
}
}
| [
"archen94@gmail.com"
] | archen94@gmail.com |
bc52a3191f57bb2294dda2c47c89a277cffd8937 | 99b2878b8215bbbe1aad542334c1628f0fb3c57f | /gmall-ums/src/main/java/com/atguigu/gmall/ums/config/MybatisPlusConfig.java | 4e60cfb5dfed4050d194f1519cf3ec4ede5f51bb | [
"Apache-2.0"
] | permissive | ShangBaiShuYao/gmall | a78060764ec45408905ef9dd74ac0e14e22c4b0c | 16876593176851856bc4ba4eb8777393cc35805e | refs/heads/master | 2022-12-22T21:58:33.626881 | 2021-03-19T17:57:32 | 2021-03-19T17:57:32 | 218,497,070 | 3 | 0 | Apache-2.0 | 2022-12-16T14:50:45 | 2019-10-30T10:04:21 | JavaScript | UTF-8 | Java | false | false | 784 | java | package com.atguigu.gmall.ums.config;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
// Spring Boot style configuration (Java config rather than XML)
@Configuration
public class MybatisPlusConfig {
@Bean
public PaginationInterceptor paginationInterceptor() {
// Registers MyBatis-Plus pagination support with default settings.
PaginationInterceptor paginationInterceptor = new PaginationInterceptor();
// Behavior when the requested page exceeds the last page:
// true = jump back to the first page, false = serve the request as-is.
// Default is false.
// paginationInterceptor.setOverflow(false);
// Maximum rows per page; default is 500, -1 means unlimited.
// paginationInterceptor.setLimit(500);
return paginationInterceptor;
}
} | [
"shangbaishuyao@163.com"
] | shangbaishuyao@163.com |
e1ccc588495fe6a7459437c60de98c5fd833123a | 4095eea2be8071b19a1aebe5b2c812ba9c74350b | /pipeline/stream-processing/src/main/java/io/fineo/lambda/avro/DeflatorFactory.java | 49d2c126033a49d802c83b092e8e6267c3e2c902 | [] | no_license | jyates/fineo-ingest | c24381ed017e0d0cd05294026a8861cb73c12706 | ce64905f113853d964b04fb8f0a899b89f566ee8 | refs/heads/master | 2021-03-24T13:10:28.057192 | 2017-04-04T20:31:32 | 2017-04-04T20:31:32 | 58,411,956 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,730 | java | package io.fineo.lambda.avro;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.function.Supplier;
import java.util.zip.Deflater;
import java.util.zip.DeflaterInputStream;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;

import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
import org.tukaani.xz.LZMA2Options;

import static io.fineo.lambda.avro.DeflatorConstants.*;
/**
 * Factory for a deflator stream around the usual output stream.
 * <p>
 * Each factory produces a matched pair: {@link #deflate(OutputStream)} wraps a
 * stream so bytes written to it are compressed, and {@link #inflate(InputStream)}
 * wraps a stream so compressed bytes read from it are decompressed.
 * {@link DeflatorFactoryEnum} maps factories to stable ordinals.
 */
public abstract class DeflatorFactory {

  /**
   * Wraps the stream so that bytes written are compressed.
   * Callers must close the returned stream to flush trailing codec data.
   */
  public abstract OutputStream deflate(OutputStream baos) throws IOException;

  /** Wraps the stream so that compressed bytes read are decompressed. */
  public abstract InputStream inflate(InputStream in) throws IOException;

  /** Codec name constant (see DeflatorConstants); used by ordinalOf. */
  protected abstract String getName();

  public static final int DEFAULT_DEFLATE_LEVEL = Deflater.DEFAULT_COMPRESSION;
  public static final int DEFAULT_XZ_LEVEL = LZMA2Options.PRESET_DEFAULT;

  /** GZIP compression (java.util.zip). */
  public static DeflatorFactory gzip() {
    return new DeflatorFactory() {
      @Override
      public OutputStream deflate(OutputStream baos) throws IOException {
        return new GZIPOutputStream(baos);
      }

      @Override
      public InputStream inflate(InputStream in) throws IOException {
        return new GZIPInputStream(in);
      }

      @Override
      protected String getName() {
        return DeflatorConstants.GZIP_DEFLATOR;
      }
    };
  }

  /** Identity codec: streams are passed through unmodified. */
  public static DeflatorFactory none() {
    return new DeflatorFactory() {
      @Override
      public OutputStream deflate(OutputStream baos) throws IOException {
        return baos;
      }

      @Override
      public InputStream inflate(InputStream in) throws IOException {
        return in;
      }

      @Override
      protected String getName() {
        return NULL_DEFLATOR;
      }
    };
  }

  /** BZip2 compression (commons-compress). */
  public static DeflatorFactory bzip() {
    return new DeflatorFactory() {
      @Override
      public OutputStream deflate(OutputStream baos) throws IOException {
        return new BZip2CompressorOutputStream(baos);
      }

      @Override
      public InputStream inflate(InputStream in) throws IOException {
        return new BZip2CompressorInputStream(in);
      }

      @Override
      protected String getName() {
        return BZIP2_DEFLATOR;
      }
    };
  }

  /**
   * XZ (LZMA2) compression (commons-compress / org.tukaani.xz).
   * @param compressionLevel XZ preset level (see LZMA2Options)
   */
  public static DeflatorFactory xz(int compressionLevel) {
    return new DeflatorFactory() {
      @Override
      public OutputStream deflate(OutputStream baos) throws IOException {
        return new XZCompressorOutputStream(baos, compressionLevel);
      }

      @Override
      public InputStream inflate(InputStream in) throws IOException {
        return new XZCompressorInputStream(in);
      }

      @Override
      protected String getName() {
        return XZ_DEFLATOR;
      }
    };
  }

  /**
   * Raw DEFLATE compression (java.util.zip), without zlib/gzip headers.
   * @param compressionLevel java.util.zip.Deflater level
   */
  public static DeflatorFactory deflate(int compressionLevel) {
    return new DeflatorFactory() {
      @Override
      public OutputStream deflate(OutputStream baos) throws IOException {
        // see org.apache.avro.file.DeflateCodec for why we use nowrap here
        return new DeflaterOutputStream(baos, new Deflater(compressionLevel, true));
      }

      @Override
      public InputStream inflate(InputStream in) throws IOException {
        // BUG FIX: the original returned a DeflaterInputStream, which
        // COMPRESSES the bytes it reads instead of decompressing them.
        // InflaterInputStream decompresses; nowrap=true matches the
        // nowrap Deflater used in deflate() above.
        return new InflaterInputStream(in, new Inflater(true));
      }

      @Override
      protected String getName() {
        return DEFLATE_DEFLATOR;
      }
    };
  }

  /**
   * Stable enumeration of the known codecs. Ordinals are used as a
   * serialized codec identifier, so the declaration order must not change.
   */
  public enum DeflatorFactoryEnum {
    NULL(() -> none()),
    BZIP(() -> bzip()),
    DEFLATE(() -> deflate(DEFAULT_DEFLATE_LEVEL)),
    GZIP(() -> gzip()),
    XZ(() -> xz(DEFAULT_XZ_LEVEL));

    private final Supplier<DeflatorFactory> supplier;

    DeflatorFactoryEnum(Supplier<DeflatorFactory> supplier) {
      this.supplier = supplier;
    }

    /** Creates a fresh factory for this codec (default settings). */
    public DeflatorFactory getFactory() {
      return supplier.get();
    }

    /**
     * Maps a factory instance back to its enum ordinal via its name
     * constant. A null factory maps to {@link #NULL}.
     */
    public static int ordinalOf(DeflatorFactory deflator) {
      if (deflator == null) {
        return NULL.ordinal();
      }
      switch (deflator.getName()) {
        case NULL_DEFLATOR:
          return NULL.ordinal();
        case DeflatorConstants.GZIP_DEFLATOR:
          return GZIP.ordinal();
        case BZIP2_DEFLATOR:
          return BZIP.ordinal();
        case DEFLATE_DEFLATOR:
          return DEFLATE.ordinal();
        case XZ_DEFLATOR:
          return XZ.ordinal();
        default:
          // FIX: original message said "Known deflator factory".
          throw new UnsupportedOperationException("Unknown deflator factory: " + deflator);
      }
    }
  }
}
| [
"jesse.k.yates@gmail.com"
] | jesse.k.yates@gmail.com |
9672b27a652c58ce67257d30afc0d07a8d953324 | d60ed49cf28e50a497935bb7c90d2b6613ea84d0 | /yyhttpcore/src/main/java/com/yycloud/core/utils/http/.svn/text-base/AsyncHttpClient.java.svn-base | 527753f649bb8a380598d560a295e63f8b1d7162 | [] | no_license | m122469119/G3MobileAPP | b50a0d8baee7f123f39f4aa0165ad1c7b1cdc33f | 2e8f2c324cf9238a421aeb4878a23241abf71a9d | refs/heads/master | 2021-05-09T02:57:40.397559 | 2017-12-11T11:00:46 | 2017-12-11T11:00:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 28,267 | /*
* Copyright (C) 2013 WhiteCat 白猫 (www.thinkandroid.cn)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yycloud.core.utils.http;
import android.content.Context;
import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.HttpVersion;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CookieStore;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.ClientContext;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.conn.params.ConnPerRouteBean;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.entity.HttpEntityWrapper;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.protocol.SyncBasicHttpContext;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
public class AsyncHttpClient
{
/** Client version reported in the User-Agent header. */
private static final String VERSION = "1.1";
/** Minimum number of threads kept in the pool. */
private static final int DEFAULT_CORE_POOL_SIZE = 5;
/** Maximum number of threads the pool may grow to. */
private static final int DEFAULT_MAXIMUM_POOL_SIZE = 10;
/** Idle keep-alive time allowed for pool threads (seconds). */
private static final int DEFAULT_KEEP_ALIVETIME = 0;
/** Maximum number of concurrent HTTP connections. */
private static final int DEFAULT_MAX_CONNECTIONS = 10;
/** Socket/connect timeout; defaults to 10 seconds. */
private static final int DEFAULT_SOCKET_TIMEOUT = 10 * 1000;
/** Default number of retry attempts on request failure. */
private static final int DEFAULT_MAX_RETRIES = 5;
/** Default socket buffer size in bytes. */
private static final int DEFAULT_SOCKET_BUFFER_SIZE = 8192;
private static final String HEADER_ACCEPT_ENCODING = "Accept-Encoding";
private static final String ENCODING_GZIP = "gzip";
private static int maxConnections = DEFAULT_MAX_CONNECTIONS;
private static int socketTimeout = DEFAULT_SOCKET_TIMEOUT;
private final DefaultHttpClient httpClient;
private final HttpContext httpContext;
// Executes queued requests; replaceable via setThreadPool().
private ThreadPoolExecutor threadPool;
// Per-context pending requests, used for cancellation; weak keys so a
// destroyed Context does not leak.
private final Map<Context, List<WeakReference<Future<?>>>> requestMap;
// Extra headers added to every outgoing request.
private final Map<String, String> clientHeaderMap;
/**
 * Creates a new AsyncHttpClient with default timeouts, connection limits,
 * transparent gzip request/response handling, and a retry handler.
 */
public AsyncHttpClient()
{
// Connection manager / socket parameters.
BasicHttpParams httpParams = new BasicHttpParams();
ConnManagerParams.setTimeout(httpParams, socketTimeout);
ConnManagerParams.setMaxConnectionsPerRoute(httpParams,
new ConnPerRouteBean(maxConnections));
ConnManagerParams.setMaxTotalConnections(httpParams,
DEFAULT_MAX_CONNECTIONS);
HttpConnectionParams.setSoTimeout(httpParams, socketTimeout);
HttpConnectionParams.setConnectionTimeout(httpParams, socketTimeout);
HttpConnectionParams.setTcpNoDelay(httpParams, true);
HttpConnectionParams.setSocketBufferSize(httpParams,
DEFAULT_SOCKET_BUFFER_SIZE);
HttpProtocolParams.setVersion(httpParams, HttpVersion.HTTP_1_1);
HttpProtocolParams.setUserAgent(httpParams, String.format(
"thinkandroid/%s (http://www.thinkandroid.cn)", VERSION));
// Register plain HTTP (80) and HTTPS (443) schemes.
SchemeRegistry schemeRegistry = new SchemeRegistry();
schemeRegistry.register(new Scheme("http", PlainSocketFactory
.getSocketFactory(), 80));
schemeRegistry.register(new Scheme("https", SSLSocketFactory
.getSocketFactory(), 443));
ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager(
httpParams, schemeRegistry);
httpContext = new SyncBasicHttpContext(new BasicHttpContext());
httpClient = new DefaultHttpClient(cm, httpParams);
// Request interceptor: advertise gzip support and attach any
// client-level headers configured via clientHeaderMap.
httpClient.addRequestInterceptor(new HttpRequestInterceptor()
{
@Override
public void process(HttpRequest request, HttpContext context)
{
if (!request.containsHeader(HEADER_ACCEPT_ENCODING))
{
request.addHeader(HEADER_ACCEPT_ENCODING, ENCODING_GZIP);
}
for (String header : clientHeaderMap.keySet())
{
request.addHeader(header, clientHeaderMap.get(header));
}
}
});
// Response interceptor: transparently unwrap gzip-encoded entities.
httpClient.addResponseInterceptor(new HttpResponseInterceptor()
{
@Override
public void process(HttpResponse response, HttpContext context)
{
final HttpEntity entity = response.getEntity();
if (entity == null)
{
return;
}
final Header encoding = entity.getContentEncoding();
if (encoding != null)
{
for (HeaderElement element : encoding.getElements())
{
if (element.getName().equalsIgnoreCase(ENCODING_GZIP))
{
response.setEntity(new InflatingEntity(response
.getEntity()));
break;
}
}
}
}
});
httpClient.setHttpRequestRetryHandler(new RetryHandler(
DEFAULT_MAX_RETRIES));
// Bounded queue; CallerRunsPolicy makes the submitting thread execute
// the request itself when the queue is full.
threadPool = new ThreadPoolExecutor(DEFAULT_CORE_POOL_SIZE,
DEFAULT_MAXIMUM_POOL_SIZE, DEFAULT_KEEP_ALIVETIME,
TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(3),
new ThreadPoolExecutor.CallerRunsPolicy());
requestMap = new WeakHashMap<Context, List<WeakReference<Future<?>>>>();
clientHeaderMap = new HashMap<String, String>();
}
/**
 * Returns the HttpClient backing this helper. Callers can use it to apply
 * fine-grained settings through the client's ConnectionManager, HttpParams
 * and SchemeRegistry.
 *
 * @return the underlying HttpClient instance
 */
public HttpClient getHttpClient() {
    return httpClient;
}
/**
 * Returns the HttpContext shared by all requests issued through this helper.
 * Useful for reading or tweaking per-client attributes such as the
 * CookieStore.
 *
 * @return the underlying HttpContext instance
 */
public HttpContext getHttpContext() {
    return httpContext;
}
/**
 * Sets an optional CookieStore to use when making requests.
 *
 * @param cookieStore
 * The CookieStore implementation to use, usually an instance of
 * {@link PersistentCookieStore}
 */
public void setCookieStore(CookieStore cookieStore)
{
// Stored on the shared HttpContext, so every request issued by this
// client reuses the same cookie jar.
httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
}
/**
* Overrides the threadpool implementation used when queuing/pooling
* requests. By default, Executors.newCachedThreadPool() is used.
*
* @param threadPool
* an instance of {@link ThreadPoolExecutor} to use for
* queuing/pooling requests.
*/
public void setThreadPool(ThreadPoolExecutor threadPool)
{
this.threadPool = threadPool;
}
/**
 * Sets the User-Agent header to be sent with each request. By default,
 * "thinkandroid/VERSION (http://www.thinkandroid.cn)" is used (see the
 * constructor).
 *
 * @param userAgent
 * the string to use in the User-Agent header.
 */
public void setUserAgent(String userAgent)
{
HttpProtocolParams.setUserAgent(this.httpClient.getParams(), userAgent);
}
/**
 * Sets the connection timeout. By default, 10 seconds.
 *
 * <p>The same value is applied to the connection-manager timeout, the
 * socket (read) timeout and the connect timeout.
 *
 * @param timeout
 * the connect/socket timeout in milliseconds
 */
public void setTimeout(int timeout)
{
final HttpParams httpParams = this.httpClient.getParams();
ConnManagerParams.setTimeout(httpParams, timeout);
HttpConnectionParams.setSoTimeout(httpParams, timeout);
HttpConnectionParams.setConnectionTimeout(httpParams, timeout);
}
/**
* Sets the SSLSocketFactory to user when making requests. By default, a
* new, default SSLSocketFactory is used.
*
* @param sslSocketFactory
* the socket factory to use for https requests.
*/
public void setSSLSocketFactory(SSLSocketFactory sslSocketFactory)
{
this.httpClient.getConnectionManager().getSchemeRegistry()
.register(new Scheme("https", sslSocketFactory, 443));
}
/**
 * Sets headers that will be added to all requests this client makes (before
 * sending).
 *
 * <p>The header is stored in {@code clientHeaderMap} and applied by the
 * request interceptor installed in the constructor; a later call with the
 * same name replaces the previous value.
 *
 * @param header
 * the name of the header
 * @param value
 * the contents of the header
 */
public void addHeader(String header, String value)
{
clientHeaderMap.put(header, value);
}
/**
 * Sets basic authentication for the request. Uses AuthScope.ANY. This is
 * the same as setBasicAuth('username','password',AuthScope.ANY).
 *
 * @param user the user name to authenticate with
 * @param pass the password to authenticate with
 */
public void setBasicAuth(String user, String pass)
{
AuthScope scope = AuthScope.ANY;
setBasicAuth(user, pass, scope);
}
/**
 * Sets basic authentication for the request. You should pass in your
 * AuthScope for security. It should be like this
 * setBasicAuth("username","password", new
 * AuthScope("host",port,AuthScope.ANY_REALM))
 *
 * @param user the user name to authenticate with
 * @param pass the password to authenticate with
 * @param scope
 * - an AuthScope object restricting where the credentials apply
 *
 */
public void setBasicAuth(String user, String pass, AuthScope scope)
{
UsernamePasswordCredentials credentials = new UsernamePasswordCredentials(
user, pass);
this.httpClient.getCredentialsProvider().setCredentials(scope,
credentials);
}
/**
 * Cancels any pending (or potentially active) requests associated with the
 * passed Context.
 * <p>
 * <b>Note:</b> only requests that were created with a non-null android
 * Context are affected. Intended for use from an Activity's onDestroy to
 * tear down requests that are no longer needed.
 *
 * @param context
 *            the android Context the requests were registered under
 * @param mayInterruptIfRunning
 *            whether requests that are already running should be
 *            interrupted in addition to cancelling pending ones
 */
public void cancelRequests(Context context, boolean mayInterruptIfRunning)
{
    final List<WeakReference<Future<?>>> pending = requestMap.get(context);
    if (pending != null)
    {
        for (final WeakReference<Future<?>> ref : pending)
        {
            final Future<?> future = ref.get();
            // The weak reference may already have been cleared by the GC.
            if (future != null)
            {
                future.cancel(mayInterruptIfRunning);
            }
        }
    }
    // Drop the bookkeeping entry whether or not anything was pending.
    requestMap.remove(context);
}
//
// HTTP GET Requests
//
/**
 * Perform a HTTP GET request, without any parameters.
 *
 * <p>Delegates to {@link #get(Context, String, RequestParams,
 * AsyncHttpResponseHandler)} with a null Context and null parameters.
 *
 * @param url
 * the URL to send the request to.
 * @param responseHandler
 * the response handler instance that should handle the response.
 */
public void get(String url, AsyncHttpResponseHandler responseHandler)
{
get(null, url, null, responseHandler);
}
/**
* Perform a HTTP GET request with parameters.
*
* @param url
* the URL to send the request to.
* @param params
* additional GET parameters to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void get(String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
get(null, url, params, responseHandler);
}
/**
* Perform a HTTP GET request without any parameters and track the Android
* Context which initiated the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void get(Context context, String url,
AsyncHttpResponseHandler responseHandler)
{
get(context, url, null, responseHandler);
}
/**
* Perform a HTTP GET request and track the Android Context which initiated
* the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param params
* additional GET parameters to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void get(Context context, String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
sendRequest(httpClient, httpContext,
new HttpGet(getUrlWithQueryString(url, params)), null,
responseHandler, context);
}
//
// HTTP download Requests
//
public void download(String url, AsyncHttpResponseHandler responseHandler)
{
download(null, url, null, responseHandler);
}
public void download(String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
download(null, url, params, responseHandler);
}
/**
* Perform a HTTP GET request without any parameters and track the Android
* Context which initiated the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void download(Context context, String url,
AsyncHttpResponseHandler responseHandler)
{
download(context, url, null, responseHandler);
}
/**
* Perform a HTTP GET request and track the Android Context which initiated
* the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param params
* additional GET parameters to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void download(Context context, String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
sendRequest(httpClient, httpContext,
new HttpGet(getUrlWithQueryString(url, params)), null,
responseHandler, context);
}
/**
 * Performs a HTTP GET request with per-request headers, tracking the
 * Android Context that initiated it.
 *
 * @param context
 *            the Android Context which initiated the request.
 * @param url
 *            the URL to send the request to.
 * @param headers
 *            headers applied only to this request, or null for none
 * @param params
 *            additional GET parameters to send with the request.
 * @param responseHandler
 *            the response handler instance that should handle the response.
 */
public void get(Context context, String url, Header[] headers,
        RequestParams params, AsyncHttpResponseHandler responseHandler)
{
    final HttpUriRequest getRequest =
            new HttpGet(getUrlWithQueryString(url, params));
    if (headers != null)
    {
        getRequest.setHeaders(headers);
    }
    sendRequest(httpClient, httpContext, getRequest, null, responseHandler,
            context);
}
//
// HTTP POST Requests
//
/**
* Perform a HTTP POST request, without any parameters.
*
* @param url
* the URL to send the request to.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void post(String url, AsyncHttpResponseHandler responseHandler)
{
post(null, url, null, responseHandler);
}
/**
* Perform a HTTP POST request with parameters.
*
* @param url
* the URL to send the request to.
* @param params
* additional POST parameters or files to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void post(String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
post(null, url, params, responseHandler);
}
/**
* Perform a HTTP POST request and track the Android Context which initiated
* the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param params
* additional POST parameters or files to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void post(Context context, String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
post(context, url, paramsToEntity(params), null, responseHandler);
}
/**
* Perform a HTTP POST request and track the Android Context which initiated
* the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param entity
* a raw {@link HttpEntity} to send with the request, for
* example, use this to send string/json/xml payloads to a server
* by passing a {@link org.apache.http.entity.StringEntity}.
* @param contentType
* the content type of the payload you are sending, for example
* application/json if sending a json payload.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void post(Context context, String url, HttpEntity entity,
String contentType, AsyncHttpResponseHandler responseHandler)
{
sendRequest(httpClient, httpContext,
addEntityToRequestBase(new HttpPost(url), entity), contentType,
responseHandler, context);
}
/**
 * Performs a HTTP POST request with per-request headers, tracking the
 * Android Context that initiated it.
 *
 * @param context
 *            the Android Context which initiated the request.
 * @param url
 *            the URL to send the request to.
 * @param headers
 *            headers applied only to this request, or null for none
 * @param params
 *            additional POST parameters to send with the request.
 * @param contentType
 *            the content type of the payload you are sending, for example
 *            application/json if sending a json payload.
 * @param responseHandler
 *            the response handler instance that should handle the response.
 */
public void post(Context context, String url, Header[] headers,
        RequestParams params, String contentType,
        AsyncHttpResponseHandler responseHandler)
{
    final HttpEntityEnclosingRequestBase postRequest = new HttpPost(url);
    if (params != null)
    {
        postRequest.setEntity(paramsToEntity(params));
    }
    if (headers != null)
    {
        postRequest.setHeaders(headers);
    }
    sendRequest(httpClient, httpContext, postRequest, contentType,
            responseHandler, context);
}
/**
 * Performs a HTTP POST request with a raw entity payload and per-request
 * headers, tracking the Android Context that initiated it.
 *
 * @param context
 *            the Android Context which initiated the request.
 * @param url
 *            the URL to send the request to.
 * @param headers
 *            headers applied only to this request, or null for none
 * @param entity
 *            a raw {@link HttpEntity} to send with the request, for
 *            example a {@link org.apache.http.entity.StringEntity} holding
 *            string/json/xml payloads.
 * @param contentType
 *            the content type of the payload you are sending, for example
 *            application/json if sending a json payload.
 * @param responseHandler
 *            the response handler instance that should handle the response.
 */
public void post(Context context, String url, Header[] headers,
        HttpEntity entity, String contentType,
        AsyncHttpResponseHandler responseHandler)
{
    final HttpEntityEnclosingRequestBase postRequest =
            addEntityToRequestBase(new HttpPost(url), entity);
    if (headers != null)
    {
        postRequest.setHeaders(headers);
    }
    sendRequest(httpClient, httpContext, postRequest, contentType,
            responseHandler, context);
}
//
// HTTP PUT Requests
//
/**
* Perform a HTTP PUT request, without any parameters.
*
* @param url
* the URL to send the request to.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void put(String url, AsyncHttpResponseHandler responseHandler)
{
put(null, url, null, responseHandler);
}
/**
* Perform a HTTP PUT request with parameters.
*
* @param url
* the URL to send the request to.
* @param params
* additional PUT parameters or files to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void put(String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
put(null, url, params, responseHandler);
}
/**
* Perform a HTTP PUT request and track the Android Context which initiated
* the request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param params
* additional PUT parameters or files to send with the request.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void put(Context context, String url, RequestParams params,
AsyncHttpResponseHandler responseHandler)
{
put(context, url, paramsToEntity(params), null, responseHandler);
}
/**
* Perform a HTTP PUT request and track the Android Context which initiated
* the request. And set one-time headers for the request
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param entity
* a raw {@link HttpEntity} to send with the request, for
* example, use this to send string/json/xml payloads to a server
* by passing a {@link org.apache.http.entity.StringEntity}.
* @param contentType
* the content type of the payload you are sending, for example
* application/json if sending a json payload.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void put(Context context, String url, HttpEntity entity,
String contentType, AsyncHttpResponseHandler responseHandler)
{
sendRequest(httpClient, httpContext,
addEntityToRequestBase(new HttpPut(url), entity), contentType,
responseHandler, context);
}
/**
 * Performs a HTTP PUT request with a raw entity payload and one-time
 * headers, tracking the Android Context that initiated it.
 *
 * @param context
 *            the Android Context which initiated the request.
 * @param url
 *            the URL to send the request to.
 * @param headers
 *            one-time headers for this request, or null for none
 * @param entity
 *            a raw {@link HttpEntity} to send with the request, for
 *            example a {@link org.apache.http.entity.StringEntity} holding
 *            string/json/xml payloads.
 * @param contentType
 *            the content type of the payload you are sending, for example
 *            application/json if sending a json payload.
 * @param responseHandler
 *            the response handler instance that should handle the response.
 */
public void put(Context context, String url, Header[] headers,
        HttpEntity entity, String contentType,
        AsyncHttpResponseHandler responseHandler)
{
    final HttpEntityEnclosingRequestBase putRequest =
            addEntityToRequestBase(new HttpPut(url), entity);
    if (headers != null)
    {
        putRequest.setHeaders(headers);
    }
    sendRequest(httpClient, httpContext, putRequest, contentType,
            responseHandler, context);
}
//
// HTTP DELETE Requests
//
/**
* Perform a HTTP DELETE request.
*
* @param url
* the URL to send the request to.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void delete(String url, AsyncHttpResponseHandler responseHandler)
{
delete(null, url, responseHandler);
}
/**
* Perform a HTTP DELETE request.
*
* @param context
* the Android Context which initiated the request.
* @param url
* the URL to send the request to.
* @param responseHandler
* the response handler instance that should handle the response.
*/
public void delete(Context context, String url,
AsyncHttpResponseHandler responseHandler)
{
final HttpDelete delete = new HttpDelete(url);
sendRequest(httpClient, httpContext, delete, null, responseHandler,
context);
}
/**
 * Performs a HTTP DELETE request with one-time headers, tracking the
 * Android Context that initiated it.
 *
 * @param context
 *            the Android Context which initiated the request.
 * @param url
 *            the URL to send the request to.
 * @param headers
 *            one-time headers for this request, or null for none
 * @param responseHandler
 *            the response handler instance that should handle the response.
 */
public void delete(Context context, String url, Header[] headers,
        AsyncHttpResponseHandler responseHandler)
{
    final HttpDelete deleteRequest = new HttpDelete(url);
    if (headers != null)
    {
        deleteRequest.setHeaders(headers);
    }
    sendRequest(httpClient, httpContext, deleteRequest, null,
            responseHandler, context);
}
// Private stuff
/**
 * Submits the prepared request to the thread pool and, when a Context is
 * given, records the resulting Future so that
 * {@code cancelRequests(Context, boolean)} can cancel it later.
 *
 * @param client the HttpClient to execute the request with
 * @param httpContext the per-client HttpContext (cookies etc.)
 * @param uriRequest the fully prepared request to execute
 * @param contentType optional Content-Type header value; null to skip
 * @param responseHandler receives the asynchronous result
 * @param context optional Android Context used to track the request for
 *        later cancellation; null disables tracking
 */
protected void sendRequest(DefaultHttpClient client,
HttpContext httpContext, HttpUriRequest uriRequest,
String contentType, AsyncHttpResponseHandler responseHandler,
Context context)
{
if (contentType != null)
{
// NOTE(review): addHeader can leave a duplicate Content-Type if the
// request already carries one; setHeader would replace it — confirm intent.
uriRequest.addHeader("Content-Type", contentType);
}
// Execution happens asynchronously on the shared thread pool.
Future<?> request = threadPool.submit(new AsyncHttpRequest(client,
httpContext, uriRequest, responseHandler));
if (context != null)
{
// Add request to request map so cancelRequests(context, ...) can find it.
List<WeakReference<Future<?>>> requestList = requestMap
.get(context);
if (requestList == null)
{
requestList = new LinkedList<WeakReference<Future<?>>>();
requestMap.put(context, requestList);
}
// Weak reference: completed futures may be collected without cleanup here.
requestList.add(new WeakReference<Future<?>>(request));
// TODO: Remove dead weakrefs from requestLists?
}
}
/**
 * Appends the encoded parameter string to the given URL, using '?' when the
 * URL has no query string yet and '&amp;' when it already has one.
 *
 * @param url
 *            the base URL; returned unchanged when {@code params} is null
 * @param params
 *            the parameters to encode and append, may be null
 * @return the URL with the parameters appended as a query string
 */
public static String getUrlWithQueryString(String url, RequestParams params)
{
    if (params == null)
    {
        return url;
    }
    final String paramString = params.getParamString();
    // Use '&' when the caller already supplied a query string of its own.
    final String separator = url.contains("?") ? "&" : "?";
    return url + separator + paramString;
}
/**
 * Converts the given request parameters to an HttpEntity.
 *
 * @param params the parameters to convert, may be null
 * @return the entity produced by the params, or null when params is null
 */
private HttpEntity paramsToEntity(RequestParams params)
{
    return (params == null) ? null : params.getEntity();
}
/**
 * Attaches the entity to the request when one was supplied.
 *
 * @param requestBase the request to attach the entity to
 * @param entity the payload, may be null (leaves the request unchanged)
 * @return the same request instance, for chaining
 */
private HttpEntityEnclosingRequestBase addEntityToRequestBase(
        HttpEntityEnclosingRequestBase requestBase, HttpEntity entity)
{
    if (entity == null)
    {
        return requestBase;
    }
    requestBase.setEntity(entity);
    return requestBase;
}
/**
 * Response-entity wrapper that transparently gunzips the wrapped content.
 * Installed by the response interceptor (see the constructor) whenever the
 * server answers with Content-Encoding: gzip.
 */
private static class InflatingEntity extends HttpEntityWrapper
{
public InflatingEntity(HttpEntity wrapped)
{
super(wrapped);
}
@Override
public InputStream getContent() throws IOException
{
// Decompress on the fly as the caller reads the stream.
return new GZIPInputStream(wrappedEntity.getContent());
}
@Override
public long getContentLength()
{
// The inflated length is unknown until fully read, so report -1
// (unknown) instead of the compressed length of the wrapped entity.
return -1;
}
}
}
| [
"1373939387@qq.com"
] | 1373939387@qq.com | |
bdb1e65177e5ef3485eb11190f4876758f8f0132 | cd686241381e0babae7b440b9564d191c80c1c39 | /app/src/main/java/com/example/wellington/lolguide/view/ui/BaseDetailsActivity.java | d361cc00f50cd4acc7bc1a0b7ccd2e2c6ca4b641 | [] | no_license | wellyogui/LolGuide | 39e0667ae8d1e68a5c0ef8d3a9e7ef0b4ba02e22 | a0238011d7e6e19c58b353f63653ec87ba45c052 | refs/heads/master | 2021-01-13T02:48:31.890627 | 2017-05-17T14:15:10 | 2017-05-17T14:15:10 | 77,160,039 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 709 | java | package com.example.wellington.lolguide.view.ui;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.MenuItem;
public class BaseDetailsActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
this.finish();
}
return super.onOptionsItemSelected(item);
}
}
| [
"well_yogui@hotmail.com"
] | well_yogui@hotmail.com |
828373f289d4cc1a4b52eec1d9feb756e1ac6bd4 | d77ed095865671f97b7287ce0a1499aeb947e88a | /shiro-study-chapter12/src/test/java/com/github/chenyiliang/shiro/chapter12/ShiroTest.java | 6fe74b896a9d60ce3f431f7722a0746fae0ffa76 | [] | no_license | chenyiliang/shiro-study | 52c2018b80f567b326a10c4cba2ccbe803013139 | 8c447367136995e92096b78e44b180cf6e8d0251 | refs/heads/master | 2021-01-17T13:08:52.214884 | 2016-06-30T13:09:05 | 2016-06-30T13:09:05 | 59,626,844 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,103 | java | package com.github.chenyiliang.shiro.chapter12;
import javax.sql.DataSource;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.subject.Subject;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.transaction.TransactionConfiguration;
import com.github.chenyiliang.shiro.chapter12.entity.Permission;
import com.github.chenyiliang.shiro.chapter12.entity.Role;
import com.github.chenyiliang.shiro.chapter12.entity.User;
import com.github.chenyiliang.shiro.chapter12.realm.UserRealm;
import com.github.chenyiliang.shiro.chapter12.service.PermissionService;
import com.github.chenyiliang.shiro.chapter12.service.RoleService;
import com.github.chenyiliang.shiro.chapter12.service.UserService;
/**
 * Integration test for the Shiro realm backed by the database: seeds users,
 * roles and permissions through the service layer, then verifies login,
 * role/permission checks, and re-login after a password change.
 *
 * <p>Transactions are not rolled back (defaultRollback = false); each test
 * run starts by wiping the Shiro tables in {@link #setUp()}.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:spring-beans.xml", "classpath:spring-shiro.xml" })
@TransactionConfiguration(defaultRollback = false)
public class ShiroTest {
@Autowired
protected PermissionService permissionService;
@Autowired
protected RoleService roleService;
@Autowired
protected UserService userService;
@Autowired
private UserRealm userRealm;
// Built from the injected DataSource below; used for raw table cleanup.
protected JdbcTemplate jdbcTemplate;
// Setter injection so the template is constructed from the container's DataSource.
@Autowired
private void setDataSource(DataSource ds) {
jdbcTemplate = new JdbcTemplate(ds);
}
// Shared plain-text password for all seeded users.
protected String password = "123";
protected Permission p1;
protected Permission p2;
protected Permission p3;
protected Role r1;
protected Role r2;
protected User u1;
protected User u2;
protected User u3;
protected User u4;
@Before
public void setUp() {
// Wipe all Shiro tables so every test starts from a clean slate.
jdbcTemplate.update("delete from sys_users");
jdbcTemplate.update("delete from sys_roles");
jdbcTemplate.update("delete from sys_permissions");
jdbcTemplate.update("delete from sys_users_roles");
jdbcTemplate.update("delete from sys_roles_permissions");
// 1. Create the permissions
p1 = new Permission("user:create", "user module - create", Boolean.TRUE);
p2 = new Permission("user:update", "user module - update", Boolean.TRUE);
p3 = new Permission("menu:create", "menu module - create", Boolean.TRUE);
permissionService.createPermission(p1);
permissionService.createPermission(p2);
permissionService.createPermission(p3);
// 2. Create the roles
r1 = new Role("admin", "administrator", Boolean.TRUE);
r2 = new Role("user", "user administrator", Boolean.TRUE);
roleService.createRole(r1);
roleService.createRole(r2);
// 3. Link roles to permissions
roleService.correlationPermissions(r1.getId(), p1.getId());
roleService.correlationPermissions(r1.getId(), p2.getId());
roleService.correlationPermissions(r1.getId(), p3.getId());
roleService.correlationPermissions(r2.getId(), p1.getId());
roleService.correlationPermissions(r2.getId(), p2.getId());
// 4. Create the users (u4 is locked to exercise the locked-account path)
u1 = new User("zhang", password);
u2 = new User("li", password);
u3 = new User("wu", password);
u4 = new User("wang", password);
u4.setLocked(Boolean.TRUE);
userService.createUser(u1);
userService.createUser(u2);
userService.createUser(u3);
userService.createUser(u4);
// 5. Link users to roles (only u1 gets the admin role)
userService.correlationRoles(u1.getId(), r1.getId());
}
@Test
public void test() {
Subject subject = SecurityUtils.getSubject();
UsernamePasswordToken token = new UsernamePasswordToken(u1.getUsername(), password);
subject.login(token);
Assert.assertTrue(subject.isAuthenticated());
subject.checkRole("admin");
subject.checkPermission("user:create");
// Change the password, clear the realm's cached authentication info,
// then verify login succeeds with the new password.
userService.changePassword(u1.getId(), password + "1");
userRealm.clearCache(subject.getPrincipals());
token = new UsernamePasswordToken(u1.getUsername(), password + "1");
subject.login(token);
}
}
| [
"chenyiliangconan@qq.com"
] | chenyiliangconan@qq.com |
8c479534a562bd9f5e1eee50b6eb578eeb650ca7 | 651592b56fa78e6a0d708378d6009a64cb1be8f3 | /app/src/main/java/com/mobile/proisa/pedidoprueba/Adapters/ItemsAdapter.java | 11600d39ebedaaf4478d5714d5b93905df9ac16b | [] | no_license | saitamaHero/PedidoPrueba | b2b1ef958726484d468114a913f5ac07325e91a3 | 7da58de77b16b8d346bd93bf6733447ca9574db2 | refs/heads/master | 2020-04-05T16:02:24.087362 | 2019-06-12T13:26:13 | 2019-06-12T13:26:13 | 156,994,478 | 1 | 0 | null | 2019-05-21T13:48:42 | 2018-11-10T15:34:09 | Java | UTF-8 | Java | false | false | 3,023 | java | package com.mobile.proisa.pedidoprueba.Adapters;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.mobile.proisa.pedidoprueba.R;
import java.util.List;
import Models.Item;
import Utils.NumberUtils;
/**
 * RecyclerView adapter that renders a list of {@link Item}s using a
 * caller-supplied row layout and forwards row taps to an optional
 * {@link OnItemClickListener}.
 */
public class ItemsAdapter extends RecyclerView.Adapter<ItemsAdapter.ItemHolder> {
// Backing data; positions map 1:1 to this list.
private List<Item> items;
// Layout resource inflated for each row; must contain the view IDs bound in ItemHolder.
private int layoutResource;
// Optional click callback; may be null (taps are then ignored).
private OnItemClickListener onItemClickListener;
/**
 * @param itemList the items to display (the list is used as-is, not copied)
 * @param layoutResource the row layout to inflate for each item
 */
public ItemsAdapter(List<Item> itemList, int layoutResource) {
this.items = itemList;
this.layoutResource = layoutResource;
}
/** Registers the callback invoked when a row's card is tapped. */
public void setOnItemClickListener(OnItemClickListener itemClickListener) {
this.onItemClickListener = itemClickListener;
}
@NonNull
@Override
public ItemHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
Context context = parent.getContext();
View view = LayoutInflater.from(context).inflate(layoutResource, parent, false);
return new ItemHolder(view);
}
@Override
public void onBindViewHolder(@NonNull ItemHolder holder, int position) {
final Item item = items.get(position);
holder.txtId.setText(item.getId());
holder.txtName.setText(item.getName());
holder.txtPrice.setText(NumberUtils.formatNumber(item.getPrice(), NumberUtils.FORMAT_NUMER_DOUBLE));
holder.txtCategory.setText(item.getCategory().getName());
// Stock shown as "<quantity> <unit>", e.g. "12 EA", via the two-part string resource.
String stockQuantity = holder.txtQuantity.getContext().getString(R.string.two_string_format,
NumberUtils.formatToInteger(item.getQuantity()),item.getUnit().getId());
holder.txtQuantity.setText(stockQuantity);
holder.cardView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// Capture the bound item directly rather than re-resolving the position.
if(onItemClickListener != null) onItemClickListener.onItemClick(item);
}
});
}
@Override
public int getItemCount() {
return items.size();
}
/** Holds the row views looked up once per inflated layout. */
public class ItemHolder extends RecyclerView.ViewHolder{
public TextView txtId;
public TextView txtName;
public TextView txtPrice;
public TextView txtQuantity;
public TextView txtCategory;
public CardView cardView;
public ItemHolder(View itemView) {
super(itemView);
txtId = itemView.findViewById(R.id.id);
txtName = itemView.findViewById(R.id.name);
txtQuantity = itemView.findViewById(R.id.stock);
txtPrice = itemView.findViewById(R.id.price);
txtCategory = itemView.findViewById(R.id.category);
cardView = itemView.findViewById(R.id.card);
}
}
/** Callback for row taps. */
public interface OnItemClickListener{
void onItemClick(Item item);
}
}
| [
"tec.dionicioacevedo@gmail.com"
] | tec.dionicioacevedo@gmail.com |
a712691e4c1f2c6f808d43cd741c5d8d56f0a57e | e8e968c6083132098af6cee11eed6468bf9e29df | /src/test/java/com/google/devtools/build/lib/packages/PackageFactoryTest.java | 44b0f588b281f7b74927148c657cb8b975a0deb6 | [
"Apache-2.0"
] | permissive | FengRillian/bazel-0.19.2-dist | c1b8d19951b7895de4155b89d5ec8fbd3d292385 | 85152fe2690bb5a34464b8dabd4675cdf4897590 | refs/heads/master | 2020-04-14T11:29:37.992753 | 2019-01-03T00:52:00 | 2019-01-03T00:52:00 | 163,815,152 | 2 | 1 | null | null | null | null | UTF-8 | Java | false | false | 46,152 | java | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.packages.util.PackageFactoryApparatus;
import com.google.devtools.build.lib.packages.util.PackageFactoryTestBase;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.testutil.MoreAsserts;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Unit tests for {@code PackageFactory}.
*/
@RunWith(JUnit4.class)
public class PackageFactoryTest extends PackageFactoryTestBase {
// An empty BUILD file yields a package with the right name and no rules.
@Test
public void testCreatePackage() throws Exception {
Path buildFile = scratch.file("/pkgname/BUILD", "# empty build file ");
Package pkg = packages.createPackage("pkgname", buildFile);
assertThat(pkg.getName()).isEqualTo("pkgname");
assertThat(Sets.newHashSet(pkg.getTargets(Rule.class))).isEmpty();
}
// Runs a package-parsing task and an error-reporting task concurrently and
// checks that errors reported outside the parse do not leak into it.
// Coordination uses two semaphores: the reporter fires between beforeError
// and afterError, bracketing a point inside the parse.
@Test
public void testCreatePackageIsolatedFromOuterErrors() throws Exception {
ExecutorService e = Executors.newCachedThreadPool();
final Semaphore beforeError = new Semaphore(0);
final Semaphore afterError = new Semaphore(0);
Reporter reporter = new Reporter(new EventBus());
ParsingTracker parser = new ParsingTracker(beforeError, afterError, reporter);
final Logger log = Logger.getLogger(PackageFactory.class.getName());
// The tracker observes parsing progress through PackageFactory's FINE logs,
// so temporarily lower the level; restored below.
log.addHandler(parser);
Level originalLevel = log.getLevel();
log.setLevel(Level.FINE);
e.execute(new ErrorReporter(reporter, beforeError, afterError));
e.execute(parser);
// wait for all to finish
e.shutdown();
assertThat(e.awaitTermination(TestUtils.WAIT_TIMEOUT_MILLISECONDS, TimeUnit.MILLISECONDS))
.isTrue();
// Restore the logger so later tests are unaffected.
log.removeHandler(parser);
log.setLevel(originalLevel);
assertThat(parser.hasParsed()).isTrue();
}
// A non-string rule name is reported as an error but still produces a
// (marked-as-erroneous) package rather than aborting loading.
@Test
public void testBadRuleName() throws Exception {
events.setFailFast(false);
Path buildFile = scratch.file("/badrulename/BUILD", "cc_library(name = 3)");
Package pkg = packages.createPackage("badrulename", buildFile);
events.assertContainsError("cc_library 'name' attribute must be a string");
assertThat(pkg.containsErrors()).isTrue();
}
// A rule with no 'name' attribute at all is likewise reported and the
// package is marked as containing errors.
@Test
public void testNoRuleName() throws Exception {
events.setFailFast(false);
Path buildFile = scratch.file("/badrulename/BUILD", "cc_library()");
Package pkg = packages.createPackage("badrulename", buildFile);
events.assertContainsError("cc_library rule has no 'name' attribute");
assertThat(pkg.containsErrors()).isTrue();
}
// An illegal package name must make createPackage throw NoSuchPackageException
// with a message naming the offending package.
@Test
public void testBadPackageName() throws Exception {
try {
// PathFragment parsing collapses doubled slashes, and normalization of the path
// fragment removes up references (/../), so use a triple dot /.../ segment that
// will always remain a forbidden package name.
packages.createPackage("not even a legal/.../label",
emptyBuildFile("not even a legal/.../label"));
fail();
} catch (NoSuchPackageException e) {
assertThat(e)
.hasMessageThat()
.contains(
"no such package 'not even a legal/.../label': "
+ "illegal package name: 'not even a legal/.../label' ");
}
}
  // A ':' inside an exports_files target name is rejected, and the bad target is
  // not added to the package.
  @Test
  public void testColonInExportsFilesTargetName() throws Exception {
    events.setFailFast(false);
    Path path =
        scratch.file(
            "/googledata/cafe/BUILD",
            "exports_files(['houseads/house_ads:ca-aol_parenting_html'])");
    Package pkg = packages.createPackage("googledata/cafe", path);
    events.assertContainsError("target names may not contain ':'");
    assertThat(pkg.getTargets(FileTarget.class).toString())
        .doesNotContain("houseads/house_ads:ca-aol_parenting_html");
    assertThat(pkg.containsErrors()).isTrue();
  }
  // Package path segments named "PROTECTED" are legal package names.
  @Test
  public void testPackageNameWithPROTECTEDIsOk() throws Exception {
    events.setFailFast(false);
    // One "PROTECTED":
    assertThat(isValidPackageName("foo/PROTECTED/bar")).isTrue();
    // Multiple "PROTECTED"s:
    assertThat(isValidPackageName("foo/PROTECTED/bar/PROTECTED/wiz")).isTrue();
  }
  // Two rules with the same name in one package conflict; the package is marked errant.
  @Test
  public void testDuplicateRuleName() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/duplicaterulename/BUILD",
            "# -*- python -*-",
            "proto_library(name = 'spell_proto', srcs = ['spell.proto'], cc_api_version = 2)",
            "cc_library(name = 'spell_proto')");
    Package pkg = packages.createPackage("duplicaterulename", buildFile);
    events.assertContainsError(
        "cc_library rule 'spell_proto' in package "
            + "'duplicaterulename' conflicts with existing proto_library rule");
    assertThat(pkg.containsErrors()).isTrue();
  }
  // A label duplicated within one 'deps' list is an error, and once the package is
  // errant, every rule in it reports containsErrors().
  @Test
  public void testDuplicatedDependencies() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/has_dupe/BUILD",
            "cc_library(name='dep')",
            "cc_library(name='has_dupe', deps=[':dep', ':dep'])");
    Package pkg = packages.createPackage("has_dupe", buildFile);
    events.assertContainsError(
        "Label '//has_dupe:dep' is duplicated in the 'deps' " + "attribute of rule 'has_dupe'");
    assertThat(pkg.containsErrors()).isTrue();
    assertThat(pkg.getRule("has_dupe")).isNotNull();
    assertThat(pkg.getRule("dep")).isNotNull();
    assertThat(pkg.getRule("has_dupe").containsErrors()).isTrue();
    assertThat(pkg.getRule("dep").containsErrors()).isTrue(); // because all rules in an
    // errant package are
    // themselves errant.
  }
  // Output 'a' and 'a/b' of the same rule conflict (one is a path prefix of the other).
  @Test
  public void testPrefixWithinSameRule1() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/orange/BUILD",
            "genrule(name='orange', srcs=[], outs=['a', 'a/b'], cmd='')");
    packages.createPackage("fruit/orange", buildFile);
    events.assertContainsError("rule 'orange' has conflicting output files 'a/b' and 'a");
  }
  // Same conflict with the outs declared in the opposite order.
  @Test
  public void testPrefixWithinSameRule2() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/orange/BUILD",
            "genrule(name='orange', srcs=[], outs=['a/b', 'a'], cmd='')");
    packages.createPackage("fruit/orange", buildFile);
    events.assertContainsError("rule 'orange' has conflicting output files 'a' and 'a/b");
  }
  // Prefix conflicts are also detected across two different rules in the package.
  @Test
  public void testPrefixBetweenRules1() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/kiwi/BUILD",
            "genrule(name='kiwi1', srcs=[], outs=['a'], cmd='')",
            "genrule(name='kiwi2', srcs=[], outs=['a/b'], cmd='')");
    packages.createPackage("fruit/kiwi", buildFile);
    events.assertContainsError(
        "output file 'a/b' of rule 'kiwi2' conflicts " + "with output file 'a' of rule 'kiwi1'");
  }
  // Same cross-rule conflict with the declaration order reversed.
  @Test
  public void testPrefixBetweenRules2() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/kiwi/BUILD",
            "genrule(name='kiwi1', srcs=[], outs=['a/b'], cmd='')",
            "genrule(name='kiwi2', srcs=[], outs=['a'], cmd='')");
    packages.createPackage("fruit/kiwi", buildFile);
    events.assertContainsError(
        "output file 'a' of rule 'kiwi2' conflicts " + "with output file 'a/b' of rule 'kiwi1'");
  }
@Test
public void testPackageConstant() throws Exception {
Path buildFile =
scratch.file("/pina/BUILD", "cc_library(name=PACKAGE_NAME + '-colada')");
Package pkg =
packages.createPackage(
"pina", buildFile, "--incompatible_package_name_is_a_function=false");
events.assertNoWarningsOrErrors();
assertThat(pkg.containsErrors()).isFalse();
assertThat(pkg.getRule("pina-colada")).isNotNull();
assertThat(pkg.getRule("pina-colada").containsErrors()).isFalse();
assertThat(Sets.newHashSet(pkg.getTargets(Rule.class)).size()).isSameAs(1);
}
  // With the incompatible flag enabled, referencing PACKAGE_NAME is an error.
  @Test
  public void testPackageConstantIsForbidden() throws Exception {
    events.setFailFast(false);
    Path buildFile = scratch.file("/pina/BUILD", "cc_library(name=PACKAGE_NAME + '-colada')");
    packages.createPackage("pina", buildFile, "--incompatible_package_name_is_a_function=true");
    events.assertContainsError("The value 'PACKAGE_NAME' has been removed");
  }
@Test
public void testPackageNameFunction() throws Exception {
Path buildFile = scratch.file("/pina/BUILD", "cc_library(name=package_name() + '-colada')");
Package pkg = packages.createPackage("pina", buildFile);
events.assertNoWarningsOrErrors();
assertThat(pkg.containsErrors()).isFalse();
assertThat(pkg.getRule("pina-colada")).isNotNull();
assertThat(pkg.getRule("pina-colada").containsErrors()).isFalse();
assertThat(Sets.newHashSet(pkg.getTargets(Rule.class)).size()).isSameAs(1);
}
  // REPOSITORY_NAME / PACKAGE_NAME expand to "@a" and "b" inside external repo @a//b
  // while the incompatible flag is disabled.
  @Test
  public void testPackageConstantInExternalRepository() throws Exception {
    Path buildFile =
        scratch.file(
            "/external/a/b/BUILD",
            "genrule(name='c', srcs=[], outs=['ao'], cmd=REPOSITORY_NAME + ' ' + PACKAGE_NAME)");
    Package pkg =
        packages.createPackage(
            PackageIdentifier.create("@a", PathFragment.create("b")),
            buildFile,
            events.reporter(),
            "--incompatible_package_name_is_a_function=false");
    Rule c = pkg.getRule("c");
    assertThat(AggregatingAttributeMapper.of(c).get("cmd", Type.STRING)).isEqualTo("@a b");
  }
  // With the incompatible flag enabled, REPOSITORY_NAME is rejected in external repos too.
  @Test
  public void testPackageConstantInExternalRepositoryIsForbidden() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/external/a/b/BUILD", "genrule(name='c', srcs=[], outs=['ao'], cmd=REPOSITORY_NAME)");
    packages.createPackage(
        PackageIdentifier.create("@a", PathFragment.create("b")),
        buildFile,
        events.reporter(),
        "--incompatible_package_name_is_a_function=true");
    events.assertContainsError("The value 'REPOSITORY_NAME' has been removed");
  }
  // The function forms repository_name()/package_name() work in external repos
  // with no flag required.
  @Test
  public void testPackageFunctionInExternalRepository() throws Exception {
    Path buildFile =
        scratch.file(
            "/external/a/b/BUILD",
            "genrule(name='c', srcs=[], outs=['o'], cmd=repository_name() + ' ' + package_name())");
    Package pkg =
        packages.createPackage(
            PackageIdentifier.create("@a", PathFragment.create("b")), buildFile, events.reporter());
    Rule c = pkg.getRule("c");
    assertThat(AggregatingAttributeMapper.of(c).get("cmd", Type.STRING)).isEqualTo("@a b");
  }
  // Each duplicate rule-name pair in a package produces its own error; both
  // conflicts are reported, not just the first.
  @Test
  public void testMultipleDuplicateRuleName() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/multipleduplicaterulename/BUILD",
            "# -*- python -*-",
            "proto_library(name = 'spellcheck_proto',",
            "          srcs = ['spellcheck.proto'],",
            "          cc_api_version = 2)",
            "cc_library(name = 'spellcheck_proto')",
            "proto_library(name = 'spell_proto',",
            "          srcs = ['spell.proto'],",
            "          cc_api_version = 2)",
            "cc_library(name = 'spell_proto')");
    Package pkg = packages.createPackage("multipleduplicaterulename", buildFile);
    events.assertContainsError(
        "cc_library rule 'spellcheck_proto' in package "
            + "'multipleduplicaterulename' conflicts with existing proto_library rule");
    events.assertContainsError(
        "cc_library rule 'spell_proto' in package "
            + "'multipleduplicaterulename' conflicts with existing proto_library rule");
    assertThat(pkg.containsErrors()).isTrue();
  }
@Test
public void testBuildFileTargetExists() throws Exception {
Path buildFile = scratch.file("/foo/BUILD", "");
Package pkg = packages.createPackage("foo", buildFile);
Target target = pkg.getTarget("BUILD");
assertThat(target.getName()).isEqualTo("BUILD");
// Test that it's memoized:
assertThat(pkg.getTarget("BUILD")).isSameAs(target);
}
  // Targets mentioned in srcs/deps/exports_files become package members with the
  // right types (Rule vs. InputFile); unmentioned names raise NoSuchTargetException,
  // and only BUILD and explicitly exported files are InputFiles.
  @Test
  public void testCreationOfInputFiles() throws Exception {
    Path buildFile =
        scratch.file(
            "/foo/BUILD",
            "exports_files(['Z'])",
            "cc_library(name='W', deps=['X', 'Y'])",
            "cc_library(name='X', srcs=['X'])",
            "cc_library(name='Y')");
    Package pkg = packages.createPackage("foo", buildFile);
    assertThat(pkg.containsErrors()).isFalse();
    // X is a rule with a circular self-dependency.
    assertThat(pkg.getTarget("X").getClass()).isSameAs(Rule.class);
    // Y is a rule
    assertThat(pkg.getTarget("Y").getClass()).isSameAs(Rule.class);
    // Z is a file
    assertThat(pkg.getTarget("Z").getClass()).isSameAs(InputFile.class);
    // A is nothing
    try {
      pkg.getTarget("A");
      fail();
    } catch (NoSuchTargetException e) {
      assertThat(e)
          .hasMessage(
              "no such target '//foo:A': "
                  + "target 'A' not declared in package 'foo' defined by /foo/BUILD");
    }
    // These are the only input files: BUILD, Z
    Set<String> inputFiles = Sets.newTreeSet();
    for (InputFile inputFile : pkg.getTargets(InputFile.class)) {
      inputFiles.add(inputFile.getName());
    }
    assertThat(Lists.newArrayList(inputFiles)).containsExactly("BUILD", "Z").inOrder();
  }
  // Rules under third_party/ must declare a license; a missing declaration is an error.
  @Test
  public void testThirdPartyLicenseError() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file("/third_party/foo/BUILD", "# line 1", "cc_library(name='bar')", "# line 3");
    Package pkg = packages.createPackage("third_party/foo", buildFile);
    events.assertContainsError(
        "third-party rule '//third_party/foo:bar' lacks a license "
            + "declaration with one of the following types: "
            + "notice, reciprocal, permissive, restricted, unencumbered, by_exception_only");
    assertThat(pkg.containsErrors()).isTrue();
  }
  // The same license requirement applies to files exported from third_party/.
  @Test
  public void testThirdPartyLicenseExportsFileError() throws Exception {
    events.setFailFast(false);
    Path buildFile = scratch.file("/third_party/foo/BUILD", "exports_files(['bar'])");
    Package pkg = packages.createPackage("third_party/foo", buildFile);
    events.assertContainsError(
        "third-party file 'bar' lacks a license "
            + "declaration with one of the following types: "
            + "notice, reciprocal, permissive, restricted, unencumbered, by_exception_only");
    assertThat(pkg.containsErrors()).isTrue();
  }
  // On a rule-name collision the first declaration wins: the package keeps the
  // original rule and all of that rule's outputs; the duplicate is dropped.
  @Test
  public void testDuplicateRuleIsNotAddedToPackage() throws Exception {
    events.setFailFast(false);
    Path path =
        scratch.file(
            "/dup/BUILD",
            "proto_library(name = 'dup_proto',",
            "              srcs  = ['dup.proto'],",
            "              cc_api_version = 2)",
            "",
            "cc_library(name = 'dup_proto',",
            "           srcs = ['dup.pb.cc', 'dup.pb.h'])");
    Package pkg = packages.createPackage("dup", path);
    events.assertContainsError(
        "cc_library rule 'dup_proto' in package 'dup' "
            + "conflicts with existing proto_library rule");
    assertThat(pkg.containsErrors()).isTrue();
    Rule dupProto = pkg.getRule("dup_proto");
    // Check that the first rule of the given name "wins", and that each of the
    // "winning" rule's outputs is a member of the package.
    assertThat(dupProto.getRuleClass()).isEqualTo("proto_library");
    for (OutputFile out : dupProto.getOutputFiles()) {
      assertThat(pkg.getTargets(FileTarget.class)).contains(out);
    }
  }
  // A rule rejected for an output conflict must leave no trace in the package:
  // neither the rule nor its inputs/outputs become targets, and the contested
  // output keeps its original generating rule.
  @Test
  public void testConflictingRuleDoesNotUpdatePackage() throws Exception {
    events.setFailFast(false);
    // In this test, rule2's outputs conflict with rule1, so rule2 is rejected.
    // However, we must check that neither rule2, nor any of its inputs or
    // outputs is a member of the package, and that the conflicting output file
    // "out2" still has rule1 as its getGeneratingRule().
    Path path =
        scratch.file(
            "/conflict/BUILD",
            "genrule(name = 'rule1',",
            "        cmd = '',",
            "        srcs = ['in1', 'in2'],",
            "        outs = ['out1', 'out2'])",
            "genrule(name = 'rule2',",
            "        cmd = '',",
            "        srcs = ['in3', 'in4'],",
            "        outs = ['out3', 'out2'])");
    Package pkg = packages.createPackage("conflict", path);
    events.assertContainsError(
        "generated file 'out2' in rule 'rule2' "
            + "conflicts with existing generated file from rule 'rule1'");
    assertThat(pkg.containsErrors()).isTrue();
    assertThat(pkg.getRule("rule2")).isNull();
    // Ensure that rule2's "out2" didn't overwrite rule1's:
    assertThat(((OutputFile) pkg.getTarget("out2")).getGeneratingRule())
        .isSameAs(pkg.getRule("rule1"));
    // None of rule2, its inputs, or its outputs should belong to pkg:
    List<Target> found = new ArrayList<>();
    for (String targetName : ImmutableList.of("rule2", "in3", "in4", "out3")) {
      try {
        found.add(pkg.getTarget(targetName));
        // No fail() here: if there's no exception, we add the name to a list
        // and we check below that it's empty.
      } catch (NoSuchTargetException e) {
        /* good! */
      }
    }
    assertThat(found).isEmpty();
  }
  // Was: Regression test for bug "Rules declared after an error in
  // a package should be considered 'in error'".
  // Now: Regression test for bug "Why aren't ERRORS considered
  // fatal?*"
  // Once any statement in a BUILD file fails, every rule in the package — including
  // rules declared before the failure — is marked as containing errors.
  @Test
  public void testAllRulesInErrantPackageAreInError() throws Exception {
    events.setFailFast(false);
    Path path =
        scratch.file(
            "/error/BUILD",
            "genrule(name = 'rule1',",
            "        cmd = ':',",
            "        outs = ['out.1'])",
            "list = ['bad']",
            "PopulateList(list)", // undefined => error
            "genrule(name = 'rule2',",
            "        cmd = ':',",
            "        outs = list)");
    Package pkg = packages.createPackage("error", path);
    events.assertContainsError("name 'PopulateList' is not defined");
    assertThat(pkg.containsErrors()).isTrue();
    // rule1 would be fine but is still marked as in error:
    assertThat(pkg.getRule("rule1").containsErrors()).isTrue();
    // rule2 is considered "in error" because it's after an error.
    // Indeed, it has the wrong "outs" set because the call to PopulateList
    // failed.
    Rule rule2 = pkg.getRule("rule2");
    assertThat(rule2.containsErrors()).isTrue();
    assertThat(Sets.newHashSet(rule2.getOutputFiles()))
        .isEqualTo(Sets.newHashSet(pkg.getTarget("bad")));
  }
  // NoSuchTargetException messages are tailored to the situation: an existing but
  // unexported source file suggests exports_files, a non-existent name suggests a
  // close match, and an unexported directory suggests exports_files or a filegroup.
  @Test
  public void testHelpfulErrorForMissingExportsFiles() throws Exception {
    Path path = scratch.file("/x/BUILD", "cc_library(name='x', srcs=['x.cc'])");
    scratch.file("/x/x.cc");
    scratch.file("/x/y.cc");
    scratch.file("/x/dir/dummy");
    Package pkg = packages.createPackage("x", path);
    assertThat(pkg.getTarget("x.cc")).isNotNull(); // existing and mentioned.
    try {
      pkg.getTarget("y.cc"); // existing but not mentioned.
      fail();
    } catch (NoSuchTargetException e) {
      assertThat(e)
          .hasMessage(
              "no such target '//x:y.cc': "
                  + "target 'y.cc' not declared in package 'x'; "
                  + "however, a source file of this name exists.  "
                  + "(Perhaps add 'exports_files([\"y.cc\"])' to x/BUILD?) "
                  + "defined by /x/BUILD");
    }
    try {
      pkg.getTarget("z.cc"); // non-existent and unmentioned.
      fail();
    } catch (NoSuchTargetException e) {
      assertThat(e)
          .hasMessage(
              "no such target '//x:z.cc': "
                  + "target 'z.cc' not declared in package 'x' (did you mean 'x.cc'?) "
                  + "defined by /x/BUILD");
    }
    try {
      pkg.getTarget("dir"); // existing directory but not mentioned.
      fail();
    } catch (NoSuchTargetException e) {
      assertThat(e)
          .hasMessage(
              "no such target '//x:dir': target 'dir' not declared in package 'x'; "
                  + "however, a source directory of this name exists.  "
                  + "(Perhaps add 'exports_files([\"dir\"])' to x/BUILD, "
                  + "or define a filegroup?) defined by /x/BUILD");
    }
  }
  // A test_suite with no explicit tests= gets an $implicit_tests attribute listing
  // every test rule in the package (even ones declared after it); suites with an
  // explicit tests= list have an empty $implicit_tests.
  @Test
  public void testTestSuitesImplicitlyDependOnAllRulesInPackage() throws Exception {
    Path path =
        scratch.file(
            "/x/BUILD",
            "java_test(name='j')",
            "test_suite(name='t1')",
            "test_suite(name='t2', tests=['//foo'])",
            "test_suite(name='t3', tests=['//foo'])",
            "cc_test(name='c')");
    Package pkg = packages.createPackage("x", path);
    // Things to note:
    // - the t1 refers to both :j and :c, even though :c is a forward reference.
    // - $implicit_tests is empty unless tests=[]
    assertThat(attributes(pkg.getRule("t1")).get("$implicit_tests", BuildType.LABEL_LIST))
        .containsExactlyElementsIn(
            Sets.newHashSet(
                Label.parseAbsolute("//x:c", ImmutableMap.of()),
                Label.parseAbsolute("//x:j", ImmutableMap.of())));
    assertThat(attributes(pkg.getRule("t2")).get("$implicit_tests", BuildType.LABEL_LIST))
        .isEmpty();
    assertThat(attributes(pkg.getRule("t3")).get("$implicit_tests", BuildType.LABEL_LIST))
        .isEmpty();
  }
  // glob() excludes directories by default; passing exclude_directories=0 makes
  // matched directories (here data/berry) appear in the result too.
  @Test
  public void testGlobDirectoryExclusion() throws Exception {
    emptyFile("/fruit/data/apple");
    emptyFile("/fruit/data/pear");
    emptyFile("/fruit/data/berry/black");
    emptyFile("/fruit/data/berry/blue");
    Path file =
        scratch.file(
            "/fruit/BUILD",
            "cc_library(name = 'yes', srcs = glob(['data/*']))",
            "cc_library(name = 'no',  srcs = glob(['data/*'], exclude_directories=0))");
    Package pkg = packages.eval("fruit", file);
    events.assertNoWarningsOrErrors();
    List<Label> yesFiles = attributes(pkg.getRule("yes")).get("srcs", BuildType.LABEL_LIST);
    List<Label> noFiles = attributes(pkg.getRule("no")).get("srcs", BuildType.LABEL_LIST);
    assertThat(yesFiles)
        .containsExactly(
            Label.parseAbsolute("@//fruit:data/apple", ImmutableMap.of()),
            Label.parseAbsolute("@//fruit:data/pear", ImmutableMap.of()));
    assertThat(noFiles)
        .containsExactly(
            Label.parseAbsolute("@//fruit:data/apple", ImmutableMap.of()),
            Label.parseAbsolute("@//fruit:data/pear", ImmutableMap.of()),
            Label.parseAbsolute("@//fruit:data/berry", ImmutableMap.of()));
  }
  // TODO(bazel-team): This is really a test for GlobCache.
  // Exercises recursive (**) glob patterns over a small file tree, checking both
  // include-only evaluation and various include/exclude combinations via
  // assertEvaluates(pkg, expected, includes[, excludes]).
  @Test
  public void testRecursiveGlob() throws Exception {
    emptyFile("/rg/a.cc");
    emptyFile("/rg/foo/bar.cc");
    emptyFile("/rg/foo/foo.cc");
    emptyFile("/rg/foo/wiz/bam.cc");
    emptyFile("/rg/foo/wiz/bum.cc");
    emptyFile("/rg/foo/wiz/quid/gav.cc");
    Path file =
        scratch.file(
            "/rg/BUILD",
            "cc_library(name = 'ri', srcs = glob(['**/*.cc']))",
            "cc_library(name = 're', srcs = glob(['*.cc'], exclude=['**/*.c']))");
    Package pkg = packages.eval("rg", file);
    events.assertNoWarningsOrErrors();
    // '**' alone matches every file and directory under the package.
    assertEvaluates(
        pkg,
        ImmutableList.of(
            "BUILD",
            "a.cc",
            "foo",
            "foo/bar.cc",
            "foo/foo.cc",
            "foo/wiz",
            "foo/wiz/bam.cc",
            "foo/wiz/bum.cc",
            "foo/wiz/quid",
            "foo/wiz/quid/gav.cc"),
        "**");
    assertEvaluates(
        pkg,
        ImmutableList.of(
            "a.cc",
            "foo/bar.cc",
            "foo/foo.cc",
            "foo/wiz/bam.cc",
            "foo/wiz/bum.cc",
            "foo/wiz/quid/gav.cc"),
        "**/*.cc");
    assertEvaluates(
        pkg, ImmutableList.of("foo/bar.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc"), "**/b*.cc");
    assertEvaluates(
        pkg,
        ImmutableList.of(
            "foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"),
        "**/*/*.cc");
    assertEvaluates(pkg, ImmutableList.of("foo/wiz/quid/gav.cc"), "foo/**/quid/*.cc");
    // The next four cases verify that exclude patterns can cancel the includes entirely.
    assertEvaluates(
        pkg,
        Collections.<String>emptyList(),
        ImmutableList.of("*.cc", "*/*.cc", "*/*/*.cc"),
        ImmutableList.of("**/*.cc"));
    assertEvaluates(
        pkg,
        Collections.<String>emptyList(),
        ImmutableList.of("**/*.cc"),
        ImmutableList.of("**/*.cc"));
    assertEvaluates(
        pkg,
        Collections.<String>emptyList(),
        ImmutableList.of("**/*.cc"),
        ImmutableList.of("*.cc", "*/*.cc", "*/*/*.cc", "*/*/*/*.cc"));
    assertEvaluates(
        pkg,
        Collections.<String>emptyList(),
        ImmutableList.of("**"),
        ImmutableList.of("*", "*/*", "*/*/*", "*/*/*/*"));
    // Partial excludes: only the matching depth is removed from the result.
    assertEvaluates(
        pkg,
        ImmutableList.of(
            "foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"),
        ImmutableList.of("**/*.cc"),
        ImmutableList.of("*.cc"));
    assertEvaluates(
        pkg,
        ImmutableList.of("a.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"),
        ImmutableList.of("**/*.cc"),
        ImmutableList.of("*/*.cc"));
    assertEvaluates(
        pkg,
        ImmutableList.of("a.cc", "foo/bar.cc", "foo/foo.cc", "foo/wiz/quid/gav.cc"),
        ImmutableList.of("**/*.cc"),
        ImmutableList.of("**/wiz/*.cc"));
  }
  // glob() with no arguments reports an "insufficient arguments" error.
  @Test
  public void testInsufficientArgumentGlobErrors() throws Exception {
    events.setFailFast(false);
    assertGlobFails(
        "glob()",
        "insufficient arguments received by glob(include: sequence of strings, "
            + "*, exclude: sequence of strings = [], exclude_directories: int = 1) "
            + "(got 0, expected at least 1)");
  }
  // 'exclude' is keyword-only; passing it positionally is an error.
  @Test
  public void testGlobUnamedExclude() throws Exception {
    events.setFailFast(false);
    assertGlobFails(
        "glob(['a'], ['b'])",
        "too many (2) positional arguments in call to glob(include: sequence of strings, "
            + "*, exclude: sequence of strings = [], exclude_directories: int = 1)");
  }
  // More positional arguments than glob accepts is an error.
  @Test
  public void testTooManyArgumentsGlobErrors() throws Exception {
    events.setFailFast(false);
    assertGlobFails(
        "glob(1,2,3,4)",
        "too many (4) positional arguments in call to glob(include: sequence of strings, "
            + "*, exclude: sequence of strings = [], exclude_directories: int = 1)");
  }
  // 'include' must be a sequence, not a scalar.
  @Test
  public void testGlobEnforcesListArgument() throws Exception {
    events.setFailFast(false);
    assertGlobFails(
        "glob(1, exclude=2)",
        "argument 'include' has type 'int', but should be 'sequence'\n"
            + "in call to builtin function glob(include, *, exclude, exclude_directories)");
  }
  // Every element of include/exclude must be a string.
  @Test
  public void testGlobEnforcesListOfStringsArguments() throws Exception {
    events.setFailFast(false);
    assertGlobFails(
        "glob(['a', 'b'], exclude=['c', 42])",
        "expected value of type 'string' for element 1 of 'glob' argument, but got 42 (int)");
  }
  // Meta-test: assertGlobMatches must itself fail (here via a deliberately broken
  // BUILD snippet) when the expected result is wrong, so the other glob tests are
  // known to be capable of failing.
  @Test
  public void testGlobNegativeTest() throws Exception {
    // Negative test that assertGlob does throw an error when asserting against the wrong values.
    try {
      assertGlobMatches(
          /*result=*/ ImmutableList.of("Wombat1.java", "This_file_doesn_t_exist.java"),
          /*includes=*/ ImmutableList.of("W*", "subdir"),
          /*excludes=*/ ImmutableList.<String>of(),
          /* excludeDirs= */ true);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessage("ERROR /globs/BUILD:2:73: name 'this_will_fail' is not defined");
    }
  }
  // excludeDirs=true drops the matched 'subdir' directory from the result.
  @Test
  public void testGlobExcludeDirectories() throws Exception {
    assertGlobMatches(
        /*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java"),
        /*includes=*/ ImmutableList.of("W*", "subdir"),
        /*excludes=*/ ImmutableList.<String>of(),
        /* excludeDirs= */ true);
  }
  // excludeDirs=false keeps the matched 'subdir' directory.
  @Test
  public void testGlobDoesNotExcludeDirectories() throws Exception {
    assertGlobMatches(
        /*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java", "subdir"),
        /*includes=*/ ImmutableList.of("W*", "subdir"),
        /*excludes=*/ ImmutableList.<String>of(),
        /* excludeDirs= */ false);
  }
  // An empty excludes list behaves the same as no excludes.
  @Test
  public void testGlobWithEmptyExcludedList() throws Exception {
    assertGlobMatches(
        /*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java"),
        /*includes=*/ ImmutableList.of("W*"),
        /*excludes=*/ Collections.<String>emptyList(),
        /* excludeDirs= */ false);
  }
  // The '?' wildcard is forbidden in glob patterns.
  @Test
  public void testGlobWithQuestionMarkProducesError() throws Exception {
    assertGlobProducesError("Wombat?.java", true);
  }
  // '*' is fine; only '?' is forbidden.
  @Test
  public void testGlobWithoutQuestionMarkDoesntProduceError() throws Exception {
    assertGlobProducesError("Wombat*.java", false);
  }
  // An exclude pattern removes matching entries from the include result.
  @Test
  public void testGlobWithNonMatchingExcludedList() throws Exception {
    assertGlobMatches(
        /*result=*/ ImmutableList.of("Wombat1.java"),
        /*includes=*/ ImmutableList.of("W*"),
        /*excludes=*/ ImmutableList.of("*2*"),
        /* excludeDirs= */ false);
  }
  // Excludes apply across multiple include patterns.
  @Test
  public void testGlobWithTwoMatchingGlobExpressionsAndNonmatchingExclusion() throws Exception {
    assertGlobMatches(
        /*result=*/ ImmutableList.of("Wombat1.java", "subdir/Wombat3.java"),
        /*includes=*/ ImmutableList.of("W*", "subdir/W*"),
        /*excludes=*/ ImmutableList.of("*2*"),
        /* excludeDirs= */ false);
  }
  // Excluding the top-level matches leaves only the subdir match.
  @Test
  public void testGlobWithSubdirMatchAndExclusion() throws Exception {
    assertGlobMatches(
        /*result=*/ ImmutableList.of("subdir/Wombat3.java"),
        /*includes=*/ ImmutableList.of("W*", "subdir/W*"),
        /*excludes=*/ ImmutableList.of("Wombat*.java"),
        /* excludeDirs= */ false);
  }
  // A literal '?' in the pattern is reported as a forbidden wildcard.
  @Test
  public void testBadCharacterInGlob() throws Exception {
    events.setFailFast(false);
    assertGlobFails("glob(['?'])", "glob pattern '?' contains forbidden '?' wildcard");
  }
  /**
   * Tests that a glob evaluation that encounters an I/O error throws instead of constructing a
   * package.
   */
  @Test
  public void testGlobWithIOErrors() throws Exception {
    events.setFailFast(false);
    scratch.dir("/pkg");
    scratch.dir("/pkg/globs");
    // An unreadable subdirectory forces the recursive glob to hit an I/O error.
    Path unreadableSubdir = scratch.resolve("/pkg/globs/unreadable_subdir");
    unreadableSubdir.createDirectory();
    unreadableSubdir.setReadable(false);
    Path file = scratch.file("/pkg/BUILD", "cc_library(name = 'c', srcs = glob(['globs/**']))");
    MoreAsserts.assertThrows(NoSuchPackageException.class, () -> packages.eval("pkg", file));
    events.assertContainsError("Directory is not readable");
  }
  // package_group with an empty packages list is valid.
  @Test
  public void testPackageGroupSpecMinimal() throws Exception {
    expectEvalSuccess("package_group(name='skin', packages=[])");
  }
  // package_group with a single package spec is valid.
  @Test
  public void testPackageGroupSpecSimple() throws Exception {
    expectEvalSuccess("package_group(name='skin', packages=['//group/abelian'])");
  }
  // The packages attribute may be omitted entirely.
  @Test
  public void testPackageGroupSpecEmpty() throws Exception {
    expectEvalSuccess("package_group(name='seed')");
  }
  // package_group may include other package_groups via includes=.
  @Test
  public void testPackageGroupSpecIncludes() throws Exception {
    expectEvalSuccess(
        "package_group(name='wine',",
        "              includes=['//wine:cabernet_sauvignon',",
        "                        '//wine:pinot_noir'])");
  }
  // An invalid package name in packages= is an error.
  @Test
  public void testPackageGroupSpecBad() throws Exception {
    expectEvalError("invalid package name", "package_group(name='skin', packages=['--25:17--'])");
  }
  // Two package_groups with the same name conflict.
  @Test
  public void testPackageGroupsWithSameName() throws Exception {
    expectEvalError(
        "conflicts with existing package group",
        "package_group(name='skin', packages=[])",
        "package_group(name='skin', packages=[])");
  }
  // package_group takes keyword arguments only; positional args are rejected.
  @Test
  public void testPackageGroupNamedArguments() throws Exception {
    expectEvalError("does not accept positional arguments", "package_group('skin')");
  }
  // package(default_visibility=[]) is accepted and yields a non-null default visibility.
  @Test
  public void testPackageSpecMinimal() throws Exception {
    Package pkg = expectEvalSuccess("package(default_visibility=[])");
    assertThat(pkg.getDefaultVisibility()).isNotNull();
  }
  // default_visibility may name a package group label.
  @Test
  public void testPackageSpecSimple() throws Exception {
    expectEvalSuccess("package(default_visibility=['//group:lie'])");
  }
  // An invalid label in default_visibility is an error.
  @Test
  public void testPackageSpecBad() throws Exception {
    expectEvalError("invalid target name", "package(default_visibility=[':::'])");
  }
  // package() may appear at most once per BUILD file.
  @Test
  public void testDoublePackageSpecification() throws Exception {
    expectEvalError(
        "can only be used once",
        "package(default_visibility=[])",
        "package(default_visibility=[])");
  }
  // package() with no arguments is an error.
  @Test
  public void testEmptyPackageSpecification() throws Exception {
    expectEvalError("at least one argument must be given to the 'package' function", "package()");
  }
  // package(default_testonly = 1) sets the package-level default.
  @Test
  public void testDefaultTestonly() throws Exception {
    Package pkg = expectEvalSuccess("package(default_testonly = 1)");
    assertThat(pkg.getDefaultTestOnly()).isTrue();
  }
  // package(default_deprecation = ...) sets the package-level default message.
  @Test
  public void testDefaultDeprecation() throws Exception {
    String testMessage = "OMG PONIES!";
    Package pkg = expectEvalSuccess("package(default_deprecation = \"" + testMessage + "\")");
    assertThat(pkg.getDefaultDeprecation()).isEqualTo(testMessage);
  }
  // Exporting 'BUILD' resolves to the package's own build-file target.
  @Test
  public void testExportsBuildFile() throws Exception {
    Package pkg =
        expectEvalSuccess("exports_files(['BUILD'], visibility=['//visibility:private'])");
    assertThat(pkg.getTarget("BUILD")).isEqualTo(pkg.getBuildFile());
  }
  // A package-level default_deprecation is inherited by rules that don't set their own.
  @Test
  public void testDefaultDeprecationPropagation() throws Exception {
    String msg = "I am completely operational, and all my circuits are functioning perfectly.";
    Path file =
        scratch.file(
            "/foo/BUILD",
            "package(default_deprecation = \"" + msg + "\")",
            "sh_library(name = 'bar', srcs=['b'])");
    Package pkg = packages.eval("foo", file);
    Rule fooRule = (Rule) pkg.getTarget("bar");
    String deprAttr =
        attributes(fooRule).get("deprecation", com.google.devtools.build.lib.syntax.Type.STRING);
    assertThat(deprAttr).isEqualTo(msg);
  }
  // default_testonly propagates to rules that don't set testonly, while an explicit
  // testonly=0 on a rule overrides the package default.
  @Test
  public void testDefaultTestonlyPropagation() throws Exception {
    Path file =
        scratch.file(
            "/foo/BUILD",
            "package(default_testonly = 1)",
            "sh_library(name = 'foo', srcs=['b'])",
            "sh_library(name = 'bar', srcs=['b'], testonly = 0)");
    Package pkg = packages.eval("foo", file);
    Rule fooRule = (Rule) pkg.getTarget("foo");
    assertThat(
            attributes(fooRule).get("testonly", com.google.devtools.build.lib.syntax.Type.BOOLEAN))
        .isTrue();
    Rule barRule = (Rule) pkg.getTarget("bar");
    assertThat(
            attributes(barRule).get("testonly", com.google.devtools.build.lib.syntax.Type.BOOLEAN))
        .isFalse();
  }
  // A rule-level deprecation attribute overrides the package default_deprecation.
  @Test
  public void testDefaultDeprecationOverriding() throws Exception {
    String msg = "I am completely operational, and all my circuits are functioning perfectly.";
    String deceive = "OMG PONIES!";
    Path file =
        scratch.file(
            "/foo/BUILD",
            "package(default_deprecation = \"" + deceive + "\")",
            "sh_library(name = 'bar', srcs=['b'], deprecation = \"" + msg + "\")");
    Package pkg = packages.eval("foo", file);
    Rule fooRule = (Rule) pkg.getTarget("bar");
    String deprAttr =
        attributes(fooRule).get("deprecation", com.google.devtools.build.lib.syntax.Type.STRING);
    assertThat(deprAttr).isEqualTo(msg);
  }
  // package(features=...) is recorded on the package; rules declared before the
  // package() call do not change the recorded feature set.
  @Test
  public void testPackageFeatures() throws Exception {
    Path file =
        scratch.file(
            "/a/BUILD",
            "sh_library(name='before')",
            "package(features=['b', 'c'])",
            "sh_library(name='after')");
    Package pkg = packages.eval("a", file);
    assertThat(pkg.getFeatures()).containsExactly("b", "c");
  }
@Test
public void testTransientErrorsInGlobbing() throws Exception {
events.setFailFast(false);
Path buildFile =
scratch.file("/e/BUILD", "sh_library(name = 'e', data = glob(['*.txt']))");
Path parentDir = buildFile.getParentDirectory();
scratch.file("/e/data.txt");
throwOnReaddir = parentDir;
MoreAsserts.assertThrows(
NoSuchPackageException.class, () -> packages.createPackage("e", buildFile));
events.setFailFast(true);
throwOnReaddir = null;
Package pkg = packages.createPackage("e", buildFile);
assertThat(pkg.containsErrors()).isFalse();
assertThat(pkg.getRule("e")).isNotNull();
List globList = (List) pkg.getRule("e").getAttributeContainer().getAttr("data");
assertThat(globList).containsExactly(Label.parseAbsolute("//e:data.txt", ImmutableMap.of()));
}
  // Exporting the same file twice with identical (public) visibility is tolerated.
  @Test
  public void testExportTwicePublicOK() throws Exception {
    // In theory, this could be an error, but too many existing files rely on it
    // and it is okay.
    expectEvalSuccess(
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:public\" ])",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:public\" ])");
  }
  // Same tolerance for a repeated private export.
  @Test
  public void testExportTwicePublicOK2() throws Exception {
    expectEvalSuccess(
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:private\" ])",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:private\" ])");
  }
  // Re-exporting with a *different* visibility is an error (private then public).
  @Test
  public void testExportTwiceFail() throws Exception {
    expectEvalError(
        "visibility for exported file 'a.cc' declared twice",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:private\" ])",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:public\" ])");
  }
  // Same error with the order reversed (public then private).
  @Test
  public void testExportTwiceFail2() throws Exception {
    expectEvalError(
        "visibility for exported file 'a.cc' declared twice",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:public\" ])",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:private\" ])");
  }
  // Declaring licenses twice for the same exported file is an error even if identical.
  @Test
  public void testExportLicenseTwice() throws Exception {
    expectEvalError(
        "licenses for exported file 'a.cc' declared twice",
        "exports_files([\"a.cc\"], licenses = [\"notice\"])",
        "exports_files([\"a.cc\"], licenses = [\"notice\"])");
  }
  // A genrule output may not collide with a previously exported source file.
  @Test
  public void testExportGenruleConflict() throws Exception {
    expectEvalError(
        "generated file 'a.cc' in rule 'foo' conflicts with existing source file",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:public\" ])",
        "genrule(name = 'foo',",
        "    outs = ['a.cc'],",
        "    cmd = '')");
  }
  // Conversely, exporting a file already generated by a genrule is an error.
  @Test
  public void testGenruleExportConflict() throws Exception {
    expectEvalError(
        "generated label '//pkg:a.cc' conflicts with existing generated file",
        "genrule(name = 'foo',",
        "    outs = ['a.cc'],",
        "    cmd = '')",
        "exports_files([\"a.cc\"],",
        "    visibility = [ \"//visibility:public\" ])");
  }
  // A minimal valid environment_group: one environment that is also the default.
  @Test
  public void testValidEnvironmentGroup() throws Exception {
    expectEvalSuccess(
        "environment(name = 'foo')",
        "environment_group(name='group', environments = [':foo'], defaults = [':foo'])");
  }
  // 'defaults' is a mandatory named-only argument of environment_group.
  @Test
  public void testIncompleteEnvironmentGroup() throws Exception {
    expectEvalError(
        "missing mandatory named-only argument 'defaults' while calling "
            + "environment_group(*, name: string, ",
        "environment(name = 'foo')",
        "environment_group(name='group', environments = [':foo'])");
  }
  // Referencing a non-existent environment target is an error.
  @Test
  public void testEnvironmentGroupMissingTarget() throws Exception {
    expectEvalError(
        "environment //pkg:foo does not exist",
        "environment_group(name='group', environments = [':foo'], defaults = [':foo'])");
  }
  // environments= must reference environment rules, not arbitrary rules.
  @Test
  public void testEnvironmentGroupWrongTargetType() throws Exception {
    expectEvalError(
        "//pkg:foo is not a valid environment",
        "cc_library(name = 'foo')",
        "environment_group(name='group', environments = [':foo'], defaults = [':foo'])");
  }
  // Member environments must live in the same package as the group.
  @Test
  public void testEnvironmentGroupWrongPackage() throws Exception {
    expectEvalError(
        "//foo:foo is not in the same package as group //pkg:group",
        "environment_group(name='group', environments = ['//foo'], defaults = ['//foo'])");
  }
  // Every default must also be a declared environment of the group.
  @Test
  public void testEnvironmentGroupInvalidDefault() throws Exception {
    expectEvalError(
        "default //pkg:bar is not a declared environment for group //pkg:group",
        "environment(name = 'foo')",
        "environment(name = 'bar')",
        "environment_group(name='group', environments = [':foo'], defaults = [':bar'])");
  }
  // Duplicate labels in environments= are rejected.
  @Test
  public void testEnvironmentGroupDuplicateEnvironments() throws Exception {
    expectEvalError(
        "label '//pkg:foo' is duplicated in the 'environments' list of 'group'",
        "environment(name = 'foo')",
        "environment_group(name='group', environments = [':foo', ':foo'], defaults = [':foo'])");
  }
  // Duplicate labels in defaults= are rejected.
  @Test
  public void testEnvironmentGroupDuplicateDefaults() throws Exception {
    expectEvalError(
        "label '//pkg:foo' is duplicated in the 'defaults' list of 'group'",
        "environment(name = 'foo')",
        "environment_group(name='group', environments = [':foo'], defaults = [':foo', ':foo'])");
  }
@Test
public void testMultipleEnvironmentGroupsValidMembership() throws Exception {
expectEvalSuccess(
"environment(name = 'foo')",
"environment(name = 'bar')",
"environment_group(name='foo_group', environments = [':foo'], defaults = [':foo'])",
"environment_group(name='bar_group', environments = [':bar'], defaults = [':bar'])");
}
@Test
public void testMultipleEnvironmentGroupsConflictingMembership() throws Exception {
expectEvalError(
"environment //pkg:foo belongs to both //pkg:bar_group and //pkg:foo_group",
"environment(name = 'foo')",
"environment(name = 'bar')",
"environment_group(name='foo_group', environments = [':foo'], defaults = [':foo'])",
"environment_group(name='bar_group', environments = [':foo'], defaults = [':foo'])");
}
@Test
public void testFulfillsReferencesWrongTargetType() throws Exception {
expectEvalError(
"in \"fulfills\" attribute of //pkg:foo: //pkg:bar is not a valid environment",
"environment(name = 'foo', fulfills = [':bar'])",
"cc_library(name = 'bar')",
"environment_group(name='foo_group', environments = [':foo'], defaults = [])");
}
@Test
public void testFulfillsNotInEnvironmentGroup() throws Exception {
expectEvalError(
"in \"fulfills\" attribute of //pkg:foo: //pkg:bar is not a member of this group",
"environment(name = 'foo', fulfills = [':bar'])",
"environment(name = 'bar')",
"environment_group(name='foo_group', environments = [':foo'], defaults = [])");
}
// package() defaults for compatible_with/restricted_to are recorded on
// the Package object and parsed as absolute labels.
@Test
public void testPackageDefaultEnvironments() throws Exception {
    Package pkg =
        expectEvalSuccess(
            "package(",
            " default_compatible_with=['//foo'],",
            " default_restricted_to=['//bar'],",
            ")");
    assertThat(pkg.getDefaultCompatibleWith())
        .containsExactly(Label.parseAbsolute("//foo", ImmutableMap.of()));
    assertThat(pkg.getDefaultRestrictedTo())
        .containsExactly(Label.parseAbsolute("//bar", ImmutableMap.of()));
}

// Duplicate labels are rejected in default_compatible_with...
@Test
public void testPackageDefaultCompatibilityDuplicates() throws Exception {
    expectEvalError(
        "'//foo:foo' is duplicated in the 'default_compatible_with' list",
        "package(default_compatible_with=['//foo', '//bar', '//foo'])");
}

// ...and in default_restricted_to.
@Test
public void testPackageDefaultRestrictionDuplicates() throws Exception {
    expectEvalError(
        "'//foo:foo' is duplicated in the 'default_restricted_to' list",
        "package(default_restricted_to=['//foo', '//bar', '//foo'])");
}

// Test-fixture hooks for the shared base class.
@Override
protected PackageFactoryApparatus createPackageFactoryApparatus() {
    return new PackageFactoryApparatus(events.reporter());
}

@Override
protected String getPathPrefix() {
    return "";
}
}
| [
"fengruilin@localhost.localdomain"
] | fengruilin@localhost.localdomain |
eda43f007b91233066fb860f29eec5a8b01ac322 | 35483f71b227763ad37bf5766558df16a3055447 | /src/main/java/com/softwareverde/bitcoin/server/Environment.java | 7beffa4edbe9e3a9e2f04806c290521bf2bcc24f | [
"MIT"
] | permissive | SoftwareVerde/bitcoin-verde | 1f4f49ed46a2086d33053b0ed5608605902c2be9 | e9d140cac8b93a9db572bda906db9decfac1d7ae | refs/heads/master | 2023-07-11T16:28:46.730818 | 2023-04-29T13:40:44 | 2023-04-29T13:40:44 | 124,832,169 | 42 | 19 | MIT | 2022-12-21T21:59:19 | 2018-03-12T04:05:56 | Java | UTF-8 | Java | false | false | 1,251 | java | package com.softwareverde.bitcoin.server;
import com.softwareverde.bitcoin.server.database.Database;
import com.softwareverde.bitcoin.server.database.DatabaseConnectionFactory;
import com.softwareverde.bitcoin.server.database.DatabaseConnectionFactoryFactory;
public class Environment {
// The database handle this environment wraps, fixed at construction.
protected final Database _database;
// Factory-of-factories used to create connection factories on demand.
protected final DatabaseConnectionFactoryFactory _databaseConnectionFactoryFactory;
// Lazily created shared connection factory; null until first requested.
// Guarded by the synchronized getDatabaseConnectionFactory().
protected DatabaseConnectionFactory _databaseConnectionFactory;

/**
 * @param database the database this environment exposes
 * @param databaseConnectionFactoryFactory source of new connection factories
 */
public Environment(final Database database, final DatabaseConnectionFactoryFactory databaseConnectionFactoryFactory) {
    _database = database;
    _databaseConnectionFactoryFactory = databaseConnectionFactoryFactory;
}
/** Returns the database handle provided at construction. */
public Database getDatabase() {
    return _database;
}
/**
 * Returns the shared connection factory, creating it on first use.
 * Synchronized so concurrent callers observe a single cached instance.
 */
public synchronized DatabaseConnectionFactory getDatabaseConnectionFactory() {
    DatabaseConnectionFactory factory = _databaseConnectionFactory;
    if (factory == null) {
        factory = _databaseConnectionFactoryFactory.newDatabaseConnectionFactory();
        _databaseConnectionFactory = factory;
    }
    return factory;
}
/**
 * Creates a brand-new connection factory on every call (never the
 * cached instance returned by getDatabaseConnectionFactory()).
 */
public DatabaseConnectionFactory newDatabaseConnectionFactory() {
    return _databaseConnectionFactoryFactory.newDatabaseConnectionFactory();
}
} | [
"josh@softwareverde.com"
] | josh@softwareverde.com |
49df733cba2fb3f141e80b27b68f88d51e806f97 | 0dad1b4e8a8f35edea72971b4682fda5372bd52c | /JavaSource/utils/ImageUtil.java | b45fb588db1cda56679a5dcd58ee18adda0b4ccb | [] | no_license | ramirescm/Web-Media-Browser | 49183bfcda0a46d5976a1656ddca1e08c231340b | ad7c9e444e4d25ed55f6cc83865efb2c3801aca7 | refs/heads/master | 2021-01-23T22:38:27.960848 | 2012-05-10T17:41:33 | 2012-05-10T17:41:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,982 | java | package utils;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.stream.ImageOutputStream;
import database.ImageItem;
/**
 * Creates JPEG thumbnails from in-memory images.
 */
public class ImageUtil {

    /**
     * Decodes {@code inputImageData} and builds a JPEG thumbnail at most
     * {@code thumbHeight} pixels tall (see the {@link Image} overload for
     * the exact scaling rules).
     *
     * @param inputImageData raw bytes in any format ImageIO can decode
     * @param thumbHeight    desired thumbnail height in pixels
     * @return the encoded thumbnail wrapped in an {@code ImageItem}
     * @throws Exception if the bytes cannot be decoded or encoding fails
     */
    public ImageItem createThumbNail(final byte[] inputImageData, int thumbHeight) throws Exception {
        // try-with-resources closes the stream even when decoding or
        // thumbnail creation throws (the original leaked it on error).
        try (InputStream inputStream = new ByteArrayInputStream(inputImageData)) {
            final Image image = javax.imageio.ImageIO.read(inputStream);
            if (image == null) {
                // ImageIO.read() returns null (rather than throwing) for
                // unrecognized formats; surface that as an explicit error
                // instead of a NullPointerException downstream.
                throw new IOException("Unsupported or corrupt image data");
            }
            return createThumbNail(image, thumbHeight);
        }
    }

    /**
     * Scales {@code image} to at most {@code thumbHeight} pixels tall,
     * preserving aspect ratio, and never upscales: if the source is
     * already smaller in either dimension, that dimension is kept as-is.
     * The result is encoded as a maximum-quality JPEG.
     *
     * @param image       decoded source image
     * @param thumbHeight desired thumbnail height in pixels
     * @return the encoded thumbnail wrapped in an {@code ImageItem}
     * @throws IOException if no JPEG writer is available or encoding fails
     */
    public ImageItem createThumbNail(final Image image, int thumbHeight) throws IOException {
        final int imageWidth = image.getWidth(null);
        final int imageHeight = image.getHeight(null);
        final double imageRatio = (double) imageWidth / (double) imageHeight;

        // Width derived from the requested height; clamp so we never upscale.
        int thumbWidth = (int) (thumbHeight * imageRatio);
        if (imageWidth < thumbWidth && imageHeight < thumbHeight) {
            thumbWidth = imageWidth;
            thumbHeight = imageHeight;
        } else if (imageWidth < thumbWidth) {
            thumbWidth = imageWidth;
        } else if (imageHeight < thumbHeight) {
            thumbHeight = imageHeight;
        }

        final BufferedImage thumbImage = new BufferedImage(thumbWidth, thumbHeight, BufferedImage.TYPE_INT_RGB);
        final Graphics2D graphics2D = thumbImage.createGraphics();
        try {
            graphics2D.setBackground(Color.WHITE);
            graphics2D.setPaint(Color.WHITE);
            graphics2D.fillRect(0, 0, thumbWidth, thumbHeight);
            graphics2D.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
            graphics2D.drawImage(image, 0, 0, thumbWidth, thumbHeight, null);
        } finally {
            // Release the native graphics context promptly (was never
            // disposed in the original).
            graphics2D.dispose();
        }

        // Get a JPEG writer and configure it for the highest quality.
        final Iterator<ImageWriter> iter = ImageIO.getImageWritersByFormatName("jpeg");
        if (!iter.hasNext()) {
            // Defensive: a standard JRE ships a JPEG writer, but fail
            // clearly rather than with a NoSuchElementException.
            throw new IOException("No JPEG ImageWriter available");
        }
        final ImageWriter writer = iter.next();
        final ImageWriteParam iwp = writer.getDefaultWriteParam();
        iwp.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
        iwp.setCompressionQuality(1); // 1.0f = maximum quality

        final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
        try (ImageOutputStream imageOutputStream = ImageIO.createImageOutputStream(byteOutputStream)) {
            writer.setOutput(imageOutputStream);
            writer.write(null, new IIOImage(thumbImage, null, null), iwp);
        } finally {
            // Free the writer's native resources (leaked in the original).
            writer.dispose();
        }

        final ImageItem outputImage = new ImageItem();
        outputImage.setImageData(byteOutputStream.toByteArray());
        outputImage.setMimeType("image/jpeg");
        outputImage.setSizeBytes(byteOutputStream.size());
        return outputImage;
    }
}
| [
"i.coronel@hotmail.com"
] | i.coronel@hotmail.com |
53c26d152f7ebfbc3ca465b0e833b417b75e3b1e | 28982258f99d95dd7025eadd8604bf02dd630942 | /src/main/java/com/test/design/proxy/ProxyImage.java | 2486e332801c32c4ee981919e32201c75109434d | [] | no_license | arpitaggarwal/design-patterns | c3fb39d86e1c2b0cd6eb34b3b8ebc3518400c22c | e9fe8a1b3f0d4b7875dea9a4ace08ee693bc1bfc | refs/heads/master | 2021-05-04T10:52:09.203677 | 2017-03-07T18:50:31 | 2017-03-07T18:50:31 | 50,238,587 | 0 | 1 | null | null | null | null | UTF-8 | Java | false | false | 337 | java | package com.test.design.proxy;
/**
 * Virtual proxy for {@code Image}: defers constructing the underlying
 * {@code RealImage} until the first {@link #display()} call.
 */
public class ProxyImage implements Image {

    // Lazily created on first display(); null until then.
    private RealImage realImage;
    // File backing the image; passed through to RealImage when created.
    private String fileName;

    public ProxyImage(String fileName) {
        this.fileName = fileName;
    }

    @Override
    public void display() {
        // Construct the real image only once, on first use.
        // NOTE(review): not thread-safe -- concurrent first calls could
        // construct RealImage twice; confirm single-threaded usage.
        if (realImage == null) {
            realImage = new RealImage(fileName);
        }
        realImage.display();
    }
} | [
"aggarwalarpit.89@gmail.com"
] | aggarwalarpit.89@gmail.com |
6759c8ab7f09a5ef782f54ee6af56cb8ba549690 | 7b0521dfb4ec76ee1632b614f32ee532f4626ea2 | /src/main/java/alcoholmod/Mathioks/Mob/RenderEntityNarutoMob5.java | 92a10f7b896f93efb376035904b00d292bad8d7f | [] | no_license | M9wo/NarutoUnderworld | 6c5be180ab3a00b4664fd74f6305e7a1b50fe9fc | 948065d8d43b0020443c0020775991b91f01dd50 | refs/heads/master | 2023-06-29T09:27:24.629868 | 2021-07-27T03:18:08 | 2021-07-27T03:18:08 | 389,832,397 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 935 | java | package alcoholmod.Mathioks.Mob;
import net.minecraft.client.model.ModelBase;
import net.minecraft.client.renderer.entity.RenderLiving;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.util.ResourceLocation;
import org.lwjgl.opengl.GL11;
/**
 * Renderer for {@code EntityNarutoMob5}, using a single fixed texture.
 */
public class RenderEntityNarutoMob5 extends RenderLiving {

    // Texture applied to every instance of this mob.
    private static final ResourceLocation mobTextures = new ResourceLocation("tm:textures/entity/nagato.png");

    public RenderEntityNarutoMob5(ModelBase par1ModelBase, float par2) {
        super(par1ModelBase, par2);
    }

    // Typed overload: always the same texture, regardless of the entity.
    protected ResourceLocation getEntityTexture(EntityNarutoMob5 Entity) {
        return mobTextures;
    }

    // Framework entry point; delegates to the typed overload above.
    protected ResourceLocation getEntityTexture(Entity entity) {
        return getEntityTexture((EntityNarutoMob5)entity);
    }

    // Identity scale (1,1,1) -- effectively a no-op, kept as the hook
    // where a size tweak would go.
    protected void preRenderCallback(EntityLivingBase par1EntityLivingBase, float par2) {
        GL11.glScalef(1.0F, 1.0F, 1.0F);
    }
}
| [
"mrkrank2023@gmail.com"
] | mrkrank2023@gmail.com |
d428bf820ba34759c81d30609fccc59107ba26e7 | 5de0799b4a3b37db9c056d2c633fda14b58f7514 | /src/java/entity/Attendencie.java | 88f5010500d88a9bc29c6169a45c6bb327d0fdb3 | [] | no_license | pantelicnikola/PIA-projekat | ac34eed4d1e622df0f63d58d4b225cf9cc16deed | 183e8e3a625935197a9490433d244321f668291c | refs/heads/master | 2021-01-13T05:19:54.493967 | 2017-02-14T14:17:25 | 2017-02-14T14:17:25 | 81,331,437 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,372 | java | package entity;
// Generated Jan 19, 2017 1:02:32 PM by Hibernate Tools 4.3.1
import java.util.Date;
import javax.persistence.AttributeOverride;
import javax.persistence.AttributeOverrides;
import javax.persistence.Column;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
/**
* Attendencie generated by hbm2java
*/
@Entity
@Table(name="attendencie"
    ,catalog="festivali"
)
public class Attendencie implements java.io.Serializable {

    // Embedded composite primary key (IdFestival + Performer); see getId().
    private AttendencieId id = new AttendencieId();
    // Festival this row belongs to; mapped read-only below because the
    // IdFestival column is already owned by the embedded id.
    private Festival festival;
    // Nullable start of the slot (schema allows absent times).
    private Date startTime;
    // Nullable end of the slot.
    private Date endTime;

    /** No-arg constructor required by JPA/Hibernate. */
    public Attendencie() {
    }

    public Attendencie(AttendencieId id, Festival festival) {
        this.id = id;
        this.festival = festival;
    }

    public Attendencie(AttendencieId id, Festival festival, Date startTime, Date endTime) {
        this.id = id;
        this.festival = festival;
        this.startTime = startTime;
        this.endTime = endTime;
    }

    // Composite key; attribute names remapped onto the table's columns.
    @EmbeddedId
    @AttributeOverrides( {
        @AttributeOverride(name="idFestival", column=@Column(name="IdFestival", nullable=false) ),
        @AttributeOverride(name="performer", column=@Column(name="Performer", nullable=false, length=45) ) } )
    public AttendencieId getId() {
        return this.id;
    }

    public void setId(AttendencieId id) {
        this.id = id;
    }

    // insertable/updatable are false: the IdFestival column is written
    // through the embedded id above, not through this association.
    @ManyToOne(fetch=FetchType.LAZY)
    @JoinColumn(name="IdFestival", nullable=false, insertable=false, updatable=false)
    public Festival getFestival() {
        return this.festival;
    }

    public void setFestival(Festival festival) {
        this.festival = festival;
    }

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name="StartTime", length=19)
    public Date getStartTime() {
        return this.startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name="EndTime", length=19)
    public Date getEndTime() {
        return this.endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }
}
| [
"pantelic.n94@gmail.com"
] | pantelic.n94@gmail.com |
631b251cb060563eb952a81bc59f551040d92714 | a90a7bfc49b5fe3533857383d3e7e5407fe03f82 | /xconf-dataservice/src/main/java/com/comcast/xconf/estbfirmware/legacy/LegacyConverterHelper.java | 7fb9885d37813b56dc26c111eec57062dad3ddcb | [
"MIT",
"Apache-2.0"
] | permissive | comcast-icfar/xconfserver | d8406f4d3baffd511ec386bef9b6c31e65943e63 | a13989e16510c734d13a1575f992f8eacca8250b | refs/heads/main | 2023-01-11T19:40:56.417261 | 2020-11-17T20:22:37 | 2020-11-17T20:22:37 | 308,412,315 | 0 | 1 | NOASSERTION | 2020-11-18T16:48:16 | 2020-10-29T18:11:58 | Java | UTF-8 | Java | false | false | 3,221 | java | /*
* If not stated otherwise in this file or this component's Licenses.txt file the
* following copyright and licenses apply:
*
* Copyright 2018 RDK Management
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Igor Kostrov
* Created: 1/22/2016
*/
package com.comcast.xconf.estbfirmware.legacy;
import com.comcast.apps.hesperius.ruleengine.domain.additional.AuxFreeArgType;
import com.comcast.apps.hesperius.ruleengine.domain.additional.data.IpAddressGroup;
import com.comcast.apps.hesperius.ruleengine.domain.standard.StandardOperation;
import com.comcast.apps.hesperius.ruleengine.main.api.Operation;
import com.comcast.apps.hesperius.ruleengine.main.impl.Condition;
import com.comcast.xconf.GenericNamespacedList;
import com.comcast.xconf.StbContext;
import com.comcast.xconf.converter.GenericNamespacedListsConverter;
import com.comcast.xconf.estbfirmware.factory.RuleFactory;
import com.comcast.xconf.service.GenericNamespacedListQueriesService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * Spring helper that resolves {@link IpAddressGroup} references found in
 * rule {@link Condition}s, for legacy-format conversion.
 */
@Component
public class LegacyConverterHelper {

    private static final Logger log = LoggerFactory.getLogger(LegacyConverterHelper.class);

    @Autowired
    private GenericNamespacedListQueriesService listService;

    /**
     * Resolves the IP address group a condition refers to.
     *
     * <p>IN_LIST conditions carry a namespaced-list id: the list is looked
     * up and converted, or a placeholder group is synthesized when no such
     * list exists. Plain IN conditions embed the group directly. Any other
     * operation yields an empty group (after logging a warning).
     */
    public IpAddressGroup getIpAddressGroup(Condition cond) {
        final Operation operation = cond.getOperation();

        if (RuleFactory.IN_LIST.equals(operation)) {
            final String listId = (String) cond.getFixedArg().getValue();
            final GenericNamespacedList list = listService.getListById(listId);
            if (list == null) {
                return makeIpAddressGroup(listId);
            }
            return GenericNamespacedListsConverter.convertToIpAddressGroup(list);
        }

        if (StandardOperation.IN.equals(operation)) {
            return (IpAddressGroup) cond.getFixedArg().getValue();
        }

        log.warn("Unknown operation for IP freeArg: " + operation);
        return new IpAddressGroup();
    }

    /** Builds a placeholder group whose id and name are both {@code id}. */
    public IpAddressGroup makeIpAddressGroup(String id) {
        final IpAddressGroup placeholder = new IpAddressGroup();
        placeholder.setId(id);
        placeholder.setName(id);
        return placeholder;
    }

    /**
     * True when the condition is a legacy-style IP condition: a free arg
     * of type {@code AuxFreeArgType.IP_ADDRESS} named
     * {@code StbContext.IP_ADDRESS}, operation IN, and an inline
     * {@link IpAddressGroup} fixed arg.
     */
    public static boolean isLegacyIpCondition(Condition condition) {
        final boolean ipFreeArg =
                AuxFreeArgType.IP_ADDRESS.equals(condition.getFreeArg().getType())
                        && StbContext.IP_ADDRESS.equals(condition.getFreeArg().getName());
        return ipFreeArg
                && StandardOperation.IN.equals(condition.getOperation())
                && condition.getFixedArg().getValue() instanceof IpAddressGroup;
    }
}
| [
"Gabriel_DeJesus@cable.comcast.com"
] | Gabriel_DeJesus@cable.comcast.com |
f0bf481cf318a39918f4ed81ff2dfd7e86f568a3 | b607a7802ae6631db6c6b2726cd76c2d9c803eda | /Player.java | 55ccab4adca7d49e7761e066a075dbd580ea0b90 | [] | no_license | gutee/umlProject | 4b388cedcdd67a6a809c2e610ddd27fcc60383ad | 705637b055b632720ba4ba871067575d10517981 | refs/heads/master | 2016-09-05T14:21:45.382405 | 2014-08-22T14:40:44 | 2014-08-22T14:40:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 440 | java | package prog2.tp2;
import java.util.ArrayList;
/**
* Created by Martin Gutierrez.
* User: Martin
* Date: 26/08/11
* Time: 07:49
* To change this template use File | Settings | File Templates.
*/
/**
 * A card-game participant holding a hand of cards.
 */
public class Player {

    // The cards currently held by this player.
    // FIX: the field was declared as type "Card" while being assigned an
    // ArrayList (and getHand() returned ArrayList) -- that did not
    // compile. The field is a list of cards; generics added throughout.
    public ArrayList<Card> hand;

    /** Creates a player with an empty hand. */
    public Player() {
        this.hand = new ArrayList<Card>();
    }

    /** Returns the live hand (not a defensive copy). */
    public ArrayList<Card> getHand() {
        return hand;
    }

    /** Adds {@code card} to this player's hand. */
    public void add(Card card) {
        hand.add(card);
    }
}
| [
"gutix007@hotmail.com"
] | gutix007@hotmail.com |
25b631b93ea6a439af2c626e303a0170f0cc685e | 2763b5bae4b32fde70fc1aa5388d0779998860b2 | /src/com/bean/Book_Reader.java | 4c859f361b81e6f896d2058ec77239eab8926bbb | [] | no_license | ashwinkg/Library-Management-System | 5f8955d632a56c8722441bbc60faccb7f469ab6f | 7348279edd64bf4254dc950207721d212e7b6410 | refs/heads/master | 2022-02-18T03:26:36.119123 | 2019-09-17T01:58:32 | 2019-09-17T01:58:32 | 208,935,944 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 455 | java | package com.bean;
/**
 * Association record pairing a book id with a reader id
 * (presumably a lending/assignment row -- confirm against callers).
 */
public class Book_Reader {

    // Identifier of the associated book.
    private int book_id;
    // Identifier of the associated reader.
    private int reader_id;

    public Book_Reader(int book_id, int reader_id) {
        this.book_id = book_id;
        this.reader_id = reader_id;
    }

    public int getBook_id() {
        return book_id;
    }

    public void setBook_id(int book_id) {
        this.book_id = book_id;
    }

    public int getReader_id() {
        return reader_id;
    }

    public void setReader_id(int reader_id) {
        this.reader_id = reader_id;
    }
}
| [
"ashu.ashwinkg@gmail.com"
] | ashu.ashwinkg@gmail.com |
00d40af055a8f7c476db69430a10932b8b27f21e | 86818023c3891c3418e61ed231b8d88be68973f7 | /app/src/main/java/com/fmovies/app/models/GetCommentsModel.java | dc66d5fadea4053b7fa1ceb0febbf8940ccf25dc | [] | no_license | fandofastest/fmoviesnew | 9efc0be4c78f7b866c7f2e609a9358c90482d09e | c16229213be30d39f1f3a9e12aae32c0b6fa27ca | refs/heads/master | 2022-06-09T01:47:22.673993 | 2020-05-07T10:55:58 | 2020-05-07T10:55:58 | 262,024,492 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,768 | java | package com.fmovies.app.models;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
 * Gson model for one comment entry returned by the comments endpoint.
 * Java field names are mapped to JSON keys via {@code @SerializedName}.
 */
public class GetCommentsModel {

    // Unique id of this comment.
    @SerializedName("comments_id")
    @Expose
    private String commentsId;

    // Id of the video the comment belongs to.
    @SerializedName("videos_id")
    @Expose
    private String videosId;

    // Id of the commenting user.
    @SerializedName("user_id")
    @Expose
    private String userId;

    // Display name of the commenting user.
    @SerializedName("user_name")
    @Expose
    private String userName;

    // Avatar/profile image URL of the commenting user.
    @SerializedName("user_img_url")
    @Expose
    private String userImgUrl;

    // The comment text.
    @SerializedName("comments")
    @Expose
    private String comments;

    // Id of the parent comment this one replies to. NOTE: the wire key
    // really is "replay_id" (sic) -- it must match the server's JSON, so
    // do not "correct" the spelling here.
    @SerializedName("replay_id")
    @Expose
    private String replyId;

    public String getCommentsId() {
        return commentsId;
    }

    public void setCommentsId(String commentsId) {
        this.commentsId = commentsId;
    }

    public String getVideosId() {
        return videosId;
    }

    public void setVideosId(String videosId) {
        this.videosId = videosId;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getUserImgUrl() {
        return userImgUrl;
    }

    public void setUserImgUrl(String userImgUrl) {
        this.userImgUrl = userImgUrl;
    }

    public String getComments() {
        return comments;
    }

    public void setComments(String comments) {
        this.comments = comments;
    }

    public String getReplyId() {
        return replyId;
    }

    public void setReplyId(String replyId) {
        this.replyId = replyId;
    }
}
| [
"fandofast@gmail.com"
] | fandofast@gmail.com |
d3923c8d668d7edd68dbac8dbcba6ee1a19fb2bd | bfe2e8577792a47fca4b615917e7701f625565f8 | /class-11/stacksAndQueues/src/test/java/stacksAndQueues/AppTest.java | a00a2886ea24e37a04e44b09bd80ee3ed35302f6 | [] | no_license | jlollis/amazon-java-401d1 | 8c97e9971f0481b6298d457d93a1b248f9999e34 | 1aa8e5eac9089c3862b49665a2e35121612db915 | refs/heads/master | 2022-04-17T05:26:37.319299 | 2020-04-03T18:57:12 | 2020-04-03T18:57:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 357 | java | /*
* This Java source file was generated by the Gradle 'init' task.
*/
package stacksAndQueues;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Smoke test for {@code App}, scaffolded by the Gradle 'init' task.
 */
public class AppTest {

    // Verifies the app exposes a non-null greeting.
    @Test public void testAppHasAGreeting() {
        App classUnderTest = new App();
        assertNotNull("app should have a greeting", classUnderTest.getGreeting());
    }
}
| [
"nick.carignan@sbcglobal.net"
] | nick.carignan@sbcglobal.net |
9abbdda022a8aead72846054165c27ec7edc5b0c | 2c797364b7928807cb3c246aa5cb6a916c2b745e | /src/main/java/com/spoloborota/ocp/lambdas/javarush/OverridableImpl.java | 901c1a7d6d8a4b3934a59a2464ea81d02abb6023 | [] | no_license | Spoloborota/1Z0-809 | ed0604b5bdb08cb0de5923ae53100b8cd83fd82f | 5f428b46bd1ab7253ecd0666cb6142fb4763027f | refs/heads/master | 2021-01-17T16:04:23.621017 | 2017-04-20T16:18:53 | 2017-04-20T16:18:53 | 82,979,117 | 0 | 1 | null | null | null | null | UTF-8 | Java | false | false | 193 | java | package com.spoloborota.ocp.lambdas.javarush;
/**
 * Overrides the {@code notRequired()} method of {@code Defaulable},
 * replacing whatever default the interface provides with its own body.
 */
public class OverridableImpl implements Defaulable {

    @Override
    public String notRequired() {
        return "Overridden implementation";
    }
}
| [
"spoloborota@yandex.ru"
] | spoloborota@yandex.ru |
226ef277e16c9caf12b94f0cedba9d8419241455 | e42afd54dcc0add3d2b8823ee98a18c50023a396 | /java-game-servers/proto-google-cloud-game-servers-v1beta/src/main/java/com/google/cloud/gaming/v1beta/PreviewCreateGameServerClusterRequest.java | 4a9b946fcb5375db6d62353e513b6b0a56f19540 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | degloba/google-cloud-java | eea41ebb64f4128583533bc1547e264e730750e2 | b1850f15cd562c659c6e8aaee1d1e65d4cd4147e | refs/heads/master | 2022-07-07T17:29:12.510736 | 2022-07-04T09:19:33 | 2022-07-04T09:19:33 | 180,201,746 | 0 | 0 | Apache-2.0 | 2022-07-04T09:17:23 | 2019-04-08T17:42:24 | Java | UTF-8 | Java | false | false | 46,926 | java | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gaming/v1beta/game_server_clusters.proto
package com.google.cloud.gaming.v1beta;
/**
*
*
* <pre>
* Request message for GameServerClustersService.PreviewCreateGameServerCluster.
* </pre>
*
* Protobuf type {@code google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest}
*/
public final class PreviewCreateGameServerClusterRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest)
PreviewCreateGameServerClusterRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use PreviewCreateGameServerClusterRequest.newBuilder() to construct.
private PreviewCreateGameServerClusterRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PreviewCreateGameServerClusterRequest() {
parent_ = "";
gameServerClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PreviewCreateGameServerClusterRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private PreviewCreateGameServerClusterRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
parent_ = s;
break;
}
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
gameServerClusterId_ = s;
break;
}
case 26:
{
com.google.cloud.gaming.v1beta.GameServerCluster.Builder subBuilder = null;
if (gameServerCluster_ != null) {
subBuilder = gameServerCluster_.toBuilder();
}
gameServerCluster_ =
input.readMessage(
com.google.cloud.gaming.v1beta.GameServerCluster.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(gameServerCluster_);
gameServerCluster_ = subBuilder.buildPartial();
}
break;
}
case 34:
{
com.google.protobuf.Timestamp.Builder subBuilder = null;
if (previewTime_ != null) {
subBuilder = previewTime_.toBuilder();
}
previewTime_ =
input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(previewTime_);
previewTime_ = subBuilder.buildPartial();
}
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gaming.v1beta.GameServerClusters
.internal_static_google_cloud_gaming_v1beta_PreviewCreateGameServerClusterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gaming.v1beta.GameServerClusters
.internal_static_google_cloud_gaming_v1beta_PreviewCreateGameServerClusterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest.class,
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
private volatile java.lang.Object parent_;
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int GAME_SERVER_CLUSTER_ID_FIELD_NUMBER = 2;
private volatile java.lang.Object gameServerClusterId_;
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The gameServerClusterId.
*/
@java.lang.Override
public java.lang.String getGameServerClusterId() {
java.lang.Object ref = gameServerClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
gameServerClusterId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for gameServerClusterId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getGameServerClusterIdBytes() {
java.lang.Object ref = gameServerClusterId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
gameServerClusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int GAME_SERVER_CLUSTER_FIELD_NUMBER = 3;
private com.google.cloud.gaming.v1beta.GameServerCluster gameServerCluster_;
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the gameServerCluster field is set.
*/
@java.lang.Override
public boolean hasGameServerCluster() {
return gameServerCluster_ != null;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The gameServerCluster.
*/
@java.lang.Override
public com.google.cloud.gaming.v1beta.GameServerCluster getGameServerCluster() {
return gameServerCluster_ == null
? com.google.cloud.gaming.v1beta.GameServerCluster.getDefaultInstance()
: gameServerCluster_;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.gaming.v1beta.GameServerClusterOrBuilder getGameServerClusterOrBuilder() {
    // On an immutable message the OrBuilder view is just the message itself.
    return getGameServerCluster();
  }
public static final int PREVIEW_TIME_FIELD_NUMBER = 4;
private com.google.protobuf.Timestamp previewTime_;
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the previewTime field is set.
*/
  @java.lang.Override
  public boolean hasPreviewTime() {
    // Message-typed fields track presence explicitly: set iff non-null.
    return previewTime_ != null;
  }
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The previewTime.
*/
  @java.lang.Override
  public com.google.protobuf.Timestamp getPreviewTime() {
    // Never returns null: falls back to the default Timestamp when the field is unset.
    return previewTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : previewTime_;
  }
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public com.google.protobuf.TimestampOrBuilder getPreviewTimeOrBuilder() {
    // On an immutable message the OrBuilder view is just the message itself.
    return getPreviewTime();
  }
  // Memoized initialization check: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized;
    // the result is cached for subsequent calls.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Each field is written only when set: strings when non-empty,
    // message fields when non-null (proto3 presence semantics).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gameServerClusterId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, gameServerClusterId_);
    }
    if (gameServerCluster_ != null) {
      output.writeMessage(3, getGameServerCluster());
    }
    if (previewTime_ != null) {
      output.writeMessage(4, getPreviewTime());
    }
    // Round-trip any fields that were unknown at parse time.
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize of -1 means "not yet computed"; the size is computed once
    // and cached, which is safe because the message is immutable.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    // Mirrors writeTo(): only set fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gameServerClusterId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, gameServerClusterId_);
    }
    if (gameServerCluster_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getGameServerCluster());
    }
    if (previewTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getPreviewTime());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest other =
        (com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest) obj;

    // Value-based comparison of every field. Message-typed fields are compared
    // for presence first, and by value only when both sides have them set.
    if (!getParent().equals(other.getParent())) return false;
    if (!getGameServerClusterId().equals(other.getGameServerClusterId())) return false;
    if (hasGameServerCluster() != other.hasGameServerCluster()) return false;
    if (hasGameServerCluster()) {
      if (!getGameServerCluster().equals(other.getGameServerCluster())) return false;
    }
    if (hasPreviewTime() != other.hasPreviewTime()) return false;
    if (hasPreviewTime()) {
      if (!getPreviewTime().equals(other.getPreviewTime())) return false;
    }
    // Unknown fields participate in equality as well.
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 means "not yet computed"; caching is safe because the message is immutable.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Mixes the descriptor hash with each set field's number and value hash,
    // keeping hashCode consistent with equals() above.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + GAME_SERVER_CLUSTER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getGameServerClusterId().hashCode();
    if (hasGameServerCluster()) {
      hash = (37 * hash) + GAME_SERVER_CLUSTER_FIELD_NUMBER;
      hash = (53 * hash) + getGameServerCluster().hashCode();
    }
    if (hasPreviewTime()) {
      hash = (37 * hash) + PREVIEW_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getPreviewTime().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for GameServerClustersService.PreviewCreateGameServerCluster.
* </pre>
*
* Protobuf type {@code google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest)
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gaming.v1beta.GameServerClusters
.internal_static_google_cloud_gaming_v1beta_PreviewCreateGameServerClusterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gaming.v1beta.GameServerClusters
.internal_static_google_cloud_gaming_v1beta_PreviewCreateGameServerClusterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest.class,
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest.Builder.class);
}
// Construct using
// com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
parent_ = "";
gameServerClusterId_ = "";
if (gameServerClusterBuilder_ == null) {
gameServerCluster_ = null;
} else {
gameServerCluster_ = null;
gameServerClusterBuilder_ = null;
}
if (previewTimeBuilder_ == null) {
previewTime_ = null;
} else {
previewTime_ = null;
previewTimeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gaming.v1beta.GameServerClusters
.internal_static_google_cloud_gaming_v1beta_PreviewCreateGameServerClusterRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
getDefaultInstanceForType() {
return com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest build() {
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest buildPartial() {
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest result =
new com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest(this);
result.parent_ = parent_;
result.gameServerClusterId_ = gameServerClusterId_;
if (gameServerClusterBuilder_ == null) {
result.gameServerCluster_ = gameServerCluster_;
} else {
result.gameServerCluster_ = gameServerClusterBuilder_.build();
}
if (previewTimeBuilder_ == null) {
result.previewTime_ = previewTime_;
} else {
result.previewTime_ = previewTimeBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest) {
return mergeFrom(
(com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest other) {
if (other
== com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
onChanged();
}
if (!other.getGameServerClusterId().isEmpty()) {
gameServerClusterId_ = other.gameServerClusterId_;
onChanged();
}
if (other.hasGameServerCluster()) {
mergeGameServerCluster(other.getGameServerCluster());
}
if (other.hasPreviewTime()) {
mergePreviewTime(other.getPreviewTime());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource name. Uses the form:
* `projects/{project}/locations/{location}/realms/{realm}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
onChanged();
return this;
}
private java.lang.Object gameServerClusterId_ = "";
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The gameServerClusterId.
*/
public java.lang.String getGameServerClusterId() {
java.lang.Object ref = gameServerClusterId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
gameServerClusterId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for gameServerClusterId.
*/
public com.google.protobuf.ByteString getGameServerClusterIdBytes() {
java.lang.Object ref = gameServerClusterId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
gameServerClusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The gameServerClusterId to set.
* @return This builder for chaining.
*/
public Builder setGameServerClusterId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
gameServerClusterId_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearGameServerClusterId() {
gameServerClusterId_ = getDefaultInstance().getGameServerClusterId();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID of the game server cluster resource to be created.
* </pre>
*
* <code>string game_server_cluster_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for gameServerClusterId to set.
* @return This builder for chaining.
*/
public Builder setGameServerClusterIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
gameServerClusterId_ = value;
onChanged();
return this;
}
private com.google.cloud.gaming.v1beta.GameServerCluster gameServerCluster_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gaming.v1beta.GameServerCluster,
com.google.cloud.gaming.v1beta.GameServerCluster.Builder,
com.google.cloud.gaming.v1beta.GameServerClusterOrBuilder>
gameServerClusterBuilder_;
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the gameServerCluster field is set.
*/
public boolean hasGameServerCluster() {
return gameServerClusterBuilder_ != null || gameServerCluster_ != null;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The gameServerCluster.
*/
public com.google.cloud.gaming.v1beta.GameServerCluster getGameServerCluster() {
if (gameServerClusterBuilder_ == null) {
return gameServerCluster_ == null
? com.google.cloud.gaming.v1beta.GameServerCluster.getDefaultInstance()
: gameServerCluster_;
} else {
return gameServerClusterBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGameServerCluster(com.google.cloud.gaming.v1beta.GameServerCluster value) {
if (gameServerClusterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
gameServerCluster_ = value;
onChanged();
} else {
gameServerClusterBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGameServerCluster(
com.google.cloud.gaming.v1beta.GameServerCluster.Builder builderForValue) {
if (gameServerClusterBuilder_ == null) {
gameServerCluster_ = builderForValue.build();
onChanged();
} else {
gameServerClusterBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeGameServerCluster(com.google.cloud.gaming.v1beta.GameServerCluster value) {
if (gameServerClusterBuilder_ == null) {
if (gameServerCluster_ != null) {
gameServerCluster_ =
com.google.cloud.gaming.v1beta.GameServerCluster.newBuilder(gameServerCluster_)
.mergeFrom(value)
.buildPartial();
} else {
gameServerCluster_ = value;
}
onChanged();
} else {
gameServerClusterBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGameServerCluster() {
if (gameServerClusterBuilder_ == null) {
gameServerCluster_ = null;
onChanged();
} else {
gameServerCluster_ = null;
gameServerClusterBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.gaming.v1beta.GameServerCluster.Builder getGameServerClusterBuilder() {
onChanged();
return getGameServerClusterFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.gaming.v1beta.GameServerClusterOrBuilder
getGameServerClusterOrBuilder() {
if (gameServerClusterBuilder_ != null) {
return gameServerClusterBuilder_.getMessageOrBuilder();
} else {
return gameServerCluster_ == null
? com.google.cloud.gaming.v1beta.GameServerCluster.getDefaultInstance()
: gameServerCluster_;
}
}
/**
*
*
* <pre>
* Required. The game server cluster resource to be created.
* </pre>
*
* <code>
* .google.cloud.gaming.v1beta.GameServerCluster game_server_cluster = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gaming.v1beta.GameServerCluster,
com.google.cloud.gaming.v1beta.GameServerCluster.Builder,
com.google.cloud.gaming.v1beta.GameServerClusterOrBuilder>
getGameServerClusterFieldBuilder() {
if (gameServerClusterBuilder_ == null) {
gameServerClusterBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gaming.v1beta.GameServerCluster,
com.google.cloud.gaming.v1beta.GameServerCluster.Builder,
com.google.cloud.gaming.v1beta.GameServerClusterOrBuilder>(
getGameServerCluster(), getParentForChildren(), isClean());
gameServerCluster_ = null;
}
return gameServerClusterBuilder_;
}
private com.google.protobuf.Timestamp previewTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
previewTimeBuilder_;
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the previewTime field is set.
*/
public boolean hasPreviewTime() {
return previewTimeBuilder_ != null || previewTime_ != null;
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The previewTime.
*/
public com.google.protobuf.Timestamp getPreviewTime() {
if (previewTimeBuilder_ == null) {
return previewTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: previewTime_;
} else {
return previewTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setPreviewTime(com.google.protobuf.Timestamp value) {
if (previewTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
previewTime_ = value;
onChanged();
} else {
previewTimeBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setPreviewTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (previewTimeBuilder_ == null) {
previewTime_ = builderForValue.build();
onChanged();
} else {
previewTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergePreviewTime(com.google.protobuf.Timestamp value) {
if (previewTimeBuilder_ == null) {
if (previewTime_ != null) {
previewTime_ =
com.google.protobuf.Timestamp.newBuilder(previewTime_)
.mergeFrom(value)
.buildPartial();
} else {
previewTime_ = value;
}
onChanged();
} else {
previewTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearPreviewTime() {
if (previewTimeBuilder_ == null) {
previewTime_ = null;
onChanged();
} else {
previewTime_ = null;
previewTimeBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.Timestamp.Builder getPreviewTimeBuilder() {
onChanged();
return getPreviewTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.TimestampOrBuilder getPreviewTimeOrBuilder() {
if (previewTimeBuilder_ != null) {
return previewTimeBuilder_.getMessageOrBuilder();
} else {
return previewTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: previewTime_;
}
}
/**
*
*
* <pre>
* Optional. The target timestamp to compute the preview.
* </pre>
*
* <code>.google.protobuf.Timestamp preview_time = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getPreviewTimeFieldBuilder() {
if (previewTimeBuilder_ == null) {
previewTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getPreviewTime(), getParentForChildren(), isClean());
previewTime_ = null;
}
return previewTimeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest)
private static final com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest();
}
public static com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<PreviewCreateGameServerClusterRequest> PARSER =
new com.google.protobuf.AbstractParser<PreviewCreateGameServerClusterRequest>() {
@java.lang.Override
public PreviewCreateGameServerClusterRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PreviewCreateGameServerClusterRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<PreviewCreateGameServerClusterRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<PreviewCreateGameServerClusterRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.gaming.v1beta.PreviewCreateGameServerClusterRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| [
"neenushaji@google.com"
] | neenushaji@google.com |
58d3b9343ee8e20202848c15f30a0ff2a590e3fb | 537f50e879a420fb2b3800b491cc57ae423fb042 | /DroidSkyWrite/src/com/skywrite/droidskywrite/CloudMaker.java | 6fff50c072c631a4425c0a51f078a9b9d1d759e0 | [] | no_license | won21kr/SkywriteForAndroid | ed5d6921e5f2b6f80d3528fa9397f9c8739fc734 | b761e4bb09746d2c1caa9e1afe7ec51e0982dbed | refs/heads/master | 2020-12-28T23:23:04.375499 | 2014-02-09T10:03:00 | 2014-02-09T10:03:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,272 | java | package com.skywrite.droidskywrite;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import com.parse.ParseGeoPoint;
import com.parse.ParseObject;
import com.parse.ParseUser;
public class CloudMaker extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_cloud_maker);
// Show the Up button in the action bar.
setupActionBar();
}
/**
* Set up the {@link android.app.ActionBar}, if the API is available.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void setupActionBar() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
getActionBar().setDisplayHomeAsUpEnabled(true);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.cloud_maker, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
// This ID represents the Home or Up button. In the case of this
// activity, the Up button is shown. Use NavUtils to allow users
// to navigate up one level in the application structure. For
// more details, see the Navigation pattern on Android Design:
//
// http://developer.android.com/design/patterns/navigation.html#up-vs-back
//
NavUtils.navigateUpFromSameTask(this);
return true;
}
return super.onOptionsItemSelected(item);
}
public void condenseCloud(View view){
ParseUser currentUser = ParseUser.getCurrentUser();
ParseObject cloud = new ParseObject("clouds");
cloud.put("user", currentUser.get("username"));
cloud.put("cumulus", 0);
cloud.put("cords", new ParseGeoPoint(40.0, -87.0));
EditText text = (EditText) findViewById(R.id.edit_content);
cloud.put("text", text.getText().toString());
cloud.saveInBackground();
Intent intent = new Intent(this, Skywrite.class);
startActivity(intent);
}
}
| [
"bearcat.kiwi@gmail.com"
] | bearcat.kiwi@gmail.com |
af384639e8ac4c3baf8a1da23ab839fa8dfb08df | 70eeec2812cf6528b33cc079ffbef8ff394a1200 | /src/main/java/edu/nju/Vo/checkstyle/SubType.java | 2b46a8b924007998384b024dee75619916c70e4a | [] | no_license | cylong1016/CodeAnalyze | 9dcbd1051c859fce59c3d176d43f517c8db435a4 | e651a3960c269b823d9dc4160724ae5142bf4d43 | refs/heads/master | 2021-01-17T14:03:47.936892 | 2017-06-12T08:21:54 | 2017-06-12T08:21:54 | 83,425,083 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 711 | java | package edu.nju.Vo.checkstyle;
/**
* Created by Administrator on 2017/4/14.
*/
public class SubType {
private long id;
private String text;
private boolean status;
public SubType(long id, String name, boolean status){
this.id = id;
this.text = name;
this.status = status;
}
public boolean getStatus() {
return status;
}
public void setStatus(boolean status) {
this.status = status;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
| [
"694856297@qq.com"
] | 694856297@qq.com |
6b19c9d095f0af11526a2b67121617134485c37f | ee48d81f7ce8b55bf1f172ab956e58f1a2b4f399 | /onlineshoppingbackend/src/main/java/met/edu/onlineshoppingbackend/App.java | e9e1d51b07e1d8c1db6e1ae5b2ba8abbc92125d3 | [] | no_license | bhagyeshparab/onlinespareparts-shopping | eb776348922d970c2286adfaf509eb0b70ce89f3 | eba90635c080a7c01f007d14167abeb522c30149 | refs/heads/master | 2021-01-01T17:51:06.280896 | 2017-07-24T19:45:24 | 2017-07-24T19:45:24 | 98,177,309 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 205 | java | package met.edu.onlineshoppingbackend;
/**
* Hello world!
*
*/
public class App
{
public static void main( String[] args )
{
System.out.println( "Hello World!" );
}
}
| [
"bhagyeshparab24395@gmail.com"
] | bhagyeshparab24395@gmail.com |
b09eda8797bae08077ebcbe7fff61b53694d5d60 | 0c7a1f6917e9db647daecc974d63ff11b69b0fda | /TruyenTNV_Web/src/main/java/vn/com/fis/model/mnpcm/CustomerType.java | 709b8f818d8d546ed67e381d8fc75ec5896dea19 | [] | no_license | rsfirst/TruyenTNV | 3971441a2a222f72a06517a74f4bff6f684e38a8 | 091f29039054975b61f107c52a53e38311931a40 | refs/heads/master | 2022-07-05T23:08:14.312035 | 2020-03-11T03:58:37 | 2020-03-11T03:58:37 | 241,400,377 | 0 | 0 | null | 2022-06-29T18:00:36 | 2020-02-18T15:44:13 | JavaScript | UTF-8 | Java | false | false | 571 | java | package vn.com.fis.model.mnpcm;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
@Entity
public class CustomerType {
@Id
@Column(name = "VALUE")
String idCustomer;
@Column(name = "NAME")
String titleCustomer;
public String getIdCustomer() {
return idCustomer;
}
public void setIdCustomer(String idCustomer) {
this.idCustomer = idCustomer;
}
public String getTitleCustomer() {
return titleCustomer;
}
public void setTitleCustomer(String titleCustomer) {
this.titleCustomer = titleCustomer;
}
}
| [
"anhvuong6996@gmail.com"
] | anhvuong6996@gmail.com |
e7de0342124896c76e145c62dcb7b0da2aa63990 | 626ebdd2cfd8d6b18b6551bcdd5b02ec9d72a096 | /Lab №6/LABA6/src/test/java/L6T3.java | 819d5dd77bfb067561d04e46ed4a49bc162bd3a0 | [] | no_license | Yaroslaa/QA | c81b2ae0a58b3e478aa59136921d3166764dd53c | f7c0e51c96de282d49077ba9fdf62c08eaab20e9 | refs/heads/master | 2020-06-05T10:53:55.881443 | 2019-06-19T07:51:13 | 2019-06-19T07:51:13 | 192,415,737 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,628 | java | import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
public class L6T3 {
static ChromeDriver driver;
List<WebElement> nameElem;
int numberPages;
boolean condition = true;
@BeforeClass
public static void setDriver(){
System.setProperty("webdriver.chrome.driver", "D:\\Универ\\2 курс -4сем-\\QA\\6\\chromedriver_win32\\chromedriver.exe");
driver = new ChromeDriver();
driver.get("https://pn.com.ua/");
}
@Test
public void test1() {
driver.findElement(By.xpath(".//*[@id='column-center']/section/div[5]/ul/li[1]/a")).click();
driver.findElement(By.xpath(".//*[@id='producers-filter-block']/div[3]/span[3]/a")).click();
nameElem = driver.findElements(By.xpath(".//*[@id='column-center']/section/div[3]/ul/li/article/div[2]/div[1]/a"));
for(int i=0; i<nameElem.size()-1;i++){
String labelSofa = nameElem.get(i).getText(); String brandSofa = "AMF";
char[] brandlabelSofa = labelSofa.toCharArray(); char[] brandSofaa = brandSofa.toCharArray(); for (int j=0;j<brandSofaa.length;j++){
if(brandlabelSofa[j]!=brandSofaa[j]){
condition = false;
}}}Assert.assertEquals(condition,true);}
@AfterClass
public static void closeWeb(){
driver.close();}}
| [
"noreply@github.com"
] | Yaroslaa.noreply@github.com |
121b17bb1e61bb8aa7e55cf71251c7870eac0a40 | fd30c55b5a73ce880747d5388fa9ee48be86ced9 | /src/main/java/com/github/mangelt/sat/services/model/ExpedidoEn.java | 2412f106906510bc98627e351d666587bd2d7b16 | [] | no_license | miguelfreelancer56577/sat-services-spring-boot-starter | d3eab605407034113f6008e3bb9e7d0731ed891d | 6f5f9eb1880e06eb1ad4593eadacf6c7e7f2a092 | refs/heads/master | 2021-06-19T12:38:19.581654 | 2019-12-10T23:38:38 | 2019-12-10T23:38:38 | 201,131,776 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 7,507 | java | //
// Este archivo ha sido generado por la arquitectura JavaTM para la implantación de la referencia de enlace (JAXB) XML v2.2.11
// Visite <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Todas las modificaciones realizadas en este archivo se perderán si se vuelve a compilar el esquema de origen.
// Generado el: 2017.05.29 a las 09:10:41 AM CDT
//
package com.github.mangelt.sat.services.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import com.fasterxml.jackson.annotation.JsonInclude;
/**
* <p>Clase Java para anonymous complex type.
*
* <p>El siguiente fragmento de esquema especifica el contenido que se espera que haya en esta clase.
*
* <pre>
* <complexType>
* <simpleContent>
* <extension base="<http://www.w3.org/2001/XMLSchema>string">
* <attribute name="codigoPostal" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="pais" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="estado" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="municipio" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="colonia" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="calle" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="localidad" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="noInterior" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="noExterior" type="{http://www.w3.org/2001/XMLSchema}string" />
* </extension>
* </simpleContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"value"
})
@XmlRootElement(name = "ExpedidoEn")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
public class ExpedidoEn {
@XmlValue
protected String value;
@XmlAttribute(name = "codigoPostal")
protected String codigoPostal;
@XmlAttribute(name = "pais")
protected String pais;
@XmlAttribute(name = "estado")
protected String estado;
@XmlAttribute(name = "municipio")
protected String municipio;
@XmlAttribute(name = "colonia")
protected String colonia;
@XmlAttribute(name = "calle")
protected String calle;
@XmlAttribute(name = "localidad")
protected String localidad;
@XmlAttribute(name = "noInterior")
protected String noInterior;
@XmlAttribute(name = "noExterior")
protected String noExterior;
/**
* Obtiene el valor de la propiedad value.
*
* @return
* possible object is
* {@link String }
*
*/
public String getValue() {
return value;
}
/**
* Define el valor de la propiedad value.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setValue(String value) {
this.value = value;
}
/**
* Obtiene el valor de la propiedad codigoPostal.
*
* @return
* possible object is
* {@link String }
*
*/
public String getCodigoPostal() {
return codigoPostal;
}
/**
* Define el valor de la propiedad codigoPostal.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setCodigoPostal(String value) {
this.codigoPostal = value;
}
/**
* Obtiene el valor de la propiedad pais.
*
* @return
* possible object is
* {@link String }
*
*/
public String getPais() {
return pais;
}
/**
* Define el valor de la propiedad pais.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setPais(String value) {
this.pais = value;
}
/**
* Obtiene el valor de la propiedad estado.
*
* @return
* possible object is
* {@link String }
*
*/
public String getEstado() {
return estado;
}
/**
* Define el valor de la propiedad estado.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setEstado(String value) {
this.estado = value;
}
/**
* Obtiene el valor de la propiedad municipio.
*
* @return
* possible object is
* {@link String }
*
*/
public String getMunicipio() {
return municipio;
}
/**
* Define el valor de la propiedad municipio.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setMunicipio(String value) {
this.municipio = value;
}
/**
* Obtiene el valor de la propiedad colonia.
*
* @return
* possible object is
* {@link String }
*
*/
public String getColonia() {
return colonia;
}
/**
* Define el valor de la propiedad colonia.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setColonia(String value) {
this.colonia = value;
}
/**
* Obtiene el valor de la propiedad calle.
*
* @return
* possible object is
* {@link String }
*
*/
public String getCalle() {
return calle;
}
/**
* Define el valor de la propiedad calle.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setCalle(String value) {
this.calle = value;
}
/**
* Obtiene el valor de la propiedad localidad.
*
* @return
* possible object is
* {@link String }
*
*/
public String getLocalidad() {
return localidad;
}
/**
* Define el valor de la propiedad localidad.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setLocalidad(String value) {
this.localidad = value;
}
/**
* Obtiene el valor de la propiedad noInterior.
*
* @return
* possible object is
* {@link String }
*
*/
public String getNoInterior() {
return noInterior;
}
/**
* Define el valor de la propiedad noInterior.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setNoInterior(String value) {
this.noInterior = value;
}
/**
* Obtiene el valor de la propiedad noExterior.
*
* @return
* possible object is
* {@link String }
*
*/
public String getNoExterior() {
return noExterior;
}
/**
* Define el valor de la propiedad noExterior.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setNoExterior(String value) {
this.noExterior = value;
}
}
| [
"miguelfreelancer56577@gmail.com"
] | miguelfreelancer56577@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.