| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
| 0 | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/Ltp3Tap.java |
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: Ltp3Tap.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: Ltp3Tap.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Long Term Prediction Quantisation and Unquantisation (3Tap)
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class Ltp3Tap
extends Ltp
{
/** */
private float[] gain;
/** */
private int[] gain_cdbk;
/** */
private int gain_bits;
/** */
private int pitch_bits;
/** */
private float[][] e;
/**
* Constructor
* @param gain_cdbk
* @param gain_bits
* @param pitch_bits
*/
public Ltp3Tap(final int[] gain_cdbk,
final int gain_bits,
final int pitch_bits)
{
this.gain = new float[3];
this.gain_cdbk = gain_cdbk;
this.gain_bits = gain_bits;
this.pitch_bits = pitch_bits;
this.e = new float[3][128];
}
/**
* Long Term Prediction Quantification (3Tap).
* @return pitch
*/
public final int quant(float[] target, float[] sw, int sws, float[] ak, float[] awk1, float[] awk2,
float[] exc, int es, int start, int end, float pitch_coef, int p,
int nsf, Bits bits, float[] exc2, int e2s, float[] r, int complexity)
{
int i,j;
int[] cdbk_index = new int[1];
int pitch=0, best_gain_index=0;
float[] best_exc;
int best_pitch=0;
float err, best_err=-1;
int N;
int[] nbest;
float[] gains;
N=complexity;
if (N>10)
N=10;
nbest=new int[N];
gains = new float[N];
if (N==0 || end<start) {
bits.pack(0, pitch_bits);
bits.pack(0, gain_bits);
for (i=0;i<nsf;i++)
exc[es+i]=0;
return start;
}
best_exc=new float[nsf];
if (N>end-start+1)
N=end-start+1;
open_loop_nbest_pitch(sw, sws, start, end, nsf, nbest, gains, N);
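/* The open-loop search proposes N candidate pitch lags; each candidate is refined by the closed-loop 3-tap gain search below, and the lag with the lowest residual error is kept. */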
for (i=0;i<N;i++) {
pitch=nbest[i];
for (j=0;j<nsf;j++)
exc[es+j]=0;
err=pitch_gain_search_3tap(target, ak, awk1, awk2, exc, es, pitch, p, nsf,
bits, exc2, e2s, r, cdbk_index);
if (err<best_err || best_err<0) {
for (j=0;j<nsf;j++)
best_exc[j]=exc[es+j];
best_err=err;
best_pitch=pitch;
best_gain_index=cdbk_index[0];
}
}
bits.pack(best_pitch-start, pitch_bits);
bits.pack(best_gain_index, gain_bits);
for (i=0;i<nsf;i++)
exc[es+i]=best_exc[i];
return pitch;
}
/**
* Long Term Prediction Unquantification (3Tap).
* @param exc - Excitation
* @param es - Excitation offset
* @param start - Smallest pitch value allowed
* @param pitch_coef - Voicing (pitch) coefficient
* @param nsf - Number of samples in subframe
* @param gain_val
* @param bits - Speex bits buffer.
* @param count_lost
* @param subframe_offset
* @param last_pitch_gain
* @return pitch
*/
public final int unquant(float[] exc, int es, int start, float pitch_coef,
int nsf, float[] gain_val, Bits bits,
int count_lost, int subframe_offset, float last_pitch_gain)
{
int i, pitch, gain_index;
pitch = bits.unpack(pitch_bits);
pitch += start;
gain_index = bits.unpack(gain_bits);
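// Codebook entries are stored as integers scaled by 64 and offset by -0.5, so decoding multiplies by 1/64 (0.015625f) and adds 0.5f.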
gain[0] = 0.015625f*(float)gain_cdbk[gain_index*3]+.5f;
gain[1] = 0.015625f*(float)gain_cdbk[gain_index*3+1]+.5f;
gain[2] = 0.015625f*(float)gain_cdbk[gain_index*3+2]+.5f;
if (count_lost != 0 && pitch > subframe_offset)
{
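// After lost frames, cap the summed 3-tap gain at a limit derived from the last good pitch gain, so concealment errors decay rather than build up.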
float gain_sum = Math.abs(gain[1]);
float tmp = count_lost < 4 ? last_pitch_gain : 0.4f * last_pitch_gain;
if (tmp>.95f)
tmp=.95f;
if (gain[0]>0)
gain_sum += gain[0];
else
gain_sum -= .5f*gain[0];
if (gain[2]>0)
gain_sum += gain[2];
else
gain_sum -= .5f*gain[2];
if (gain_sum > tmp) {
float fact = tmp/gain_sum;
for (i=0;i<3;i++)
gain[i]*=fact;
}
}
gain_val[0]=gain[0];
gain_val[1]=gain[1];
gain_val[2]=gain[2];
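/* Build three copies of the excitation delayed by pitch-1, pitch and pitch+1 samples (pp = pitch+1-i); once j-pp points into the not-yet-reconstructed part of the subframe, read one pitch period further back instead, and zero anything beyond that. */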
for (i=0;i<3;i++)
{
int j, tmp1, tmp2, pp=pitch+1-i;
tmp1=nsf;
if (tmp1>pp)
tmp1=pp;
tmp2=nsf;
if (tmp2>pp+pitch)
tmp2=pp+pitch;
for (j=0;j<tmp1;j++)
e[i][j]=exc[es+j-pp];
for (j=tmp1;j<tmp2;j++)
e[i][j]=exc[es+j-pp-pitch];
for (j=tmp2;j<nsf;j++)
e[i][j]=0;
}
for (i=0;i<nsf;i++) {
exc[es+i]=gain[0]*e[2][i]+gain[1]*e[1][i]+gain[2]*e[0][i];
}
return pitch;
}
/**
* Finds the best quantized 3-tap pitch predictor by analysis by synthesis.
* @param target Target vector
* @param ak LPCs for this subframe
* @param awk1 Weighted LPCs #1 for this subframe
* @param awk2 Weighted LPCs #2 for this subframe
* @param exc Excitation
* @param es
* @param pitch Pitch value
* @param p Number of LPC coeffs
* @param nsf Number of samples in subframe
* @param bits
* @param exc2
* @param e2s
* @param r
* @param cdbk_index
* @return the best quantized 3-tap pitch predictor by analysis by synthesis.
*/
private float pitch_gain_search_3tap(final float[] target,
final float[] ak,
final float[] awk1,
final float[] awk2,
final float[] exc,
final int es,
final int pitch,
final int p,
final int nsf,
final Bits bits,
final float[] exc2,
final int e2s,
final float[] r,
final int[] cdbk_index
)
{
int i,j;
float[][] x;
// float[][] e;
float[] corr = new float[3];
float[][] A = new float[3][3];
int gain_cdbk_size;
float err1,err2;
gain_cdbk_size=1<<gain_bits;
x = new float[3][nsf];
e = new float[3][nsf];
for (i=2; i>=0; i--) {
int pp=pitch+1-i;
for (j=0; j<nsf; j++) {
if (j-pp<0)
e[i][j]=exc2[e2s+j-pp];
else if (j-pp-pitch<0)
e[i][j]=exc2[e2s+j-pp-pitch];
else
e[i][j]=0;
}
if (i==2)
Filters.syn_percep_zero(e[i], 0, ak, awk1, awk2, x[i], nsf, p);
else {
for (j=0;j<nsf-1;j++)
x[i][j+1]=x[i+1][j];
x[i][0]=0;
for (j=0;j<nsf;j++)
x[i][j]+=e[i][0]*r[j];
}
}
for (i=0; i<3; i++)
corr[i] = inner_prod(x[i], 0, target, 0, nsf);
for (i=0; i<3; i++)
for (j=0; j<=i; j++)
A[i][j] = A[j][i] = inner_prod(x[i], 0, x[j], 0, nsf);
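/* For each codebook entry (g0,g1,g2), "sum" below evaluates <target,y> - 0.5*||y||^2 with y = g0*x[2] + g1*x[1] + g2*x[0]; since ||target-y||^2 = ||target||^2 - 2*sum, maximising sum minimises the residual error. */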
{
float[] C = new float[9];
int ptr=0;
int best_cdbk=0;
float best_sum=0;
C[0]=corr[2];
C[1]=corr[1];
C[2]=corr[0];
C[3]=A[1][2];
C[4]=A[0][1];
C[5]=A[0][2];
C[6]=A[2][2];
C[7]=A[1][1];
C[8]=A[0][0];
for (i=0; i<gain_cdbk_size; i++) {
float sum=0;
float g0,g1,g2;
ptr = 3*i;
g0=0.015625f*gain_cdbk[ptr]+.5f;
g1=0.015625f*gain_cdbk[ptr+1]+.5f;
g2=0.015625f*gain_cdbk[ptr+2]+.5f;
sum += C[0]*g0;
sum += C[1]*g1;
sum += C[2]*g2;
sum -= C[3]*g0*g1;
sum -= C[4]*g2*g1;
sum -= C[5]*g2*g0;
sum -= .5f*C[6]*g0*g0;
sum -= .5f*C[7]*g1*g1;
sum -= .5f*C[8]*g2*g2;
/* If true, force "safe" pitch values to handle packet loss better */
if (false) {
float tot = Math.abs(gain_cdbk[ptr+1]);
if (gain_cdbk[ptr]>0)
tot+=gain_cdbk[ptr];
if (gain_cdbk[ptr+2]>0)
tot+=gain_cdbk[ptr+2];
if (tot>1)
continue;
}
if (sum>best_sum || i==0)
{
best_sum=sum;
best_cdbk=i;
}
}
gain[0] = 0.015625f*gain_cdbk[best_cdbk*3] + .5f;
gain[1] = 0.015625f*gain_cdbk[best_cdbk*3+1]+ .5f;
gain[2] = 0.015625f*gain_cdbk[best_cdbk*3+2]+ .5f;
cdbk_index[0]=best_cdbk;
}
for (i=0; i<nsf; i++)
exc[es+i]=gain[0]*e[2][i]+gain[1]*e[1][i]+gain[2]*e[0][i];
err1=0;
err2=0;
for (i=0; i<nsf; i++)
err1+=target[i]*target[i];
for (i=0; i<nsf; i++)
err2+=(target[i]-gain[2]*x[0][i]-gain[1]*x[1][i]-gain[0]*x[2][i])
* (target[i]-gain[2]*x[0][i]-gain[1]*x[1][i]-gain[0]*x[2][i]);
return err2;
}
}
|
| 0 | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/LtpForcedPitch.java |
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: LtpForcedPitch.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: LtpForcedPitch.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Long Term Prediction Quantisation and Unquantisation (Forced Pitch)
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class LtpForcedPitch
extends Ltp
{
/**
* Long Term Prediction Quantification (Forced Pitch).
* @return pitch
*/
public final int quant(float[] target, float[] sw, int sws, float[] ak, float[] awk1, float[] awk2,
float[] exc, int es, int start, int end, float pitch_coef, int p,
int nsf, Bits bits, float[] exc2, int e2s, float[] r, int complexity)
{
int i;
if (pitch_coef>.99f)
pitch_coef=.99f;
for (i=0;i<nsf;i++) {
exc[es+i]=exc[es+i-start]*pitch_coef;
}
return start;
}
/**
* Long Term Prediction Unquantification (Forced Pitch).
* @param exc - Excitation
* @param es - Excitation offset
* @param start - Smallest pitch value allowed
* @param pitch_coef - Voicing (pitch) coefficient
* @param nsf - Number of samples in subframe
* @param gain_val
* @param bits - Speex bits buffer.
* @param count_lost
* @param subframe_offset
* @param last_pitch_gain
* @return pitch
*/
public final int unquant(float[] exc, int es, int start, float pitch_coef,
int nsf, float[] gain_val, Bits bits,
int count_lost, int subframe_offset, float last_pitch_gain)
{
int i;
if (pitch_coef>.99f) {
pitch_coef=.99f;
}
for (i=0;i<nsf;i++) {
exc[es+i]=exc[es+i-start]*pitch_coef;
}
gain_val[0] = gain_val[2] = 0;
gain_val[1] = pitch_coef;
return start;
}
}
|
| 0 | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/Misc.java |
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: Misc.java *
* *
* Author: Marc GIMPEL *
* *
* Date: 14th July 2003 *
* *
******************************************************************************/
/* $Id: Misc.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
package org.xiph.speex;
/**
* Miscellaneous functions
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class Misc
{
/**
* Builds an Asymmetric "pseudo-Hamming" window.
* @param windowSize
* @param subFrameSize
* @return an Asymmetric "pseudo-Hamming" window.
*/
public static float[] window(final int windowSize, final int subFrameSize)
{
int i;
int part1 = subFrameSize * 7 / 2;
int part2 = subFrameSize * 5 / 2;
float[] window = new float[windowSize];
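// Asymmetric analysis window: a rising raised-cosine over the first 3.5 sub-frames followed by a falling raised-cosine over the last 2.5 sub-frames.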
for (i=0; i<part1; i++)
window[i]=(float) (0.54 - 0.46 * Math.cos(Math.PI * i / part1));
for (i=0; i<part2; i++)
window[part1+i]=(float) (0.54 + 0.46 * Math.cos(Math.PI * i / part2));
return window;
}
/**
* Create the window for autocorrelation (lag-windowing).
* @param lpcSize
* @param lagFactor
* @return the window for autocorrelation.
*/
public static float[] lagWindow(final int lpcSize, final float lagFactor)
{
float[] lagWindow = new float[lpcSize+1];
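// Gaussian lag window exp(-0.5*(2*pi*lagFactor*i)^2); applied to the autocorrelation it slightly smooths the spectral envelope before LPC analysis.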
for (int i=0; i<lpcSize+1; i++)
lagWindow[i]=(float) Math.exp(-0.5 * (2*Math.PI*lagFactor*i) *
(2*Math.PI*lagFactor*i));
return lagWindow;
}
}
|
| 0 | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/NbCodec.java |
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: NbCodec.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 14th July 2003 *
* *
******************************************************************************/
/* $Id: NbCodec.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
package org.xiph.speex;
/**
* Narrowband Codec.
* This class contains all the basic structures needed by the Narrowband
* encoder and decoder.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class NbCodec
implements Codebook
{
//---------------------------------------------------------------------------
// Constants
//---------------------------------------------------------------------------
/** Very small initial value for some of the buffers. */
public static final float VERY_SMALL = (float) 0e-30;
/** The Narrowband Frame Size gives the size in bits of a Narrowband frame for a given narrowband submode. */
public static final int[] NB_FRAME_SIZE = {5, 43, 119, 160, 220, 300, 364, 492, 79, 1, 1, 1, 1, 1, 1, 1};
/** The Narrowband Submodes constant gives the number of possible submodes for the Narrowband codec. */
public static final int NB_SUBMODES = 16;
/** The Narrowband Submode Bits constant gives the number of bits used to encode the Narrowband Submode. */
public static final int NB_SUBMODE_BITS = 4;
/** */
public static final float[] exc_gain_quant_scal1 = {-0.35f, 0.05f};
/** */
public static final float[] exc_gain_quant_scal3 = {-2.794750f, -1.810660f,
-1.169850f, -0.848119f,
-0.587190f, -0.329818f,
-0.063266f, 0.282826f};
//---------------------------------------------------------------------------
// Tools
//---------------------------------------------------------------------------
/** */
protected Lsp m_lsp;
/** */
protected Filters filters;
//---------------------------------------------------------------------------
// Parameters
//---------------------------------------------------------------------------
protected SubMode[] submodes; /** Sub-mode data */
protected int submodeID; /** Activated sub-mode */
protected int first; /** Is this the first frame? */
protected int frameSize; /** Size of frames */
protected int subframeSize; /** Size of sub-frames */
protected int nbSubframes; /** Number of sub-frames */
protected int windowSize; /** Analysis (LPC) window length */
protected int lpcSize; /** LPC order */
protected int bufSize; /** Buffer size */
protected int min_pitch; /** Minimum pitch value allowed */
protected int max_pitch; /** Maximum pitch value allowed */
protected float gamma1; /** Perceptual filter: A(z/gamma1) */
protected float gamma2; /** Perceptual filter: A(z/gamma2) */
protected float lag_factor; /** Lag windowing Gaussian width */
protected float lpc_floor; /** Noise floor multiplier for A[0] in LPC analysis*/
protected float preemph; /** Pre-emphasis: P(z) = 1 - a*z^-1*/
protected float pre_mem; /** 1-element memory for pre-emphasis */
//---------------------------------------------------------------------------
// Variables
//---------------------------------------------------------------------------
protected float[] frmBuf; /** Input buffer (original signal) */
protected int frmIdx;
protected float[] excBuf; /** Excitation buffer */
protected int excIdx; /** Start of excitation frame */
protected float[] innov; /** Innovation for the frame */
protected float[] lpc; /** LPCs for current frame */
protected float[] qlsp; /** Quantized LSPs for current frame */
protected float[] old_qlsp; /** Quantized LSPs for previous frame */
protected float[] interp_qlsp; /** Interpolated quantized LSPs */
protected float[] interp_qlpc; /** Interpolated quantized LPCs */
protected float[] mem_sp; /** Filter memory for synthesis signal */
protected float[] pi_gain; /** Gain of LPC filter at theta=pi (fe/2) */
protected float[] awk1, awk2, awk3;
// Vocoder data
protected float voc_m1;
protected float voc_m2;
protected float voc_mean;
protected int voc_offset;
protected int dtx_enabled; /** 1 for enabling DTX, 0 otherwise */
/**
* Constructor.
*/
public NbCodec()
{
m_lsp = new Lsp();
filters = new Filters();
}
/**
* Narrowband initialisation.
*/
public void nbinit()
{
// Initialize SubModes
submodes = buildNbSubModes();
submodeID = 5;
// Initialize narrowband parameters and variables
init(160, 40, 10, 640);
}
/**
* Initialisation.
* @param frameSize
* @param subframeSize
* @param lpcSize
* @param bufSize
*/
protected void init(final int frameSize,
final int subframeSize,
final int lpcSize,
final int bufSize)
{
first = 1;
// Codec parameters, should eventually have several "modes"
this.frameSize = frameSize;
this.windowSize = frameSize*3/2;
this.subframeSize = subframeSize;
this.nbSubframes = frameSize/subframeSize;
this.lpcSize = lpcSize;
this.bufSize = bufSize;
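// Pitch search range in samples at the 8 kHz narrowband rate: 17 (about 470 Hz) down to 144 (about 56 Hz).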
min_pitch = 17;
max_pitch = 144;
preemph = 0.0f;
pre_mem = 0.0f;
gamma1 = 0.9f;
gamma2 = 0.6f;
lag_factor = .01f;
lpc_floor = 1.0001f;
frmBuf = new float[bufSize];
frmIdx = bufSize - windowSize;
excBuf = new float[bufSize];
excIdx = bufSize - windowSize;
innov = new float[frameSize];
lpc = new float[lpcSize+1];
qlsp = new float[lpcSize];
old_qlsp = new float[lpcSize];
interp_qlsp = new float[lpcSize];
interp_qlpc = new float[lpcSize+1];
mem_sp = new float[5*lpcSize]; // TODO - check why 5 (why not 2 or 1)
pi_gain = new float[nbSubframes];
awk1 = new float[lpcSize+1];
awk2 = new float[lpcSize+1];
awk3 = new float[lpcSize+1];
voc_m1 = voc_m2 = voc_mean = 0;
voc_offset = 0;
dtx_enabled = 0; // disabled by default
}
/**
* Build narrowband submodes
*/
private static SubMode[] buildNbSubModes()
{
/* Initialize Long Term Predictions */
Ltp3Tap ltpNb = new Ltp3Tap(gain_cdbk_nb, 7, 7);
Ltp3Tap ltpVlbr = new Ltp3Tap(gain_cdbk_lbr, 5, 0);
Ltp3Tap ltpLbr = new Ltp3Tap(gain_cdbk_lbr, 5, 7);
Ltp3Tap ltpMed = new Ltp3Tap(gain_cdbk_lbr, 5, 7);
LtpForcedPitch ltpFP = new LtpForcedPitch();
/* Initialize Codebook Searches */
NoiseSearch noiseSearch = new NoiseSearch();
SplitShapeSearch ssNbVlbrSearch = new SplitShapeSearch(40, 10, 4, exc_10_16_table, 4, 0);
SplitShapeSearch ssNbLbrSearch = new SplitShapeSearch(40, 10, 4, exc_10_32_table, 5, 0);
SplitShapeSearch ssNbSearch = new SplitShapeSearch(40, 5, 8, exc_5_64_table, 6, 0);
SplitShapeSearch ssNbMedSearch = new SplitShapeSearch(40, 8, 5, exc_8_128_table, 7, 0);
SplitShapeSearch ssSbSearch = new SplitShapeSearch(40, 5, 8, exc_5_256_table, 8, 0);
SplitShapeSearch ssNbUlbrSearch = new SplitShapeSearch(40, 20, 2, exc_20_32_table, 5, 0);
/* Initialize Line Spectral Pair Quantizers */
NbLspQuant nbLspQuant = new NbLspQuant();
LbrLspQuant lbrLspQuant = new LbrLspQuant();
/* Initialize narrow-band modes */
SubMode[] nbSubModes = new SubMode[NB_SUBMODES];
/* 2150 bps "vocoder-like" mode for comfort noise */
nbSubModes[1] = new SubMode(0, 1, 0, 0, lbrLspQuant, ltpFP, noiseSearch, .7f, .7f, -1, 43);
/* 5.95 kbps very low bit-rate mode */
nbSubModes[2] = new SubMode(0, 0, 0, 0, lbrLspQuant, ltpVlbr, ssNbVlbrSearch, 0.7f, 0.5f, .55f, 119);
/* 8 kbps low bit-rate mode */
nbSubModes[3] = new SubMode(-1, 0, 1, 0, lbrLspQuant, ltpLbr, ssNbLbrSearch, 0.7f, 0.55f, .45f, 160);
/* 11 kbps medium bit-rate mode */
nbSubModes[4] = new SubMode(-1, 0, 1, 0, lbrLspQuant, ltpMed, ssNbMedSearch, 0.7f, 0.63f, .35f, 220);
/* 15 kbps high bit-rate mode */
nbSubModes[5] = new SubMode(-1, 0, 3, 0, nbLspQuant, ltpNb, ssNbSearch, 0.7f, 0.65f, .25f, 300);
/* 18.2 kbps high bit-rate mode */
nbSubModes[6] = new SubMode(-1, 0, 3, 0, nbLspQuant, ltpNb, ssSbSearch, 0.68f, 0.65f, .1f, 364);
/* 24.6 kbps high bit-rate mode */
nbSubModes[7] = new SubMode(-1, 0, 3, 1, nbLspQuant, ltpNb, ssNbSearch, 0.65f, 0.65f, -1, 492);
/* 3.95 kbps very low bit-rate mode */
nbSubModes[8] = new SubMode(0, 1, 0, 0, lbrLspQuant, ltpFP, ssNbUlbrSearch, .7f, .5f, .65f, 79);
/* Return the Narrowband SubModes*/
return nbSubModes;
}
/**
* Returns the size of a frame (e.g. 160 samples for a narrowband frame,
* 320 for wideband and 640 for ultra-wideband).
* @return the size of a frame (number of audio samples in a frame).
*/
public int getFrameSize()
{
return frameSize;
}
/**
* Returns whether or not we are using Discontinuous Transmission encoding.
* @return whether or not we are using Discontinuous Transmission encoding.
*/
public boolean getDtx()
{
return dtx_enabled != 0;
}
/**
* Returns the Pitch Gain array.
* @return the Pitch Gain array.
*/
public float[] getPiGain()
{
return pi_gain;
}
/**
* Returns the excitation array.
* @return the excitation array.
*/
public float[] getExc()
{
float[] excTmp = new float[frameSize];
System.arraycopy(excBuf, excIdx, excTmp, 0, frameSize);
return excTmp;
}
/**
* Returns the innovation array.
* @return the innovation array.
*/
public float[] getInnov()
{
return innov;
}
}
|
| 0 | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/NbDecoder.java |
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: NbDecoder.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: NbDecoder.java,v 1.3 2005/05/27 13:16:27 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
import java.io.StreamCorruptedException;
import java.util.Random;
/**
* Narrowband Speex Decoder
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.3 $
*/
public class NbDecoder
extends NbCodec
implements Decoder
{
private float[] innov2;
/*Packet loss*/
private int count_lost;
private int last_pitch; /** Pitch of last correctly decoded frame */
private float last_pitch_gain; /** Pitch gain of last correctly decoded frame */
private float[] pitch_gain_buf; /** Pitch gain of last decoded frames */
private int pitch_gain_buf_idx; /** Tail of the buffer */
private float last_ol_gain; /** Open-loop gain for previous frame */
/** */
protected Random random = new Random();
/** */
protected Stereo stereo;
/** */
protected Inband inband;
/** */
protected boolean enhanced;
/**
* Constructor
*/
public NbDecoder()
{
stereo = new Stereo();
inband = new Inband(stereo);
enhanced = true;
}
/**
* Initialise
* @param frameSize
* @param subframeSize
* @param lpcSize
* @param bufSize
*/
public void init(final int frameSize,
final int subframeSize,
final int lpcSize,
final int bufSize)
{
super.init(frameSize, subframeSize, lpcSize, bufSize);
filters.init ();
innov2 = new float[40];
count_lost = 0;
last_pitch = 40;
last_pitch_gain = 0;
pitch_gain_buf = new float[3];
pitch_gain_buf_idx = 0;
last_ol_gain = 0;
}
/**
* Decode the given input bits.
* @param bits - Speex bits buffer.
* @param out - the decoded mono audio frame.
* @return 1 if a terminator was found, 0 if not.
* @throws StreamCorruptedException If there is an error detected in the
* data stream.
*/
public int decode(final Bits bits, final float[] out)
throws StreamCorruptedException
{
int i, sub, pitch, ol_pitch=0, m;
float[] pitch_gain = new float[3];
float ol_gain=0.0f, ol_pitch_coef=0.0f;
int best_pitch=40;
float best_pitch_gain=0;
float pitch_average=0;
if (bits == null && dtx_enabled!=0) {
submodeID = 0;
}
else {
/* If bits is NULL, consider the packet to be lost (what could we do anyway) */
if (bits == null) {
decodeLost(out);
return 0;
}
/* Search for next narrowband block (handle requests, skip wideband blocks) */
do {
if (bits.unpack(1)!=0) { /* Skip wideband block (for compatibility) */
//Wideband
/* Get the sub-mode that was used */
m = bits.unpack(SbCodec.SB_SUBMODE_BITS);
int advance = SbCodec.SB_FRAME_SIZE[m];
if (advance < 0) {
throw new StreamCorruptedException("Invalid sideband mode encountered (1st sideband): " + m);
//return -2;
}
advance -= (SbCodec.SB_SUBMODE_BITS+1);
bits.advance(advance);
if (bits.unpack(1)!=0) { /* Skip ultra-wideband block (for compatibility) */
/* Get the sub-mode that was used */
m = bits.unpack(SbCodec.SB_SUBMODE_BITS);
advance = SbCodec.SB_FRAME_SIZE[m];
if (advance < 0) {
throw new StreamCorruptedException("Invalid sideband mode encountered. (2nd sideband): " + m);
//return -2;
}
advance -= (SbCodec.SB_SUBMODE_BITS+1);
bits.advance(advance);
if (bits.unpack(1)!=0) { /* Sanity check */
throw new StreamCorruptedException("More than two sideband layers found");
//return -2;
}
}
//*/
}
/* Get the sub-mode that was used */
m = bits.unpack(NB_SUBMODE_BITS);
if (m==15) { /* We found a terminator */
return 1;
}
else if (m==14) { /* Speex in-band request */
inband.speexInbandRequest(bits);
}
else if (m==13) { /* User in-band request */
inband.userInbandRequest(bits);
}
else if (m>8) { /* Invalid mode */
throw new StreamCorruptedException("Invalid mode encountered: " + m);
//return -2;
}
}
while (m>8);
submodeID = m;
}
/* Shift all buffers by one frame */
System.arraycopy(frmBuf, frameSize, frmBuf, 0, bufSize-frameSize);
System.arraycopy(excBuf, frameSize, excBuf, 0, bufSize-frameSize);
/* If null mode (no transmission), just set a couple things to zero*/
if (submodes[submodeID] == null) {
Filters.bw_lpc(.93f, interp_qlpc, lpc, 10);
float innov_gain=0;
for (i=0;i<frameSize;i++)
innov_gain += innov[i]*innov[i];
innov_gain=(float)Math.sqrt(innov_gain/frameSize);
for (i=excIdx;i<excIdx+frameSize;i++) {
excBuf[i]=3*innov_gain*(random.nextFloat() - .5f);
}
first=1;
/* Final signal synthesis from excitation */
Filters.iir_mem2(excBuf, excIdx, lpc, frmBuf, frmIdx, frameSize, lpcSize, mem_sp);
out[0] = frmBuf[frmIdx] + preemph*pre_mem;
for (i=1;i<frameSize;i++)
out[i]=frmBuf[frmIdx+i] + preemph*out[i-1];
pre_mem=out[frameSize-1];
count_lost=0;
return 0;
}
/* Unquantize LSPs */
submodes[submodeID].lsqQuant.unquant(qlsp, lpcSize, bits);
/*Damp memory if a frame was lost and the LSP changed too much*/
if (count_lost != 0)
{
float lsp_dist=0, fact;
for (i=0;i<lpcSize;i++)
lsp_dist += Math.abs(old_qlsp[i] - qlsp[i]);
fact = (float) (.6*Math.exp(-.2*lsp_dist));
for (i=0;i<2*lpcSize;i++)
mem_sp[i] *= fact;
}
/* Handle first frame and lost-packet case */
if (first!=0 || count_lost != 0) {
for (i=0;i<lpcSize;i++)
old_qlsp[i] = qlsp[i];
}
/* Get open-loop pitch estimation for low bit-rate pitch coding */
if (submodes[submodeID].lbr_pitch!=-1) {
ol_pitch = min_pitch + bits.unpack(7);
}
if (submodes[submodeID].forced_pitch_gain!=0) {
int quant= bits.unpack(4);
ol_pitch_coef=0.066667f*quant;
}
/* Get global excitation gain */
int qe = bits.unpack(5);
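// The encoder quantised the gain as qe = floor(0.5 + 3.5*ln(ol_gain)), so exp(qe/3.5) recovers it.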
ol_gain = (float)Math.exp(qe/3.5);
/* unpacks unused dtx bits */
if (submodeID==1) {
int extra = bits.unpack(4);
if (extra == 15)
dtx_enabled=1;
else
dtx_enabled=0;
}
if (submodeID>1)
dtx_enabled=0;
/*Loop on subframes */
for (sub=0;sub<nbSubframes;sub++) {
int offset, spIdx, extIdx;
float tmp;
/* Offset relative to start of frame */
offset = subframeSize*sub;
/* Original signal */
spIdx = frmIdx + offset;
/* Excitation */
extIdx = excIdx+offset;
/* LSP interpolation (quantized and unquantized) */
tmp = (1.0f + sub)/nbSubframes;
for (i=0;i<lpcSize;i++)
interp_qlsp[i] = (1-tmp)*old_qlsp[i] + tmp*qlsp[i];
/* Make sure the LSP's are stable */
Lsp.enforce_margin(interp_qlsp, lpcSize, .002f);
/* Compute interpolated LPCs (unquantized) */
for (i=0;i<lpcSize;i++)
interp_qlsp[i] = (float)Math.cos(interp_qlsp[i]);
m_lsp.lsp2lpc(interp_qlsp, interp_qlpc, lpcSize);
/* Compute enhanced synthesis filter */
if (enhanced) {
float r=.9f;
float k1,k2,k3;
k1=submodes[submodeID].lpc_enh_k1;
k2=submodes[submodeID].lpc_enh_k2;
k3=(1-(1-r*k1)/(1-r*k2))/r;
Filters.bw_lpc(k1, interp_qlpc, awk1, lpcSize);
Filters.bw_lpc(k2, interp_qlpc, awk2, lpcSize);
Filters.bw_lpc(k3, interp_qlpc, awk3, lpcSize);
}
/* Compute analysis filter at w=pi */
tmp=1;
pi_gain[sub]=0;
for (i=0;i<=lpcSize;i++) {
pi_gain[sub] += tmp*interp_qlpc[i];
tmp = -tmp;
}
/* Reset excitation */
for (i=0;i<subframeSize;i++)
excBuf[extIdx+i]=0;
/*Adaptive codebook contribution*/
int pit_min, pit_max;
/* Handle pitch constraints if any */
if (submodes[submodeID].lbr_pitch != -1) {
int margin= submodes[submodeID].lbr_pitch;
if (margin!=0) {
pit_min = ol_pitch-margin+1;
if (pit_min < min_pitch)
pit_min = min_pitch;
pit_max = ol_pitch+margin;
if (pit_max > max_pitch)
pit_max = max_pitch;
}
else {
pit_min = pit_max = ol_pitch;
}
}
else {
pit_min = min_pitch;
pit_max = max_pitch;
}
/* Pitch synthesis */
pitch = submodes[submodeID].ltp.unquant(excBuf, extIdx, pit_min, ol_pitch_coef,
subframeSize, pitch_gain, bits,
count_lost, offset, last_pitch_gain);
/* If we had lost frames, check energy of last received frame */
if (count_lost != 0 && ol_gain < last_ol_gain) {
float fact = ol_gain/(last_ol_gain+1);
for (i=0;i<subframeSize;i++)
excBuf[excIdx+i]*=fact;
}
tmp = Math.abs(pitch_gain[0]+pitch_gain[1]+pitch_gain[2]);
tmp = Math.abs(pitch_gain[1]);
if (pitch_gain[0]>0)
tmp += pitch_gain[0];
else
tmp -= .5*pitch_gain[0];
if (pitch_gain[2]>0)
tmp += pitch_gain[2];
else
tmp -= .5*pitch_gain[2];
pitch_average += tmp;
if (tmp>best_pitch_gain)
{
best_pitch = pitch;
best_pitch_gain = tmp;
}
/* Unquantize the innovation */
int q_energy, ivi=sub*subframeSize;
float ener;
for (i=ivi;i<ivi+subframeSize;i++)
innov[i]=0.0f;
/* Decode sub-frame gain correction */
if (submodes[submodeID].have_subframe_gain==3) {
q_energy = bits.unpack(3);
ener = (float) (ol_gain*Math.exp(exc_gain_quant_scal3[q_energy]));
}
else if (submodes[submodeID].have_subframe_gain==1) {
q_energy = bits.unpack(1);
ener = (float) (ol_gain*Math.exp(exc_gain_quant_scal1[q_energy]));
}
else {
ener = ol_gain;
}
if (submodes[submodeID].innovation!=null) {
/* Fixed codebook contribution */
submodes[submodeID].innovation.unquant(innov, ivi, subframeSize, bits);
}
/* De-normalize innovation and update excitation */
for (i=ivi;i<ivi+subframeSize;i++)
innov[i]*=ener;
/* Vocoder mode */
if (submodeID==1) {
float g=ol_pitch_coef;
for (i=0;i<subframeSize;i++)
excBuf[extIdx+i]=0;
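// Build an impulse train with one pulse every ol_pitch samples; the sqrt(ol_pitch) amplitude gives roughly unit energy per sample before the gains below are applied.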
while (voc_offset<subframeSize) {
if (voc_offset>=0)
excBuf[extIdx+voc_offset]=(float)Math.sqrt(1.0f*ol_pitch);
voc_offset+=ol_pitch;
}
voc_offset -= subframeSize;
g=.5f+2*(g-.6f);
if (g<0)
g=0;
if (g>1)
g=1;
for (i=0;i<subframeSize;i++)
{
float itmp=excBuf[extIdx+i];
excBuf[extIdx+i]=.8f*g*excBuf[extIdx+i]*ol_gain + .6f*g*voc_m1*ol_gain + .5f*g*innov[ivi+i] - .5f*g*voc_m2 + (1-g)*innov[ivi+i];
voc_m1 = itmp;
voc_m2 = innov[ivi+i];
voc_mean = .95f*voc_mean + .05f*excBuf[extIdx+i];
excBuf[extIdx+i]-=voc_mean;
}
}
else {
for (i=0;i<subframeSize;i++)
excBuf[extIdx+i]+=innov[ivi+i];
}
/* Decode second codebook (only for some modes) */
if (submodes[submodeID].double_codebook!=0) {
for (i=0;i<subframeSize;i++)
innov2[i]=0;
submodes[submodeID].innovation.unquant(innov2, 0, subframeSize, bits);
for (i=0;i<subframeSize;i++)
innov2[i]*=ener*(1/2.2);
for (i=0;i<subframeSize;i++)
excBuf[extIdx+i] += innov2[i];
}
for (i=0;i<subframeSize;i++)
frmBuf[spIdx+i]=excBuf[extIdx+i];
/* Signal synthesis */
if (enhanced && submodes[submodeID].comb_gain>0) {
filters.comb_filter(excBuf, extIdx, frmBuf, spIdx, subframeSize,
pitch, pitch_gain, submodes[submodeID].comb_gain);
}
if (enhanced) {
/* Use enhanced LPC filter */
Filters.filter_mem2(frmBuf, spIdx, awk2, awk1, subframeSize, lpcSize, mem_sp, lpcSize);
Filters.filter_mem2(frmBuf, spIdx, awk3, interp_qlpc, subframeSize, lpcSize, mem_sp, 0);
}
else {
/* Use regular filter */
for (i=0;i<lpcSize;i++)
mem_sp[lpcSize+i] = 0;
Filters.iir_mem2(frmBuf, spIdx, interp_qlpc, frmBuf, spIdx, subframeSize, lpcSize, mem_sp);
}
}
/*Copy output signal*/
out[0] = frmBuf[frmIdx] + preemph * pre_mem;
for (i=1;i<frameSize;i++)
out[i] = frmBuf[frmIdx+i] + preemph * out[i-1];
pre_mem = out[frameSize-1];
/* Store the LSPs for interpolation in the next frame */
for (i=0;i<lpcSize;i++)
old_qlsp[i] = qlsp[i];
/* The next frame will not be the first (Duh!) */
first = 0;
count_lost=0;
last_pitch = best_pitch;
last_pitch_gain = .25f*pitch_average;
pitch_gain_buf[pitch_gain_buf_idx++] = last_pitch_gain;
if (pitch_gain_buf_idx > 2) /* rollover */
pitch_gain_buf_idx = 0;
last_ol_gain = ol_gain;
return 0;
}
/**
* Decode when packets are lost.
* @param out - the generated mono audio frame.
* @return 0 if successful.
*/
public int decodeLost(final float[] out)
{
int i;
float pitch_gain, fact, gain_med;
fact = (float) Math.exp(-.04*count_lost*count_lost);
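// The attenuation factor falls off quadratically with the length of the loss burst, fading the concealed signal out.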
// median3(a, b, c) = (a<b ? (b<c ? b : (a<c ? c : a))
// : (c<b ? b : (c<a ? c : a)))
gain_med = (pitch_gain_buf[0] < pitch_gain_buf[1] ? (pitch_gain_buf[1] < pitch_gain_buf[2] ? pitch_gain_buf[1] : (pitch_gain_buf[0] < pitch_gain_buf[2] ? pitch_gain_buf[2] : pitch_gain_buf[0]))
: (pitch_gain_buf[2] < pitch_gain_buf[1] ? pitch_gain_buf[1] : (pitch_gain_buf[2] < pitch_gain_buf[0] ? pitch_gain_buf[2] : pitch_gain_buf[0])));
if (gain_med < last_pitch_gain)
last_pitch_gain = gain_med;
pitch_gain = last_pitch_gain;
if (pitch_gain>.95f)
pitch_gain=.95f;
pitch_gain *= fact;
/* Shift all buffers by one frame */
System.arraycopy(frmBuf, frameSize, frmBuf, 0, bufSize-frameSize);
System.arraycopy(excBuf, frameSize, excBuf, 0, bufSize-frameSize);
for (int sub=0; sub<nbSubframes; sub++)
{
int offset;
int spIdx, extIdx;
/* Offset relative to start of frame */
offset = subframeSize*sub;
/* Original signal */
spIdx = frmIdx+offset;
/* Excitation */
extIdx = excIdx+offset;
/* Excitation after post-filter*/
/* Calculate perceptually enhanced LPC filter */
if (enhanced) {
float r=.9f;
float k1,k2,k3;
if (submodes[submodeID] != null) {
k1=submodes[submodeID].lpc_enh_k1;
k2=submodes[submodeID].lpc_enh_k2;
}
else {
k1 = k2 = 0.7f;
}
k3=(1-(1-r*k1)/(1-r*k2))/r;
Filters.bw_lpc(k1, interp_qlpc, awk1, lpcSize);
Filters.bw_lpc(k2, interp_qlpc, awk2, lpcSize);
Filters.bw_lpc(k3, interp_qlpc, awk3, lpcSize);
}
/* Make up a plausible excitation */
/* THIS CAN BE IMPROVED */
/*if (pitch_gain>.95)
pitch_gain=.95;*/
{
float innov_gain=0;
for (i=0; i<frameSize; i++)
innov_gain += innov[i]*innov[i];
innov_gain = (float) Math.sqrt(innov_gain/frameSize);
for (i=0; i<subframeSize; i++)
{
//#if 0
// excBuf[extIdx+i] = pitch_gain*excBuf[extIdx+i-last_pitch] + fact*((float)Math.sqrt(1-pitch_gain))*innov[i+offset];
// /*Just so it give the same lost packets as with if 0*/
// /*rand();*/
//#else
/*excBuf[extIdx+i] = pitch_gain*excBuf[extIdx+i-last_pitch] + fact*innov[i+offset];*/
excBuf[extIdx+i] = pitch_gain*excBuf[extIdx+i-last_pitch] + fact*((float)Math.sqrt(1-pitch_gain))*3*innov_gain*((random.nextFloat())-0.5f);
//#endif
}
}
for (i=0;i<subframeSize;i++)
frmBuf[spIdx+i]=excBuf[extIdx+i];
/* Signal synthesis */
if (enhanced) {
/* Use enhanced LPC filter */
Filters.filter_mem2(frmBuf, spIdx, awk2, awk1, subframeSize, lpcSize, mem_sp, lpcSize);
Filters.filter_mem2(frmBuf, spIdx, awk3, interp_qlpc, subframeSize, lpcSize, mem_sp, 0);
}
else {
/* Use regular filter */
for (i=0;i<lpcSize;i++)
mem_sp[lpcSize+i] = 0;
Filters.iir_mem2(frmBuf, spIdx, interp_qlpc, frmBuf, spIdx, subframeSize, lpcSize, mem_sp);
}
}
out[0] = frmBuf[frmIdx] + preemph*pre_mem;
for (i=1;i<frameSize;i++)
out[i] = frmBuf[frmIdx+i] + preemph*out[i-1];
pre_mem=out[frameSize-1];
first = 0;
count_lost++;
pitch_gain_buf[pitch_gain_buf_idx++] = pitch_gain;
if (pitch_gain_buf_idx > 2) /* rollover */
pitch_gain_buf_idx = 0;
return 0;
}
/**
* Decode the given bits to stereo.
* @param data - float array of size 2*frameSize, that contains the mono
* audio samples in the first half. When the function has completed, the
* array will contain the interlaced stereo audio samples.
* @param frameSize - the size of a frame of mono audio samples.
*/
public void decodeStereo(final float[] data, final int frameSize)
{
stereo.decode(data, frameSize);
}
/**
* Enables or disables perceptual enhancement.
* @param enhanced
*/
public void setPerceptualEnhancement(final boolean enhanced)
{
this.enhanced = enhanced;
}
/**
* Returns whether perceptual enhancement is enabled or disabled.
* @return whether perceptual enhancement is enabled or disabled.
*/
public boolean getPerceptualEnhancement()
{
return enhanced;
}
}
|
| 0 | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph | java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/NbEncoder.java |
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: NbEncoder.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 9th April 2003 *
* *
******************************************************************************/
/* $Id: NbEncoder.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Narrowband Speex Encoder
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class NbEncoder
extends NbCodec
implements Encoder
{
/** The Narrowband Quality map indicates which narrowband submode to use for the given narrowband quality setting */
public static final int[] NB_QUALITY_MAP = {1, 8, 2, 3, 3, 4, 4, 5, 5, 6, 7};
private int bounded_pitch; /** Next frame should not rely on previous frames for pitch */
private int[] pitch; /** */
private float pre_mem2; /** 1-element memory for pre-emphasis */
private float[] exc2Buf; /** "Pitch enhanced" excitation */
private int exc2Idx; /** "Pitch enhanced" excitation */
private float[] swBuf; /** Weighted signal buffer */
private int swIdx; /** Start of weighted signal frame */
private float[] window; /** Temporary (Hanning) window */
private float[] buf2; /** 2nd temporary buffer */
private float[] autocorr; /** auto-correlation */
private float[] lagWindow; /** Window applied to auto-correlation */
private float[] lsp; /** LSPs for current frame */
private float[] old_lsp; /** LSPs for previous frame */
private float[] interp_lsp; /** Interpolated LSPs */
private float[] interp_lpc; /** Interpolated LPCs */
private float[] bw_lpc1; /** LPCs after bandwidth expansion by gamma1 for perceptual weighting*/
private float[] bw_lpc2; /** LPCs after bandwidth expansion by gamma2 for perceptual weighting*/
private float[] rc; /** Reflection coefficients */
private float[] mem_sw; /** Filter memory for perceptually-weighted signal */
private float[] mem_sw_whole; /** Filter memory for perceptually-weighted signal (whole frame)*/
private float[] mem_exc; /** Filter memory for excitation (whole frame) */
private Vbr vbr; /** State of the VBR data */
private int dtx_count; /** Number of consecutive DTX frames */
private float[] innov2;
protected int complexity; /** Complexity setting (0-10 from least complex to most complex) */
protected int vbr_enabled; /** 1 for enabling VBR, 0 otherwise */
protected int vad_enabled; /** 1 for enabling VAD, 0 otherwise */
protected int abr_enabled; /** ABR setting (in bps), 0 if off */
protected float vbr_quality; /** Quality setting for VBR encoding */
protected float relative_quality; /** Relative quality that will be needed by VBR */
protected float abr_drift;
protected float abr_drift2;
protected float abr_count;
protected int sampling_rate;
protected int submodeSelect; /** Mode chosen by the user (may differ from submodeID if VAD is on) */
/**
* Initialisation
* @param frameSize
* @param subframeSize
* @param lpcSize
* @param bufSize
*/
public void init(final int frameSize,
final int subframeSize,
final int lpcSize,
final int bufSize)
{
super.init(frameSize, subframeSize, lpcSize, bufSize);
complexity = 3; // in C it's 2 here, but set to 3 automatically by the encoder
vbr_enabled = 0; // disabled by default
vad_enabled = 0; // disabled by default
abr_enabled = 0; // disabled by default
vbr_quality = 8;
submodeSelect = 5;
pre_mem2 = 0;
bounded_pitch = 1;
exc2Buf = new float[bufSize];
exc2Idx = bufSize - windowSize;
swBuf = new float[bufSize];
swIdx = bufSize - windowSize;
window = Misc.window(windowSize, subframeSize);
lagWindow = Misc.lagWindow(lpcSize, lag_factor);
autocorr = new float[lpcSize+1];
buf2 = new float[windowSize];
interp_lpc = new float[lpcSize+1];
interp_qlpc = new float[lpcSize+1];
bw_lpc1 = new float[lpcSize+1];
bw_lpc2 = new float[lpcSize+1];
lsp = new float[lpcSize];
qlsp = new float[lpcSize];
old_lsp = new float[lpcSize];
old_qlsp = new float[lpcSize];
interp_lsp = new float[lpcSize];
interp_qlsp = new float[lpcSize];
rc = new float[lpcSize];
mem_sp = new float[lpcSize]; // why was there a *5 before ?!?
mem_sw = new float[lpcSize];
mem_sw_whole = new float[lpcSize];
mem_exc = new float[lpcSize];
vbr = new Vbr();
dtx_count = 0;
abr_count = 0;
sampling_rate = 8000;
awk1 = new float[lpcSize+1];
awk2 = new float[lpcSize+1];
awk3 = new float[lpcSize+1];
innov2 = new float[40];
filters.init ();
pitch = new int[nbSubframes];
}
/**
* Encode the given input signal.
* @param bits - Speex bits buffer.
* @param in - the raw mono audio frame to encode.
* @return return 1 if successful.
*/
public int encode(final Bits bits, final float[] in)
{
int i;
float[] res, target, mem;
float[] syn_resp;
float[] orig;
/* Copy new data in input buffer */
System.arraycopy(frmBuf, frameSize, frmBuf, 0, bufSize-frameSize);
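// Pre-emphasis while copying the frame in: frm[n] = in[n] - preemph*in[n-1] (preemph is initialised to 0 for narrowband, so this is effectively a straight copy).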
frmBuf[bufSize-frameSize] = in[0] - preemph*pre_mem;
for (i=1; i<frameSize; i++)
frmBuf[bufSize-frameSize+i] = in[i] - preemph*in[i-1];
pre_mem = in[frameSize-1];
/* Move signals 1 frame towards the past */
System.arraycopy(exc2Buf, frameSize, exc2Buf, 0, bufSize-frameSize);
System.arraycopy(excBuf, frameSize, excBuf, 0, bufSize-frameSize);
System.arraycopy(swBuf, frameSize, swBuf, 0, bufSize-frameSize);
/* Window for analysis */
for (i=0; i<windowSize; i++)
buf2[i] = frmBuf[i+frmIdx] * window[i];
/* Compute auto-correlation */
Lpc.autocorr(buf2, autocorr, lpcSize+1, windowSize);
autocorr[0] += 10; /* prevents NANs */
autocorr[0] *= lpc_floor; /* Noise floor in auto-correlation domain */
/* Lag windowing: equivalent to filtering in the power-spectrum domain */
for (i=0; i<lpcSize+1; i++)
autocorr[i] *= lagWindow[i];
/* Levinson-Durbin */
Lpc.wld(lpc, autocorr, rc, lpcSize); // tmperr
System.arraycopy(lpc, 0, lpc, 1, lpcSize);
lpc[0]=1;
/* LPC to LSPs (x-domain) transform */
int roots=Lsp.lpc2lsp (lpc, lpcSize, lsp, 15, 0.2f);
/* Check if we found all the roots */
if (roots==lpcSize)
{
/* LSP x-domain to angle domain*/
for (i=0;i<lpcSize;i++)
lsp[i] = (float)Math.acos(lsp[i]);
} else {
/* Search again if we can afford it */
if (complexity>1)
roots = Lsp.lpc2lsp (lpc, lpcSize, lsp, 11, 0.05f);
if (roots==lpcSize)
{
/* LSP x-domain to angle domain*/
for (i=0;i<lpcSize;i++)
lsp[i] = (float)Math.acos(lsp[i]);
} else {
/*If we can't find all LSP's, do some damage control and use previous filter*/
for (i=0;i<lpcSize;i++)
{
lsp[i]=old_lsp[i];
}
}
}
float lsp_dist=0;
for (i=0;i<lpcSize;i++)
lsp_dist += (old_lsp[i] - lsp[i])*(old_lsp[i] - lsp[i]);
/* Whole frame analysis (open-loop estimation of pitch and excitation gain) */
float ol_gain;
int ol_pitch;
float ol_pitch_coef;
{
if (first != 0)
for (i=0; i<lpcSize;i++)
interp_lsp[i] = lsp[i];
else
for (i=0;i<lpcSize;i++)
interp_lsp[i] = .375f*old_lsp[i] + .625f*lsp[i];
Lsp.enforce_margin(interp_lsp, lpcSize, .002f);
/* Compute interpolated LPCs (unquantized) for whole frame*/
for (i=0; i<lpcSize; i++)
interp_lsp[i] = (float)Math.cos(interp_lsp[i]);
m_lsp.lsp2lpc(interp_lsp, interp_lpc, lpcSize);
/*Open-loop pitch*/
if (submodes[submodeID] == null ||
vbr_enabled != 0 || vad_enabled != 0 ||
submodes[submodeID].forced_pitch_gain != 0 ||
submodes[submodeID].lbr_pitch != -1)
{
int[] nol_pitch = new int[6];
float[] nol_pitch_coef = new float[6];
Filters.bw_lpc(gamma1, interp_lpc, bw_lpc1, lpcSize);
Filters.bw_lpc(gamma2, interp_lpc, bw_lpc2, lpcSize);
Filters.filter_mem2(frmBuf, frmIdx, bw_lpc1, bw_lpc2, swBuf, swIdx,
frameSize, lpcSize, mem_sw_whole, 0);
Ltp.open_loop_nbest_pitch(swBuf, swIdx, min_pitch, max_pitch, frameSize,
nol_pitch, nol_pitch_coef, 6);
ol_pitch=nol_pitch[0];
ol_pitch_coef = nol_pitch_coef[0];
/*Try to remove pitch multiples*/
for (i=1;i<6;i++)
{
if ((nol_pitch_coef[i]>.85*ol_pitch_coef) &&
(Math.abs(nol_pitch[i]-ol_pitch/2.0)<=1 ||
Math.abs(nol_pitch[i]-ol_pitch/3.0)<=1 ||
Math.abs(nol_pitch[i]-ol_pitch/4.0)<=1 ||
Math.abs(nol_pitch[i]-ol_pitch/5.0)<=1))
{
/*ol_pitch_coef=nol_pitch_coef[i];*/
ol_pitch = nol_pitch[i];
}
}
/*if (ol_pitch>50)
ol_pitch/=2;*/
/*ol_pitch_coef = sqrt(ol_pitch_coef);*/
} else {
ol_pitch=0;
ol_pitch_coef=0;
}
/*Compute "real" excitation*/
Filters.fir_mem2(frmBuf, frmIdx, interp_lpc, excBuf, excIdx, frameSize, lpcSize, mem_exc);
/* Compute open-loop excitation gain */
ol_gain=0;
for (i=0;i<frameSize;i++)
ol_gain += excBuf[excIdx+i]*excBuf[excIdx+i];
ol_gain=(float)Math.sqrt(1+ol_gain/frameSize);
}
/*VBR stuff*/
if (vbr != null && (vbr_enabled != 0 || vad_enabled != 0)) {
if (abr_enabled != 0)
{
float qual_change=0;
if (abr_drift2 * abr_drift > 0)
{
/* Only adapt if long-term and short-term drift are the same sign */
qual_change = -.00001f*abr_drift/(1+abr_count);
if (qual_change>.05f)
qual_change=.05f;
if (qual_change<-.05f)
qual_change=-.05f;
}
vbr_quality += qual_change;
if (vbr_quality>10)
vbr_quality=10;
if (vbr_quality<0)
vbr_quality=0;
}
relative_quality = vbr.analysis(in, frameSize, ol_pitch, ol_pitch_coef);
/*if (delta_qual<0)*/
/* delta_qual*=.1*(3+st->vbr_quality);*/
if (vbr_enabled != 0) {
int mode;
int choice=0;
float min_diff=100;
mode = 8;
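/* Scan modes from highest (8) to lowest; each mode's threshold is interpolated linearly between the integer quality levels, and the admissible mode whose threshold is exceeded by the smallest margin is chosen. */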
while (mode > 0)
{
int v1;
float thresh;
v1=(int)Math.floor(vbr_quality);
if (v1==10)
thresh = Vbr.nb_thresh[mode][v1];
else
thresh = (vbr_quality-v1)*Vbr.nb_thresh[mode][v1+1] +
(1+v1-vbr_quality)*Vbr.nb_thresh[mode][v1];
if (relative_quality > thresh &&
relative_quality-thresh<min_diff)
{
choice = mode;
min_diff = relative_quality-thresh;
}
mode--;
}
mode=choice;
if (mode==0)
{
if (dtx_count==0 || lsp_dist>.05 || dtx_enabled==0 || dtx_count>20)
{
mode=1;
dtx_count=1;
} else {
mode=0;
dtx_count++;
}
} else {
dtx_count=0;
}
setMode(mode);
if (abr_enabled != 0)
{
int bitrate;
bitrate = getBitRate();
abr_drift+=(bitrate-abr_enabled);
abr_drift2 = .95f*abr_drift2 + .05f*(bitrate-abr_enabled);
abr_count += 1.0;
}
} else {
/*VAD only case*/
int mode;
if (relative_quality<2)
{
if (dtx_count==0 || lsp_dist>.05 || dtx_enabled == 0 || dtx_count>20)
{
dtx_count=1;
mode=1;
} else {
mode=0;
dtx_count++;
}
} else {
dtx_count = 0;
mode=submodeSelect;
}
/*speex_encoder_ctl(state, SPEEX_SET_MODE, &mode);*/
submodeID=mode;
}
} else {
relative_quality = -1;
}
/* First, transmit a zero for narrowband */
bits.pack(0, 1);
/* Transmit the sub-mode we use for this frame */
bits.pack(submodeID, NB_SUBMODE_BITS);
/* If null mode (no transmission), just set a couple things to zero*/
if (submodes[submodeID] == null)
{
for (i=0;i<frameSize;i++)
excBuf[excIdx+i]=exc2Buf[exc2Idx+i]=swBuf[swIdx+i]=VERY_SMALL;
for (i=0;i<lpcSize;i++)
mem_sw[i]=0;
first=1;
bounded_pitch = 1;
/* Final signal synthesis from excitation */
Filters.iir_mem2(excBuf, excIdx, interp_qlpc, frmBuf, frmIdx, frameSize, lpcSize, mem_sp);
in[0] = frmBuf[frmIdx] + preemph*pre_mem2;
for (i=1;i<frameSize;i++)
in[i]=frmBuf[frmIdx+i] + preemph*in[i-1];
pre_mem2=in[frameSize-1];
return 0;
}
/* LSP Quantization */
if (first != 0)
{
for (i=0; i<lpcSize;i++)
old_lsp[i] = lsp[i];
}
/*Quantize LSPs*/
//#if 1 /*0 for unquantized*/
submodes[submodeID].lsqQuant.quant(lsp, qlsp, lpcSize, bits);
//#else
// for (i=0;i<lpcSize;i++)
// qlsp[i]=lsp[i];
//#endif
/*If we use low bit-rate pitch mode, transmit open-loop pitch*/
if (submodes[submodeID].lbr_pitch!=-1)
{
bits.pack(ol_pitch-min_pitch, 7);
}
if (submodes[submodeID].forced_pitch_gain != 0)
{
int quant;
quant = (int)Math.floor(.5+15*ol_pitch_coef);
if (quant>15)
quant=15;
if (quant<0)
quant=0;
bits.pack(quant, 4);
ol_pitch_coef=(float) 0.066667*quant;
}
/*Quantize and transmit open-loop excitation gain*/
{
int qe = (int)(Math.floor(0.5+3.5*Math.log(ol_gain)));
if (qe<0)
qe=0;
if (qe>31)
qe=31;
ol_gain = (float) Math.exp(qe/3.5);
bits.pack(qe, 5);
}
/* Special case for first frame */
if (first != 0)
{
for (i=0;i<lpcSize;i++)
old_qlsp[i] = qlsp[i];
}
/* Filter response */
res = new float[subframeSize];
/* Target signal */
target = new float[subframeSize];
syn_resp = new float[subframeSize];
mem = new float[lpcSize];
orig = new float[frameSize];
for (i=0;i<frameSize;i++)
orig[i]=frmBuf[frmIdx+i];
/* Loop on sub-frames */
for (int sub=0;sub<nbSubframes;sub++)
{
float tmp;
int offset;
int sp, sw, exc, exc2;
int pitchval;
/* Offset relative to start of frame */
offset = subframeSize*sub;
/* Original signal */
sp=frmIdx+offset;
/* Excitation */
exc=excIdx+offset;
/* Weighted signal */
sw=swIdx+offset;
exc2=exc2Idx+offset;
/* LSP interpolation (quantized and unquantized) */
tmp = (float) (1.0 + sub)/nbSubframes;
for (i=0;i<lpcSize;i++)
interp_lsp[i] = (1-tmp)*old_lsp[i] + tmp*lsp[i];
for (i=0;i<lpcSize;i++)
interp_qlsp[i] = (1-tmp)*old_qlsp[i] + tmp*qlsp[i];
/* Make sure the filters are stable */
Lsp.enforce_margin(interp_lsp, lpcSize, .002f);
Lsp.enforce_margin(interp_qlsp, lpcSize, .002f);
/* Compute interpolated LPCs (quantized and unquantized) */
for (i=0;i<lpcSize;i++)
interp_lsp[i] = (float) Math.cos(interp_lsp[i]);
m_lsp.lsp2lpc(interp_lsp, interp_lpc, lpcSize);
for (i=0;i<lpcSize;i++)
interp_qlsp[i] = (float) Math.cos(interp_qlsp[i]);
m_lsp.lsp2lpc(interp_qlsp, interp_qlpc, lpcSize);
/* Compute analysis filter gain at w=pi (for use in SB-CELP) */
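/* i.e. evaluate A(z) at z=-1: pi_gain = sum over i of (-1)^i * interp_qlpc[i] */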
tmp=1;
pi_gain[sub]=0;
for (i=0;i<=lpcSize;i++)
{
pi_gain[sub] += tmp*interp_qlpc[i];
tmp = -tmp;
}
/* Compute bandwidth-expanded (unquantized) LPCs for perceptual weighting */
Filters.bw_lpc(gamma1, interp_lpc, bw_lpc1, lpcSize);
if (gamma2>=0)
Filters.bw_lpc(gamma2, interp_lpc, bw_lpc2, lpcSize);
else
{
bw_lpc2[0]=1;
bw_lpc2[1]=-preemph;
for (i=2;i<=lpcSize;i++)
bw_lpc2[i]=0;
}
/* Compute impulse response of A(z/g1) / ( A(z)*A(z/g2) )*/
for (i=0;i<subframeSize;i++)
excBuf[exc+i]=0;
excBuf[exc]=1;
Filters.syn_percep_zero(excBuf, exc, interp_qlpc, bw_lpc1, bw_lpc2, syn_resp, subframeSize, lpcSize);
/* Reset excitation */
for (i=0;i<subframeSize;i++)
excBuf[exc+i]=0;
for (i=0;i<subframeSize;i++)
exc2Buf[exc2+i]=0;
/* Compute zero response of A(z/g1) / ( A(z/g2) * A(z) ) */
for (i=0;i<lpcSize;i++)
mem[i]=mem_sp[i];
Filters.iir_mem2(excBuf, exc, interp_qlpc, excBuf, exc, subframeSize, lpcSize, mem);
for (i=0;i<lpcSize;i++)
mem[i]=mem_sw[i];
Filters.filter_mem2(excBuf, exc, bw_lpc1, bw_lpc2, res, 0, subframeSize, lpcSize, mem, 0);
/* Compute weighted signal */
for (i=0;i<lpcSize;i++)
mem[i]=mem_sw[i];
Filters.filter_mem2(frmBuf, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem, 0);
/* Compute target signal */
for (i=0;i<subframeSize;i++)
target[i]=swBuf[sw+i]-res[i];
for (i=0;i<subframeSize;i++)
excBuf[exc+i]=exc2Buf[exc2+i]=0;
/* If we have a long-term predictor (otherwise, something's wrong) */
// if (submodes[submodeID].ltp.quant)
// {
int pit_min, pit_max;
/* Long-term prediction */
if (submodes[submodeID].lbr_pitch != -1)
{
/* Low bit-rate pitch handling */
int margin;
margin = submodes[submodeID].lbr_pitch;
if (margin != 0)
{
if (ol_pitch < min_pitch+margin-1)
ol_pitch=min_pitch+margin-1;
if (ol_pitch > max_pitch-margin)
ol_pitch=max_pitch-margin;
pit_min = ol_pitch-margin+1;
pit_max = ol_pitch+margin;
} else {
pit_min=pit_max=ol_pitch;
}
} else {
pit_min = min_pitch;
pit_max = max_pitch;
}
/* Force pitch to use only the current frame if needed */
if (bounded_pitch != 0 && pit_max>offset)
pit_max=offset;
/* Perform pitch search */
pitchval = submodes[submodeID].ltp.quant(target, swBuf, sw, interp_qlpc, bw_lpc1, bw_lpc2,
excBuf, exc, pit_min, pit_max, ol_pitch_coef, lpcSize,
subframeSize, bits, exc2Buf, exc2, syn_resp, complexity);
pitch[sub]=pitchval;
// } else {
// speex_error ("No pitch prediction, what's wrong");
// }
/* Update target for adaptive codebook contribution */
Filters.syn_percep_zero(excBuf, exc, interp_qlpc, bw_lpc1, bw_lpc2, res, subframeSize, lpcSize);
for (i=0;i<subframeSize;i++)
target[i]-=res[i];
/* Quantization of innovation */
{
int innovptr;
float ener=0, ener_1;
innovptr = sub*subframeSize;
for (i=0;i<subframeSize;i++)
innov[innovptr+i]=0;
Filters.residue_percep_zero(target, 0, interp_qlpc, bw_lpc1, bw_lpc2, buf2, subframeSize, lpcSize);
for (i=0;i<subframeSize;i++)
ener+=buf2[i]*buf2[i];
ener=(float)Math.sqrt(.1f+ener/subframeSize);
/*for (i=0;i<subframeSize;i++)
System.out.print(buf2[i]/ener + "\t");
*/
ener /= ol_gain;
/* Calculate gain correction for the sub-frame (if any) */
if (submodes[submodeID].have_subframe_gain != 0) {
int qe;
ener=(float)Math.log(ener);
if (submodes[submodeID].have_subframe_gain==3) {
qe = VQ.index(ener, exc_gain_quant_scal3, 8);
bits.pack(qe, 3);
ener=exc_gain_quant_scal3[qe];
}
else {
qe = VQ.index(ener, exc_gain_quant_scal1, 2);
bits.pack(qe, 1);
ener=exc_gain_quant_scal1[qe];
}
ener=(float)Math.exp(ener);
}
else {
ener=1;
}
ener*=ol_gain;
/*System.out.println(ener + " " + ol_gain);*/
ener_1 = 1/ener;
/* Normalize innovation */
for (i=0;i<subframeSize;i++)
target[i]*=ener_1;
/* Quantize innovation */
// if (submodes[submodeID].innovation != null)
// {
/* Codebook search */
submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2,
lpcSize, subframeSize, innov,
innovptr, syn_resp, bits, complexity);
/* De-normalize innovation and update excitation */
for (i=0;i<subframeSize;i++)
innov[innovptr+i]*=ener;
for (i=0;i<subframeSize;i++)
excBuf[exc+i] += innov[innovptr+i];
// } else {
// speex_error("No fixed codebook");
// }
/* In some (rare) modes, we do a second search (more bits) to reduce noise even more */
if (submodes[submodeID].double_codebook != 0) {
float[] innov2 = new float[subframeSize];
// for (i=0;i<subframeSize;i++)
// innov2[i]=0;
for (i=0;i<subframeSize;i++)
target[i]*=2.2;
submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2,
lpcSize, subframeSize, innov2, 0,
syn_resp, bits, complexity);
for (i=0;i<subframeSize;i++)
innov2[i]*=ener*(1/2.2);
for (i=0;i<subframeSize;i++)
excBuf[exc+i] += innov2[i];
}
for (i=0;i<subframeSize;i++)
target[i]*=ener;
}
/*Keep the previous memory*/
for (i=0;i<lpcSize;i++)
mem[i]=mem_sp[i];
/* Final signal synthesis from excitation */
Filters.iir_mem2(excBuf, exc, interp_qlpc, frmBuf, sp, subframeSize, lpcSize, mem_sp);
/* Compute weighted signal again, from synthesized speech (not sure it's the right thing) */
Filters.filter_mem2(frmBuf, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem_sw, 0);
for (i=0;i<subframeSize;i++)
exc2Buf[exc2+i]=excBuf[exc+i];
}
/* Store the LSPs for interpolation in the next frame */
if (submodeID>=1)
{
for (i=0;i<lpcSize;i++)
old_lsp[i] = lsp[i];
for (i=0;i<lpcSize;i++)
old_qlsp[i] = qlsp[i];
}
if (submodeID==1)
{
if (dtx_count != 0) {
bits.pack(15, 4);
}
else {
bits.pack(0, 4);
}
}
/* The next frame will not be the first (Duh!) */
first = 0;
{
float ener=0, err=0;
float snr;
for (i=0;i<frameSize;i++)
{
ener+=frmBuf[frmIdx+i]*frmBuf[frmIdx+i];
err += (frmBuf[frmIdx+i]-orig[i])*(frmBuf[frmIdx+i]-orig[i]);
}
snr = (float) (10*Math.log((ener+1)/(err+1)));
/*System.out.println("Frame result: SNR="+snr+" E="+ener+" Err="+err+"\r\n");*/
}
/* Replace input by synthesized speech */
in[0] = frmBuf[frmIdx] + preemph*pre_mem2;
for (i=1;i<frameSize;i++)
in[i]=frmBuf[frmIdx+i] + preemph*in[i-1];
pre_mem2=in[frameSize-1];
if (submodes[submodeID].innovation instanceof NoiseSearch || submodeID==0)
bounded_pitch = 1;
else
bounded_pitch = 0;
return 1;
}
/**
* Returns the size in bits of an audio frame encoded with the current mode.
* @return the size in bits of an audio frame encoded with the current mode.
*/
public int getEncodedFrameSize()
{
return NB_FRAME_SIZE[submodeID];
}
//---------------------------------------------------------------------------
// Speex Control Functions
//---------------------------------------------------------------------------
/**
* Sets the Quality
* @param quality
*/
public void setQuality(int quality)
{
if (quality < 0) {
quality = 0;
}
if (quality > 10) {
quality = 10;
}
submodeID = submodeSelect = NB_QUALITY_MAP[quality];
}
/**
* Gets the bitrate.
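* For instance, with narrowband settings (8000 Hz sampling, 160-sample frames)
* and a submode of 220 bits per frame, this gives 8000*220/160 = 11000 bit/s.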
* @return the bitrate.
*/
public int getBitRate()
{
if (submodes[submodeID] != null)
return sampling_rate*submodes[submodeID].bits_per_frame/frameSize;
else
return sampling_rate*(NB_SUBMODE_BITS+1)/frameSize;
}
/**
*
*/
// public void resetState()
// {
// }
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
/**
* Sets the encoding submode.
* @param mode
*/
public void setMode(int mode)
{
if (mode < 0) {
mode = 0;
}
submodeID = submodeSelect = mode;
}
/**
* Returns the encoding submode currently in use.
* @return the encoding submode currently in use.
*/
public int getMode()
{
return submodeID;
}
/**
* Sets the bitrate.
* @param bitrate
*/
public void setBitRate(final int bitrate)
{
for (int i=10; i>=0; i--) {
setQuality(i);
if (getBitRate() <= bitrate)
return;
}
}
/**
* Sets whether or not to use Variable Bit Rate encoding.
* @param vbr
*/
public void setVbr(final boolean vbr)
{
vbr_enabled = vbr ? 1 : 0;
}
/**
* Returns whether or not we are using Variable Bit Rate encoding.
* @return whether or not we are using Variable Bit Rate encoding.
*/
public boolean getVbr()
{
return vbr_enabled != 0;
}
/**
* Sets whether or not to use Voice Activity Detection encoding.
* @param vad
*/
public void setVad(final boolean vad)
{
vad_enabled = vad ? 1 : 0;
}
/**
* Returns whether or not we are using Voice Activity Detection encoding.
* @return whether or not we are using Voice Activity Detection encoding.
*/
public boolean getVad()
{
return vad_enabled != 0;
}
/**
* Sets whether or not to use Discontinuous Transmission encoding.
* @param dtx
*/
public void setDtx(final boolean dtx)
{
dtx_enabled = dtx ? 1 : 0;
}
/**
* Returns the Average Bit Rate used (0 if ABR is not turned on).
* @return the Average Bit Rate used (0 if ABR is not turned on).
*/
public int getAbr()
{
return abr_enabled;
}
/**
* Sets the Average Bit Rate.
* @param abr
*/
public void setAbr(final int abr)
{
abr_enabled = abr;  /* store the ABR target bitrate (0 disables ABR) */
vbr_enabled = 1;
{
int i=10, rate, target;
float vbr_qual;
target = abr;
while (i>=0)
{
setQuality(i);
rate = getBitRate();
if (rate <= target)
break;
i--;
}
vbr_qual=i;
if (vbr_qual<0)
vbr_qual=0;
setVbrQuality(vbr_qual);
abr_count=0;
abr_drift=0;
abr_drift2=0;
}
}
/**
* Sets the Variable Bit Rate Quality.
* @param quality
*/
public void setVbrQuality(float quality)
{
if (quality < 0f)
quality = 0f;
if (quality > 10f)
quality = 10f;
vbr_quality = quality;
}
/**
* Returns the Variable Bit Rate Quality.
* @return the Variable Bit Rate Quality.
*/
public float getVbrQuality()
{
return vbr_quality;
}
/**
* Sets the algorithmic complexity.
* @param complexity
*/
public void setComplexity(int complexity)
{
if (complexity < 0)
complexity = 0;
if (complexity > 10)
complexity = 10;
this.complexity = complexity;
}
/**
* Returns the algorithmic complexity.
* @return the algorithmic complexity.
*/
public int getComplexity()
{
return complexity;
}
/**
* Sets the sampling rate.
* @param rate
*/
public void setSamplingRate(final int rate)
{
sampling_rate = rate;
}
/**
* Returns the sampling rate.
* @return the sampling rate.
*/
public int getSamplingRate()
{
return sampling_rate;
}
/**
* Returns the look ahead.
* @return the look ahead (in samples).
*/
public int getLookAhead()
{
return windowSize - frameSize;
}
/**
* Returns the relative quality.
* @return the relative quality.
*/
public float getRelativeQuality()
{
return relative_quality;
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/NbLspQuant.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: NbLspQuant.java                                                     *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: NbLspQuant.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* LSP Quantisation and Unquantisation (narrowband)
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class NbLspQuant
extends LspQuant
{
/**
* Line Spectral Pair Quantification (narrowband).
* @param lsp - Line Spectral Pairs table.
* @param qlsp - Quantified Line Spectral Pairs table.
* @param order
* @param bits - Speex bits buffer.
*/
public final void quant(float[] lsp, float[] qlsp, int order, Bits bits)
{
int i;
float tmp1, tmp2;
int id;
float[] quant_weight = new float[MAX_LSP_SIZE];
for (i=0;i<order;i++)
qlsp[i]=lsp[i];
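/* Quantization weights: closely spaced LSPs (spectral peaks) get larger weights */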
quant_weight[0] = 1/(qlsp[1]-qlsp[0]);
quant_weight[order-1] = 1/(qlsp[order-1]-qlsp[order-2]);
for (i=1;i<order-1;i++) {
tmp1 = 1/((.15f+qlsp[i]-qlsp[i-1])*(.15f+qlsp[i]-qlsp[i-1]));
tmp2 = 1/((.15f+qlsp[i+1]-qlsp[i])*(.15f+qlsp[i+1]-qlsp[i]));
quant_weight[i] = tmp1 > tmp2 ? tmp1 : tmp2;
}
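/* Multi-stage VQ: remove the mean spacing, scale up, quantize with one 10-dimensional
   codebook, then refine the low and high halves with two 5-dimensional stages each
   (6 bits per stage) */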
for (i=0;i<order;i++)
qlsp[i]-=(.25*i+.25);
for (i=0;i<order;i++)
qlsp[i]*=256;
id = lsp_quant(qlsp, 0, cdbk_nb, NB_CDBK_SIZE, order);
bits.pack(id, 6);
for (i=0;i<order;i++)
qlsp[i]*=2;
id = lsp_weight_quant(qlsp, 0, quant_weight, 0, cdbk_nb_low1, NB_CDBK_SIZE_LOW1, 5);
bits.pack(id, 6);
for (i=0;i<5;i++)
qlsp[i]*=2;
id = lsp_weight_quant(qlsp, 0, quant_weight, 0, cdbk_nb_low2, NB_CDBK_SIZE_LOW2, 5);
bits.pack(id, 6);
id = lsp_weight_quant(qlsp, 5, quant_weight, 5, cdbk_nb_high1, NB_CDBK_SIZE_HIGH1, 5);
bits.pack(id, 6);
for (i=5;i<10;i++)
qlsp[i]*=2;
id = lsp_weight_quant(qlsp, 5, quant_weight, 5, cdbk_nb_high2, NB_CDBK_SIZE_HIGH2, 5);
bits.pack(id, 6);
for (i=0;i<order;i++)
qlsp[i]*=.00097656;
for (i=0;i<order;i++)
qlsp[i]=lsp[i]-qlsp[i];
}
/**
* Line Spectral Pair Unquantification (narrowband).
* @param lsp - Line Spectral Pairs table.
* @param order
* @param bits - Speex bits buffer.
*/
public final void unquant(float[] lsp, int order, Bits bits)
{
for (int i=0;i<order;i++) {
lsp[i]=.25f*i+.25f;
}
unpackPlus(lsp, cdbk_nb, bits, 0.0039062f, 10, 0);
unpackPlus(lsp, cdbk_nb_low1, bits, 0.0019531f, 5, 0);
unpackPlus(lsp, cdbk_nb_low2, bits, 0.00097656f, 5, 0);
unpackPlus(lsp, cdbk_nb_high1, bits, 0.0019531f, 5, 5);
unpackPlus(lsp, cdbk_nb_high2, bits, 0.00097656f, 5, 5);
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/NoiseSearch.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: NoiseSearch.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: NoiseSearch.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Noise codebook search
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class NoiseSearch
extends CbSearch
{
/**
* Codebook Search Quantification (Noise).
* @param target target vector
* @param ak LPCs for this subframe
* @param awk1 Weighted LPCs for this subframe
* @param awk2 Weighted LPCs for this subframe
* @param p number of LPC coeffs
* @param nsf number of samples in subframe
* @param exc excitation array.
* @param es position in excitation array.
* @param r
* @param bits Speex bits buffer.
* @param complexity
*/
public final void quant(float[] target, float[] ak, float[] awk1, float[] awk2,
int p, int nsf, float[] exc, int es, float[] r,
Bits bits, int complexity)
{
int i;
float[] tmp=new float[nsf];
Filters.residue_percep_zero(target, 0, ak, awk1, awk2, tmp, nsf, p);
for (i=0;i<nsf;i++)
exc[es+i]+=tmp[i];
for (i=0;i<nsf;i++)
target[i]=0;
}
/**
* Codebook Search Unquantification (Noise).
* @param exc - excitation array.
* @param es - position in excitation array.
* @param nsf - number of samples in subframe.
* @param bits - Speex bits buffer.
*/
public final void unquant(float[] exc, int es, int nsf, Bits bits)
{
for (int i=0; i<nsf; i++) {
exc[es+i]+= (float) (3.0*(Math.random()-.5));
}
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/OggCrc.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: OggCrc.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Ross WILLIAMS *
* *
* Date: 20th April 2003 *
* *
******************************************************************************/
/* $Id: OggCrc.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/********************************************************************
* *
* THIS FILE IS PART OF THE OggVorbis SOFTWARE CODEC SOURCE CODE. *
* USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
* GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
* IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
* *
* THE OggVorbis SOURCE CODE IS (C) COPYRIGHT 1994-2002 *
* by the Xiph.Org Foundation http://www.xiph.org/ *
* *
********************************************************************
function: code raw [Vorbis] packets into framed OggSquish stream and
decode Ogg streams back into raw packets
last mod: $Id: OggCrc.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $
note: The CRC code is directly derived from public domain code by
Ross Williams (ross@guest.adelaide.edu.au). See docs/framing.html
for details.
********************************************************************/
package org.xiph.speex;
/**
* Calculates the CRC checksum for Ogg packets.
*
* <p>Ogg uses the same generator polynomial as ethernet, although with an
* unreflected alg and an init/final of 0, not 0xffffffff.
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class OggCrc
{
// TODO - implement java.util.zip.Checksum
/**
* CRC checksum lookup table
*/
private static int[] crc_lookup;
static {
crc_lookup = new int[256];
for (int i=0; i<crc_lookup.length; i++) {
int r=i<<24;
for (int j=0; j<8; j++) {
if ((r& 0x80000000)!=0) {
/* The same as the ethernet generator polynomial, although we use an
unreflected alg and an init/final of 0, not 0xffffffff */
r=(r << 1)^0x04c11db7;
}
else {
r<<=1;
}
}
crc_lookup[i]=(r&0xffffffff);
}
}
/**
* Calculates the checksum on the given data, from the given offset and
* for the given length, using the given initial value.
* This allows one to calculate the checksum iteratively, by reinjecting the
* last returned value as the initial value when the function is called for
* the next data chunk.
* The initial value should be 0 for the first iteration.
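* <p>For example (hypothetical header and body buffers of a single Ogg page):
* <pre>
* int crc = OggCrc.checksum(0, header, 0, header.length);
* crc = OggCrc.checksum(crc, body, 0, body.length);
* </pre>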
* @param crc - the initial value
* @param data - the data
* @param offset - the offset at which to start calculating the checksum.
* @param length - the length of data over which to calculate the checksum.
* @return the checksum.
*/
public static int checksum(int crc,
final byte[] data,
int offset,
final int length)
{
int end=offset+length;
for (;offset<end;offset++){
crc=(crc<<8)^crc_lookup[((crc>>>24)&0xff)^(data[offset]&0xff)];
}
return crc;
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/OggSpeexWriter.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: OggSpeexWriter.java *
* *
* Author: Marc GIMPEL *
* *
* Date: 9th April 2003 *
* *
******************************************************************************/
/* $Id: OggSpeexWriter.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
package org.xiph.speex;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.FileOutputStream;
import java.util.Random;
/**
* Ogg Speex Writer
*
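* <p>A minimal usage sketch (narrowband mono assumed; <code>packet</code> is
* one encoded Speex frame produced elsewhere):
* <pre>
* OggSpeexWriter writer = new OggSpeexWriter(0, 8000, 1, 1, false);
* writer.open("out.spx");
* writer.writeHeader("encoded with Speex");
* writer.writePacket(packet, 0, packet.length);
* writer.close();
* </pre>
*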
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class OggSpeexWriter
extends AudioFileWriter
{
/** Number of packets in an Ogg page (must be less than 255) */
public static final int PACKETS_PER_OGG_PAGE = 250;
/** The OutputStream */
private OutputStream out;
/** Defines the encoder mode (0=NB, 1=WB and 2=UWB). */
private int mode;
/** Defines the sampling rate of the audio input. */
private int sampleRate;
/** Defines the number of channels of the audio input (1=mono, 2=stereo). */
private int channels;
/** Defines the number of frames per speex packet. */
private int nframes;
/** Defines whether or not to use VBR (Variable Bit Rate). */
private boolean vbr;
/** */
private int size;
/** Ogg Stream Serial Number */
private int streamSerialNumber;
/** Data buffer */
private byte[] dataBuffer;
/** Pointer within the Data buffer */
private int dataBufferPtr;
/** Header buffer */
private byte[] headerBuffer;
/** Pointer within the Header buffer */
private int headerBufferPtr;
/** Ogg Page count */
private int pageCount;
/** Speex packet count within an Ogg Page */
private int packetCount;
/**
* Absolute granule position
* (the number of audio samples from beginning of file to end of Ogg Packet).
*/
private long granulepos;
/**
* Builds an Ogg Speex Writer.
*/
public OggSpeexWriter()
{
if (streamSerialNumber == 0)
streamSerialNumber = new Random().nextInt();
dataBuffer = new byte[65565];
dataBufferPtr = 0;
headerBuffer = new byte[255];
headerBufferPtr = 0;
pageCount = 0;
packetCount = 0;
granulepos = 0;
}
/**
* Builds an Ogg Speex Writer.
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @param nframes the number of frames per speex packet.
* @param vbr
*/
public OggSpeexWriter(final int mode,
final int sampleRate,
final int channels,
final int nframes,
final boolean vbr)
{
this();
setFormat(mode, sampleRate, channels, nframes, vbr);
}
/**
* Sets the output format.
* Must be called before writeHeader().
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @param nframes the number of frames per speex packet.
* @param vbr
*/
private void setFormat(final int mode,
final int sampleRate,
final int channels,
final int nframes,
boolean vbr)
{
this.mode = mode;
this.sampleRate = sampleRate;
this.channels = channels;
this.nframes = nframes;
this.vbr = vbr;
}
/**
* Sets the Stream Serial Number.
* Must not be changed mid stream.
* @param serialNumber
*/
public void setSerialNumber(final int serialNumber)
{
this.streamSerialNumber = serialNumber;
}
/**
* Closes the output file.
* @exception IOException if there was an exception closing the Audio Writer.
*/
public void close()
throws IOException
{
flush(true);
out.close();
}
/**
* Open the output file.
* @param file - file to open.
* @exception IOException if there was an exception opening the Audio Writer.
*/
public void open(final File file)
throws IOException
{
file.delete();
out = new FileOutputStream(file);
size = 0;
}
/**
* Open the output file.
* @param filename - file to open.
* @exception IOException if there was an exception opening the Audio Writer.
*/
public void open(final String filename)
throws IOException
{
open(new File(filename));
}
/**
* Writes the header pages that start the Ogg Speex file.
* Prepares file for data to be written.
* @param comment description to be included in the header.
* @exception IOException
*/
public void writeHeader(final String comment)
throws IOException
{
int chksum;
byte[] header;
byte[] data;
/* writes the OGG header page */
header = buildOggPageHeader(2, 0, streamSerialNumber, pageCount++, 1,
new byte[] {80});
data = buildSpeexHeader(sampleRate, mode, channels, vbr, nframes);
chksum = OggCrc.checksum(0, header, 0, header.length);
chksum = OggCrc.checksum(chksum, data, 0, data.length);
writeInt(header, 22, chksum);
out.write(header);
out.write(data);
/* writes the OGG comment page */
header = buildOggPageHeader(0, 0, streamSerialNumber, pageCount++, 1,
new byte[] {(byte) (comment.length() + 8)});
data = buildSpeexComment(comment);
chksum = OggCrc.checksum(0, header, 0, header.length);
chksum = OggCrc.checksum(chksum, data, 0, data.length);
writeInt(header, 22, chksum);
out.write(header);
out.write(data);
}
/**
* Writes a packet of audio.
* @param data - audio data.
* @param offset - the offset from which to start reading the data.
* @param len - the length of data to read.
* @exception IOException
*/
public void writePacket(final byte[] data,
final int offset,
final int len)
throws IOException
{
if (len <= 0) { // nothing to write
return;
}
if (packetCount > PACKETS_PER_OGG_PAGE) {
flush(false);
}
System.arraycopy(data, offset, dataBuffer, dataBufferPtr, len);
dataBufferPtr += len;
headerBuffer[headerBufferPtr++]=(byte)len;
packetCount++;
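/* advance the granule position by one packet worth of samples:
   nframes frames of 160 (NB), 320 (WB) or 640 (UWB) samples each */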
granulepos += nframes * (mode==2 ? 640 : (mode==1 ? 320 : 160));
}
/**
* Flush the Ogg page out of the buffers into the file.
* @param eos - end of stream
* @exception IOException
*/
private void flush(final boolean eos)
throws IOException
{
int chksum;
byte[] header;
/* writes the OGG header page */
header = buildOggPageHeader((eos ? 4 : 0), granulepos, streamSerialNumber,
pageCount++, packetCount, headerBuffer);
chksum = OggCrc.checksum(0, header, 0, header.length);
chksum = OggCrc.checksum(chksum, dataBuffer, 0, dataBufferPtr);
writeInt(header, 22, chksum);
out.write(header);
out.write(dataBuffer, 0, dataBufferPtr);
dataBufferPtr = 0;
headerBufferPtr = 0;
packetCount = 0;
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/PcmWaveWriter.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: PcmWaveWriter.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: PcmWaveWriter.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
package org.xiph.speex;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
/**
* Writes basic PCM wave files from binary audio data.
*
* <p>Here's an example that writes 2 seconds of silence
* <pre>
* PcmWaveWriter wsw = new PcmWaveWriter(44100, 2);
* byte[] silence = new byte[2*2*44100]; // 1 second of 16-bit stereo silence
* wsw.open("C:\\out.wav");
* wsw.writeHeader("");
* wsw.writePacket(silence, 0, silence.length);
* wsw.writePacket(silence, 0, silence.length);
* wsw.close();
* </pre>
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class PcmWaveWriter
extends AudioFileWriter
{
/** Wave type code of PCM */
public static final short WAVE_FORMAT_PCM = (short) 0x01;
/** Wave type code of Speex */
public static final short WAVE_FORMAT_SPEEX = (short) 0xa109;
/**
* Table describing the number of frames per packet in a Speex Wave file,
* indexed by mode-1 (where mode is 1=NB, 2=WB, 3=UWB), channels-1 (1=mono, 2=stereo)
* and the quality setting (0 to 10).
* See end of file for excerpt from SpeexACM code for more explanations.
*/
public static final int[][][] WAVE_FRAME_SIZES = new int[][][]
{{{8, 8, 8, 1, 1, 2, 2, 2, 2, 2, 2}, // NB mono
{2, 1, 1, 7, 7, 8, 8, 8, 8, 3, 3}}, // NB stereo
{{8, 8, 8, 2, 1, 1, 2, 2, 2, 2, 2}, // WB mono
{1, 2, 2, 8, 7, 6, 3, 3, 3, 3, 3}}, // WB stereo
{{8, 8, 8, 1, 2, 2, 1, 1, 1, 1, 1}, // UWB mono
{2, 1, 1, 7, 8, 3, 6, 6, 5, 5, 5}}}; // UWB stereo
/**
* Table describing the number of bits per Speex frame, indexed by
* mode-1 (where mode is 1=NB, 2=WB, 3=UWB), channels-1 (1=mono, 2=stereo) and the quality
* setting (0 to 10).
* See end of file for excerpt from SpeexACM code for more explanations.
*/
public static final int[][][] WAVE_BITS_PER_FRAME = new int[][][]
{{{ 43, 79, 119, 160, 160, 220, 220, 300, 300, 364, 492}, // NB mono
{ 60, 96, 136, 177, 177, 237, 237, 317, 317, 381, 509}}, // NB stereo
{{ 79, 115, 155, 196, 256, 336, 412, 476, 556, 684, 844}, // WB mono
{ 96, 132, 172, 213, 273, 353, 429, 493, 573, 701, 861}}, // WB stereo
{{ 83, 151, 191, 232, 292, 372, 448, 512, 592, 720, 880}, // UWB mono
{100, 168, 208, 249, 309, 389, 465, 529, 609, 737, 897}}}; // UWB stereo
private RandomAccessFile raf;
/** Defines the encoder mode (0=NB, 1=WB and 2=UWB). */
private int mode;
/** */
private int quality;
/** Defines the sampling rate of the audio input. */
private int sampleRate;
/** Defines the number of channels of the audio input (1=mono, 2=stereo). */
private int channels;
/** Defines the number of frames per speex packet. */
private int nframes;
/** Defines whether or not to use VBR (Variable Bit Rate). */
private boolean vbr;
/** */
private int size;
/** */
private boolean isPCM;
/**
* Constructor.
*/
public PcmWaveWriter()
{
size = 0;
}
/**
* Constructor.
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
*/
public PcmWaveWriter(final int sampleRate, final int channels)
{
this();
setPCMFormat(sampleRate, channels);
}
/**
* Constructor.
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param quality
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @param nframes the number of frames per speex packet.
* @param vbr
*/
public PcmWaveWriter(final int mode,
final int quality,
final int sampleRate,
final int channels,
final int nframes,
final boolean vbr)
{
this();
setSpeexFormat(mode, quality, sampleRate, channels, nframes, vbr);
}
/**
* Sets the output format for a PCM Wave file.
* Must be called before writeHeader().
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
*/
private void setPCMFormat(final int sampleRate, final int channels)
{
this.channels = channels;
this.sampleRate = sampleRate;
isPCM = true;
}
/**
* Sets the output format for a Speex Wave file.
* Must be called before writeHeader().
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param quality
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @param nframes the number of frames per speex packet.
* @param vbr
*/
private void setSpeexFormat(final int mode,
final int quality,
final int sampleRate,
final int channels,
final int nframes,
final boolean vbr)
{
this.mode = mode;
this.quality = quality;
this.sampleRate = sampleRate;
this.channels = channels;
this.nframes = nframes;
this.vbr = vbr;
isPCM = false;
}
/**
* Closes the output file.
* MUST be called to have a correct stream.
* @exception IOException if there was an exception closing the Audio Writer.
*/
public void close()
throws IOException
{
/* update the total file length field from RIFF chunk */
raf.seek(4);
int fileLength = (int) raf.length() - 8;
writeInt(raf, fileLength);
/* update the data chunk length size */
raf.seek(40);
writeInt(raf, size);
/* close the output file */
raf.close();
}
/**
* Open the output file.
* @param file - file to open.
* @exception IOException if there was an exception opening the Audio Writer.
*/
public void open(final File file)
throws IOException
{
file.delete();
raf = new RandomAccessFile(file, "rw");
size = 0;
}
/**
* Open the output file.
* @param filename filename to open.
* @exception IOException if there was an exception opening the Audio Writer.
*/
public void open(final String filename)
throws IOException
{
open(new File(filename));
}
/**
* Writes the initial data chunks that start the wave file.
* Prepares file for data samples to be written.
* @param comment the comment to embed in the header (ignored for PCM files).
* @exception IOException
*/
public void writeHeader(final String comment)
throws IOException
{
/* writes the RIFF chunk indicating wave format */
byte[] chkid = "RIFF".getBytes();
raf.write(chkid, 0, chkid.length);
writeInt(raf, 0); /* total length must be blank */
chkid = "WAVE".getBytes();
raf.write(chkid, 0, chkid.length);
/* format subchunk: of size 16 */
chkid = "fmt ".getBytes();
raf.write(chkid, 0, chkid.length);
if (isPCM) {
writeInt(raf, 16); // Size of format chunk
writeShort(raf, WAVE_FORMAT_PCM); // Format tag: PCM
writeShort(raf, (short)channels); // Number of channels
writeInt(raf, sampleRate); // Sampling frequency
writeInt(raf, sampleRate*channels*2); // Average bytes per second
writeShort(raf, (short) (channels*2)); // Blocksize of data
writeShort(raf, (short) 16); // Bits per sample
}
else {
int length = comment.length();
writeInt(raf, (short) (18+2+80+length)); // Size of format chunk
writeShort(raf, WAVE_FORMAT_SPEEX); // Format tag: Speex
writeShort(raf, (short)channels); // Number of channels
writeInt(raf, sampleRate); // Sampling frequency
writeInt(raf, (calculateEffectiveBitrate(mode, channels, quality) + 7) >> 3); // Average bytes per second
writeShort(raf, (short) calculateBlockSize(mode, channels, quality)); // Blocksize of data
writeShort(raf, (short) quality); // Bits per sample
writeShort(raf, (short) (2+80+length)); // The count in bytes of the extra size
raf.writeByte(0xff & 1); // ACM major version number
raf.writeByte(0xff & 0); // ACM minor version number
raf.write(buildSpeexHeader(sampleRate, mode, channels, vbr, nframes));
raf.writeBytes(comment);
}
/* write the start of data chunk */
chkid = "data".getBytes();
raf.write(chkid, 0, chkid.length);
writeInt(raf, 0);
}
/**
* Writes a packet of audio.
* @param data audio data
* @param offset the offset from which to start reading the data.
* @param len the length of data to read.
* @exception IOException
*/
public void writePacket(final byte[] data,
final int offset,
final int len)
throws IOException
{
raf.write(data, offset, len);
size+= len;
}
/**
* Calculates effective bitrate (considering padding).
* See end of file for excerpt from SpeexACM code for more explanations.
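* <p>For example, for narrowband mono at quality 8 the tables give 2 frames per
* block of 300 bits each: the block is padded to (2*300+7)>>3 = 75 bytes, and at
* 50 frames per second the effective rate is 75*8*50/2 = 15000 bit/s, matching
* the SpeexACM table at the end of this file.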
* @param mode
* @param channels
* @param quality
* @return effective bitrate (considering padding).
*/
private static final int calculateEffectiveBitrate(final int mode,
final int channels,
final int quality)
{
return ((((WAVE_FRAME_SIZES[mode-1][channels-1][quality] *
WAVE_BITS_PER_FRAME[mode-1][channels-1][quality]) + 7) >> 3) *
50 * 8) / WAVE_FRAME_SIZES[mode-1][channels-1][quality];
}
/**
* Calculates block size (considering padding).
* See end of file for excerpt from SpeexACM code for more explanations.
* @param mode
* @param channels
* @param quality
* @return block size (considering padding).
*/
private static final int calculateBlockSize(final int mode,
final int channels,
final int quality)
{
return (((WAVE_FRAME_SIZES[mode-1][channels-1][quality] *
WAVE_BITS_PER_FRAME[mode-1][channels-1][quality]) + 7) >> 3);
}
}
// The following is taken from the SpeexACM 1.0.1.1 Source code (codec.c file).
//
// This array describes how many bits are required by an encoded audio frame.
// It also specifies the optimal framesperblock parameter to minimize
// padding loss. It also lists the effective bitrate (considering padding).
//
// The array indices are rate, channels, quality (each as a 0 based index)
//
/*
struct tagQualityInfo {
UINT nBitsPerFrame;
UINT nFrameSize;
UINT nFramesPerBlock;
UINT nEffectiveBitrate;
} QualityInfo[3][2][11] = {
43, 160, 8, 2150, // 8000 1 0
79, 160, 8, 3950, // 8000 1 1
119, 160, 8, 5950, // 8000 1 2
160, 160, 1, 8000, // 8000 1 3
160, 160, 1, 8000, // 8000 1 4
220, 160, 2, 11000, // 8000 1 5
220, 160, 2, 11000, // 8000 1 6
300, 160, 2, 15000, // 8000 1 7
300, 160, 2, 15000, // 8000 1 8
364, 160, 2, 18200, // 8000 1 9
492, 160, 2, 24600, // 8000 1 10
60, 160, 2, 3000, // 8000 2 0
96, 160, 1, 4800, // 8000 2 1
136, 160, 1, 6800, // 8000 2 2
177, 160, 7, 8857, // 8000 2 3
177, 160, 7, 8857, // 8000 2 4
237, 160, 8, 11850, // 8000 2 5
237, 160, 8, 11850, // 8000 2 6
317, 160, 8, 15850, // 8000 2 7
317, 160, 8, 15850, // 8000 2 8
381, 160, 3, 19066, // 8000 2 9
509, 160, 3, 25466, // 8000 2 10
79, 320, 8, 3950, // 16000 1 0
115, 320, 8, 5750, // 16000 1 1
155, 320, 8, 7750, // 16000 1 2
196, 320, 2, 9800, // 16000 1 3
256, 320, 1, 12800, // 16000 1 4
336, 320, 1, 16800, // 16000 1 5
412, 320, 2, 20600, // 16000 1 6
476, 320, 2, 23800, // 16000 1 7
556, 320, 2, 27800, // 16000 1 8
684, 320, 2, 34200, // 16000 1 9
844, 320, 2, 42200, // 16000 1 10
96, 320, 1, 4800, // 16000 2 0
132, 320, 2, 6600, // 16000 2 1
172, 320, 2, 8600, // 16000 2 2
213, 320, 8, 10650, // 16000 2 3
273, 320, 7, 13657, // 16000 2 4
353, 320, 6, 17666, // 16000 2 5
429, 320, 3, 21466, // 16000 2 6
493, 320, 3, 24666, // 16000 2 7
573, 320, 3, 28666, // 16000 2 8
701, 320, 3, 35066, // 16000 2 9
861, 320, 3, 43066, // 16000 2 10
83, 640, 8, 4150, // 32000 1 0
151, 640, 8, 7550, // 32000 1 1
191, 640, 8, 9550, // 32000 1 2
232, 640, 1, 11600, // 32000 1 3
292, 640, 2, 14600, // 32000 1 4
372, 640, 2, 18600, // 32000 1 5
448, 640, 1, 22400, // 32000 1 6
512, 640, 1, 25600, // 32000 1 7
592, 640, 1, 29600, // 32000 1 8
720, 640, 1, 36000, // 32000 1 9
880, 640, 1, 44000, // 32000 1 10
100, 640, 2, 5000, // 32000 2 0
168, 640, 1, 8400, // 32000 2 1
208, 640, 1, 10400, // 32000 2 2
249, 640, 7, 12457, // 32000 2 3
309, 640, 8, 15450, // 32000 2 4
389, 640, 3, 19466, // 32000 2 5
465, 640, 6, 23266, // 32000 2 6
529, 640, 6, 26466, // 32000 2 7
609, 640, 5, 30480, // 32000 2 8
737, 640, 5, 36880, // 32000 2 9
897, 640, 5, 44880, // 32000 2 10
};
*/
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/RawWriter.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2004 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: RawWriter.java *
* *
* Author: Marc GIMPEL *
* *
* Date: 6th January 2004 *
* *
******************************************************************************/
/* $Id: RawWriter.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
package org.xiph.speex;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.FileOutputStream;
/**
* Raw Audio File Writer.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class RawWriter
extends AudioFileWriter
{
private OutputStream out;
/**
* Closes the output file.
* @exception IOException if there was an exception closing the Audio Writer.
*/
public void close()
throws IOException
{
out.close();
}
/**
* Open the output file.
* @param file - file to open.
* @exception IOException if there was an exception opening the Audio Writer.
*/
public void open(final File file)
throws IOException
{
file.delete();
out = new FileOutputStream(file);
}
/**
* Open the output file.
* @param filename - file to open.
* @exception IOException if there was an exception opening the Audio Writer.
*/
public void open(final String filename)
throws IOException
{
open(new File(filename));
}
/**
* Writes the header.
* A raw audio file has no header, so this method does nothing.
* @param comment ignored.
* @exception IOException
*/
public void writeHeader(final String comment)
throws IOException
{
// a raw audio file has no header
}
/**
* Writes a packet of audio.
* @param data audio data
* @param offset the offset from which to start reading the data.
* @param len the length of data to read.
* @exception IOException
*/
public void writePacket(final byte[] data,
final int offset,
final int len)
throws IOException
{
out.write(data, offset, len);
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SbCodec.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SbCodec.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 14th July 2003 *
* *
******************************************************************************/
/* $Id: SbCodec.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
package org.xiph.speex;
/**
* Sideband Codec.
* This class contains all the basic structures needed by the Sideband
* encoder and decoder.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class SbCodec
extends NbCodec
{
//---------------------------------------------------------------------------
// Constants
//---------------------------------------------------------------------------
/** The Sideband Frame Size gives the size in bits of a Sideband frame for a given sideband submode. */
public static final int[] SB_FRAME_SIZE = {4, 36, 112, 192, 352, -1, -1, -1};
/** The Sideband Submodes gives the number of submodes possible for the Sideband codec. */
public static final int SB_SUBMODES = 8;
/** The Sideband Submode Bits gives the number of bits used to encode the Sideband Submode. */
public static final int SB_SUBMODE_BITS = 3;
/** Quadratic Mirror Filter Order */
public static final int QMF_ORDER = 64;
//---------------------------------------------------------------------------
// Parameters
//---------------------------------------------------------------------------
/** */
protected int fullFrameSize;
/** */
protected float foldingGain;
//---------------------------------------------------------------------------
// Variables
//---------------------------------------------------------------------------
/** */
protected float[] high;
/** */
protected float[] y0, y1;
/** */
protected float[] x0d;
/** */
protected float[] g0_mem, g1_mem;
/**
* Wideband initialisation
*/
public void wbinit()
{
// Initialize SubModes
submodes = buildWbSubModes();
submodeID = 3;
// Initialize narrowband parameters and variables
//init(160, 40, 8, 640, .9f);
}
/**
* Ultra-wideband initialisation
*/
public void uwbinit()
{
// Initialize SubModes
submodes = buildUwbSubModes();
submodeID = 1;
// Initialize narrowband parameters and variables
//init(320, 80, 8, 1280, .7f);
}
/**
* Initialisation
* @param frameSize
* @param subframeSize
* @param lpcSize
* @param bufSize
* @param foldingGain
*/
protected void init(final int frameSize,
final int subframeSize,
final int lpcSize,
final int bufSize,
final float foldingGain)
{
super.init(frameSize, subframeSize, lpcSize, bufSize);
this.fullFrameSize = 2*frameSize;
this.foldingGain = foldingGain;
lag_factor = 0.002f;
high = new float[fullFrameSize];
y0 = new float[fullFrameSize];
y1 = new float[fullFrameSize];
x0d = new float[frameSize];
g0_mem = new float[QMF_ORDER];
g1_mem = new float[QMF_ORDER];
}
/**
* Build wideband submodes.
* @return the wideband submodes.
*/
protected static SubMode[] buildWbSubModes()
{
/* Initialize Long Term Predictions */
HighLspQuant highLU = new HighLspQuant();
/* Initialize Codebook Searches */
SplitShapeSearch ssCbHighLbrSearch = new SplitShapeSearch(40, 10, 4, hexc_10_32_table, 5, 0);
SplitShapeSearch ssCbHighSearch = new SplitShapeSearch(40, 8, 5, hexc_table, 7, 1);
/* Initialize wide-band modes */
SubMode[] wbSubModes = new SubMode[SB_SUBMODES];
wbSubModes[1] = new SubMode(0, 0, 1, 0, highLU, null, null, .75f, .75f, -1, 36);
wbSubModes[2] = new SubMode(0, 0, 1, 0, highLU, null, ssCbHighLbrSearch, .85f, .6f, -1, 112);
wbSubModes[3] = new SubMode(0, 0, 1, 0, highLU, null, ssCbHighSearch, .75f, .7f, -1, 192);
wbSubModes[4] = new SubMode(0, 0, 1, 1, highLU, null, ssCbHighSearch, .75f, .75f, -1, 352);
return wbSubModes;
}
/**
* Build ultra-wideband submodes.
* @return the ultra-wideband submodes.
*/
protected static SubMode[] buildUwbSubModes()
{
/* Initialize Long Term Predictions */
HighLspQuant highLU = new HighLspQuant();
SubMode[] uwbSubModes = new SubMode[SB_SUBMODES];
uwbSubModes[1] = new SubMode(0, 0, 1, 0, highLU, null, null, .75f, .75f, -1, 2);
return uwbSubModes;
}
/**
* Returns the size of a frame (e.g. 160 samples for a narrowband frame,
* 320 for wideband and 640 for ultra-wideband).
* @return the size of a frame (number of audio samples in a frame).
*/
public int getFrameSize()
{
return fullFrameSize;
}
/**
* Returns whether or not we are using Discontinuous Transmission encoding.
* @return whether or not we are using Discontinuous Transmission encoding.
*/
public boolean getDtx()
{
// TODO - should return DTX for the NbCodec
return dtx_enabled != 0;
}
/**
* Returns the excitation array.
* @return the excitation array.
*/
public float[] getExc()
{
int i;
float[] excTmp = new float[fullFrameSize];
for (i=0;i<frameSize;i++)
excTmp[2*i]=2*excBuf[excIdx+i];
return excTmp;
}
/**
* Returns the innovation array.
* @return the innovation array.
*/
public float[] getInnov()
{
return getExc();
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SbDecoder.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SbDecoder.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: SbDecoder.java,v 1.3 2005/05/27 13:17:00 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
import java.io.StreamCorruptedException;
/**
* Sideband Speex Decoder
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.3 $
*/
public class SbDecoder
extends SbCodec
implements Decoder
{
/** */
protected Decoder lowdec;
/** */
protected Stereo stereo;
/** */
protected boolean enhanced;
private float[] innov2;
/**
* Constructor
*/
public SbDecoder()
{
stereo = new Stereo();
enhanced = true;
}
/**
* Wideband initialisation
*/
public void wbinit()
{
lowdec = new NbDecoder();
((NbDecoder)lowdec).nbinit();
lowdec.setPerceptualEnhancement(enhanced);
// Initialize SubModes
super.wbinit();
// Initialize variables
init(160, 40, 8, 640, .7f);
}
/**
* Ultra-wideband initialisation
*/
public void uwbinit()
{
lowdec = new SbDecoder();
((SbDecoder)lowdec).wbinit();
lowdec.setPerceptualEnhancement(enhanced);
// Initialize SubModes
super.uwbinit();
// Initialize variables
init(320, 80, 8, 1280, .5f);
}
/**
* Initialisation
* @param frameSize
* @param subframeSize
* @param lpcSize
* @param bufSize
* @param foldingGain
*/
public void init(final int frameSize,
final int subframeSize,
final int lpcSize,
final int bufSize,
final float foldingGain)
{
super.init(frameSize, subframeSize, lpcSize, bufSize, foldingGain);
excIdx = 0;
innov2 = new float[subframeSize];
}
/**
* Decode the given input bits.
* @param bits - Speex bits buffer.
* @param out - the decoded mono audio frame.
* @return 1 if a terminator was found, 0 if not.
* @throws StreamCorruptedException If there is an error detected in the
* data stream.
*/
public int decode(final Bits bits, final float[] out)
throws StreamCorruptedException
{
int i, sub, wideband, ret;
float[] low_pi_gain, low_exc, low_innov;
/* Decode the low-band */
ret = lowdec.decode(bits, x0d);
if (ret != 0) {
return ret;
}
boolean dtx = lowdec.getDtx();
if (bits == null) {
decodeLost(out, dtx);
return 0;
}
/* Check "wideband bit" */
wideband = bits.peek();
if (wideband!=0) {
/*Regular wideband frame, read the submode*/
wideband = bits.unpack(1);
submodeID = bits.unpack(3);
}
else {
/* was a narrowband frame, set "null submode"*/
submodeID = 0;
}
for (i=0;i<frameSize;i++)
excBuf[i]=0;
/* If null mode (no transmission), just set a couple things to zero*/
if (submodes[submodeID] == null) {
if (dtx) {
decodeLost(out, true);
return 0;
}
for (i=0;i<frameSize;i++)
excBuf[i]=VERY_SMALL;
first=1;
/* Final signal synthesis from excitation */
Filters.iir_mem2(excBuf, excIdx, interp_qlpc, high, 0, frameSize,
lpcSize, mem_sp);
filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
for (i=0;i<fullFrameSize;i++)
out[i]=2*(y0[i]-y1[i]);
return 0;
}
low_pi_gain = lowdec.getPiGain();
low_exc = lowdec.getExc();
low_innov = lowdec.getInnov();
submodes[submodeID].lsqQuant.unquant(qlsp, lpcSize, bits);
if (first!=0) {
for (i=0;i<lpcSize;i++)
old_qlsp[i] = qlsp[i];
}
for (sub=0;sub<nbSubframes;sub++) {
float tmp, filter_ratio, el=0.0f, rl=0.0f,rh=0.0f;
int subIdx=subframeSize*sub;
/* LSP interpolation */
tmp = (1.0f + sub)/nbSubframes;
for (i=0;i<lpcSize;i++)
interp_qlsp[i] = (1-tmp)*old_qlsp[i] + tmp*qlsp[i];
Lsp.enforce_margin(interp_qlsp, lpcSize, .05f);
/* LSPs to x-domain */
for (i=0;i<lpcSize;i++)
interp_qlsp[i] = (float)Math.cos(interp_qlsp[i]);
/* LSP to LPC */
m_lsp.lsp2lpc(interp_qlsp, interp_qlpc, lpcSize);
if (enhanced) {
float k1, k2, k3;
k1=submodes[submodeID].lpc_enh_k1;
k2=submodes[submodeID].lpc_enh_k2;
k3=k1-k2;
Filters.bw_lpc(k1, interp_qlpc, awk1, lpcSize);
Filters.bw_lpc(k2, interp_qlpc, awk2, lpcSize);
Filters.bw_lpc(k3, interp_qlpc, awk3, lpcSize);
}
/* Calculate response ratio between the low and high filters in the band middle (4000 Hz) */
tmp=1;
pi_gain[sub]=0;
for (i=0;i<=lpcSize;i++) {
rh += tmp*interp_qlpc[i];
tmp = -tmp;
pi_gain[sub]+=interp_qlpc[i];
}
rl = low_pi_gain[sub];
rl = 1/(Math.abs(rl)+.01f);
rh = 1/(Math.abs(rh)+.01f);
filter_ratio = Math.abs(.01f+rh)/(.01f+Math.abs(rl));
/* reset excitation buffer */
for (i=subIdx;i<subIdx+subframeSize;i++)
excBuf[i]=0;
if (submodes[submodeID].innovation==null) {
float g;
int quant;
quant = bits.unpack(5);
g = (float)Math.exp(((double)quant-10)/8.0);
g /= filter_ratio;
/* High-band excitation using the low-band excitation and a gain */
for (i=subIdx;i<subIdx+subframeSize;i++)
excBuf[i]=foldingGain*g*low_innov[i];
}
else {
float gc, scale;
int qgc = bits.unpack(4);
for (i=subIdx;i<subIdx+subframeSize;i++)
el+=low_exc[i]*low_exc[i];
gc = (float)Math.exp((1/3.7f)*qgc-2);
scale = gc*(float)Math.sqrt(1+el)/filter_ratio;
submodes[submodeID].innovation.unquant(excBuf, subIdx, subframeSize, bits);
for (i=subIdx;i<subIdx+subframeSize;i++)
excBuf[i]*=scale;
if (submodes[submodeID].double_codebook!=0) {
for (i=0;i<subframeSize;i++)
innov2[i]=0;
submodes[submodeID].innovation.unquant(innov2, 0, subframeSize, bits);
for (i=0;i<subframeSize;i++)
innov2[i]*=scale*(1/2.5f);
for (i=0;i<subframeSize;i++)
excBuf[subIdx+i] += innov2[i];
}
}
for (i=subIdx;i<subIdx+subframeSize;i++)
high[i]=excBuf[i];
if (enhanced) {
/* Use enhanced LPC filter */
Filters.filter_mem2(high, subIdx, awk2, awk1, subframeSize,
lpcSize, mem_sp, lpcSize);
Filters.filter_mem2(high, subIdx, awk3, interp_qlpc, subframeSize,
lpcSize, mem_sp, 0);
}
else {
/* Use regular filter */
for (i=0;i<lpcSize;i++)
mem_sp[lpcSize+i] = 0;
Filters.iir_mem2(high, subIdx, interp_qlpc, high, subIdx,
subframeSize, lpcSize, mem_sp);
}
}
filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
for (i=0;i<fullFrameSize;i++)
out[i]=2*(y0[i]-y1[i]);
for (i=0;i<lpcSize;i++)
old_qlsp[i] = qlsp[i];
first = 0;
return 0;
}
/**
* Decode when packets are lost.
* @param out - the generated mono audio frame.
* @param dtx
* @return 0 if successful.
*/
public int decodeLost(final float[] out, final boolean dtx)
{
int i;
int saved_modeid=0;
if (dtx) {
saved_modeid=submodeID;
submodeID=1;
}
else {
Filters.bw_lpc(0.99f, interp_qlpc, interp_qlpc, lpcSize);
}
first=1;
awk1=new float[lpcSize+1];
awk2=new float[lpcSize+1];
awk3=new float[lpcSize+1];
if (enhanced) {
float k1,k2,k3;
if (submodes[submodeID] != null) {
k1=submodes[submodeID].lpc_enh_k1;
k2=submodes[submodeID].lpc_enh_k2;
}
else {
k1 = k2 = 0.7f;
}
k3=k1-k2;
Filters.bw_lpc(k1, interp_qlpc, awk1, lpcSize);
Filters.bw_lpc(k2, interp_qlpc, awk2, lpcSize);
Filters.bw_lpc(k3, interp_qlpc, awk3, lpcSize);
}
/* Final signal synthesis from excitation */
if (!dtx) {
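/* Damp the remembered excitation a little on each genuinely lost frame */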
for (i=0;i<frameSize;i++)
excBuf[excIdx+i] *= .9;
}
for (i=0;i<frameSize;i++)
high[i]=excBuf[excIdx+i];
if (enhanced) {
/* Use enhanced LPC filter */
Filters.filter_mem2(high, 0, awk2, awk1, high, 0, frameSize,
lpcSize, mem_sp, lpcSize);
Filters.filter_mem2(high, 0, awk3, interp_qlpc, high, 0, frameSize,
lpcSize, mem_sp, 0);
}
else { /* Use regular filter */
for (i=0;i<lpcSize;i++)
mem_sp[lpcSize+i] = 0;
Filters.iir_mem2(high, 0, interp_qlpc, high, 0, frameSize, lpcSize,
mem_sp);
}
/*iir_mem2(st->exc, st->interp_qlpc, st->high, st->frame_size, st->lpcSize, st->mem_sp);*/
/* Reconstruct the original */
filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
for (i=0;i<fullFrameSize;i++)
out[i]=2*(y0[i]-y1[i]);
if (dtx) {
submodeID=saved_modeid;
}
return 0;
}
/**
* Decode the given bits to stereo.
* @param data - float array of size 2*frameSize, that contains the mono
* audio samples in the first half. When the function has completed, the
* array will contain the interlaced stereo audio samples.
* @param frameSize - the size of a frame of mono audio samples.
*/
public void decodeStereo(final float[] data, final int frameSize)
{
stereo.decode(data, frameSize);
}
/**
* Enables or disables perceptual enhancement.
* @param enhanced
*/
public void setPerceptualEnhancement(boolean enhanced)
{
this.enhanced = enhanced;
}
/**
* Returns whether perceptual enhancement is enabled or disabled.
* @return whether perceptual enhancement is enabled or disabled.
*/
public boolean getPerceptualEnhancement()
{
return enhanced;
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SbEncoder.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SbEncoder.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 9th April 2003 *
* *
******************************************************************************/
/* $Id: SbEncoder.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Wideband Speex Encoder
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class SbEncoder
extends SbCodec
implements Encoder
{
/** The Narrowband Quality map indicates which narrowband submode to use for the given wideband/ultra-wideband quality setting */
public static final int[] NB_QUALITY_MAP = {1, 8, 2, 3, 4, 5, 5, 6, 6, 7, 7};
/** The Wideband Quality map indicates which sideband submode to use for the given wideband/ultra-wideband quality setting */
public static final int[] WB_QUALITY_MAP = {1, 1, 1, 1, 1, 1, 2, 2, 3, 3, 4};
/** The Ultra-wideband Quality map indicates which sideband submode to use for the given ultra-wideband quality setting */
public static final int[] UWB_QUALITY_MAP = {0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
/** The encoder for the lower half of the Spectrum. */
protected Encoder lowenc;
private float[] x1d;
private float[] h0_mem;
private float[] buf;
private float[] swBuf; /** Weighted signal buffer */
private float[] res;
private float[] target;
private float[] window;
private float[] lagWindow;
private float[] rc; /** Reflection coefficients */
private float[] autocorr; /** auto-correlation */
private float[] lsp; /** LSPs for current frame */
private float[] old_lsp; /** LSPs for previous frame */
private float[] interp_lsp; /** Interpolated LSPs */
private float[] interp_lpc; /** Interpolated LPCs */
private float[] bw_lpc1; /** LPCs after bandwidth expansion by gamma1 for perceptual weighting*/
private float[] bw_lpc2; /** LPCs after bandwidth expansion by gamma2 for perceptual weighting*/
private float[] mem_sp2;
private float[] mem_sw; /** Filter memory for perceptually-weighted signal */
/** */
protected int nb_modes;
private boolean uwb;
protected int complexity; /** Complexity setting (0-10 from least complex to most complex) */
protected int vbr_enabled; /** 1 for enabling VBR, 0 otherwise */
protected int vad_enabled; /** 1 for enabling VAD, 0 otherwise */
protected int abr_enabled; /** ABR setting (in bps), 0 if off */
protected float vbr_quality; /** Quality setting for VBR encoding */
protected float relative_quality; /** Relative quality that will be needed by VBR */
protected float abr_drift;
protected float abr_drift2;
protected float abr_count;
protected int sampling_rate;
protected int submodeSelect; /** Mode chosen by the user (may differ from submodeID if VAD is on) */
/**
* Wideband initialisation
*/
public void wbinit()
{
lowenc = new NbEncoder();
((NbEncoder)lowenc).nbinit();
// Initialize SubModes
super.wbinit();
// Initialize variables
init(160, 40, 8, 640, .9f);
uwb = false;
nb_modes = 5;
sampling_rate = 16000;
}
/**
* Ultra-wideband initialisation
*/
public void uwbinit()
{
lowenc = new SbEncoder();
((SbEncoder)lowenc).wbinit();
// Initialize SubModes
super.uwbinit();
// Initialize variables
init(320, 80, 8, 1280, .7f);
uwb = true;
nb_modes = 2;
sampling_rate = 32000;
}
/**
* Initialisation
* @param frameSize
* @param subframeSize
* @param lpcSize
* @param bufSize
* @param foldingGain
*/
public void init(final int frameSize,
final int subframeSize,
final int lpcSize,
final int bufSize,
final float foldingGain)
{
super.init(frameSize, subframeSize, lpcSize, bufSize, foldingGain);
complexity = 3; // in C it's 2 here, but set to 3 automatically by the encoder
vbr_enabled = 0; // disabled by default
vad_enabled = 0; // disabled by default
abr_enabled = 0; // disabled by default
vbr_quality = 8;
submodeSelect = submodeID;
x1d = new float[frameSize];
h0_mem = new float[QMF_ORDER];
buf = new float[windowSize];
swBuf = new float[frameSize];
res = new float[frameSize];
target = new float[subframeSize];
window = Misc.window(windowSize, subframeSize);
lagWindow = Misc.lagWindow(lpcSize, lag_factor);
rc = new float[lpcSize];
autocorr = new float[lpcSize+1];
lsp = new float[lpcSize];
old_lsp = new float[lpcSize];
interp_lsp = new float[lpcSize];
interp_lpc = new float[lpcSize+1];
bw_lpc1 = new float[lpcSize+1];
bw_lpc2 = new float[lpcSize+1];
mem_sp2 = new float[lpcSize];
mem_sw = new float[lpcSize];
abr_count = 0;
}
/**
* Encode the given input signal.
* @param bits - Speex bits buffer.
* @param in - the raw mono audio frame to encode.
* @return 1 if successful.
*/
public int encode(final Bits bits, final float[] in)
{
int i;
float[] mem, innov, syn_resp;
float[] low_pi_gain, low_exc, low_innov;
int dtx;
/* Compute the two sub-bands by filtering with h0 and h1*/
Filters.qmf_decomp(in, h0, x0d, x1d, fullFrameSize, QMF_ORDER, h0_mem);
/* Encode the narrowband part*/
lowenc.encode(bits, x0d);
/* High-band buffering / sync with low band */
for (i=0;i<windowSize-frameSize;i++)
high[i] = high[frameSize+i];
for (i=0;i<frameSize;i++)
high[windowSize-frameSize+i]=x1d[i];
System.arraycopy(excBuf, frameSize, excBuf, 0, bufSize-frameSize);
low_pi_gain = lowenc.getPiGain();
low_exc = lowenc.getExc();
low_innov = lowenc.getInnov();
int low_mode = lowenc.getMode();
if (low_mode==0)
dtx=1;
else
dtx=0;
/* Start encoding the high-band */
for (i=0; i<windowSize; i++)
buf[i] = high[i] * window[i];
/* Compute auto-correlation */
Lpc.autocorr(buf, autocorr, lpcSize+1, windowSize);
autocorr[0] += 1; /* prevents NANs */
autocorr[0] *= lpc_floor; /* Noise floor in auto-correlation domain */
/* Lag windowing: equivalent to filtering in the power-spectrum domain */
for (i=0; i<lpcSize+1; i++)
autocorr[i] *= lagWindow[i];
/* Levinson-Durbin */
Lpc.wld(lpc, autocorr, rc, lpcSize); // tmperr
System.arraycopy(lpc, 0, lpc, 1, lpcSize);
lpc[0]=1;
/* LPC to LSPs (x-domain) transform */
int roots = Lsp.lpc2lsp (lpc, lpcSize, lsp, 15, 0.2f);
if (roots != lpcSize) {
roots = Lsp.lpc2lsp (lpc, lpcSize, lsp, 11, 0.02f);
if (roots != lpcSize) {
/*If we can't find all LSP's, do some damage control and use a flat filter*/
for (i=0; i<lpcSize; i++) {
lsp[i]=(float)Math.cos(Math.PI*((float)(i+1))/(lpcSize+1));
}
}
}
/* x-domain to angle domain*/
for (i=0; i<lpcSize; i++)
lsp[i] = (float) Math.acos(lsp[i]);
float lsp_dist=0;
for (i=0;i<lpcSize;i++)
lsp_dist += (old_lsp[i] - lsp[i])*(old_lsp[i] - lsp[i]);
/*VBR stuff*/
if ((vbr_enabled != 0 || vad_enabled != 0) && dtx == 0) {
float e_low=0, e_high=0;
float ratio;
if (abr_enabled != 0) {
float qual_change=0;
if (abr_drift2 * abr_drift > 0) {
/* Only adapt if long-term and short-term drift are the same sign */
qual_change = -.00001f*abr_drift/(1+abr_count);
if (qual_change>.1f)
qual_change=.1f;
if (qual_change<-.1f)
qual_change=-.1f;
}
vbr_quality += qual_change;
if (vbr_quality>10)
vbr_quality=10;
if (vbr_quality<0)
vbr_quality=0;
}
for (i=0;i<frameSize;i++) {
e_low += x0d[i]* x0d[i];
e_high += high[i]* high[i];
}
ratio = (float) Math.log((1+e_high)/(1+e_low));
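/* ratio compares high-band to low-band energy in the log domain; it is clamped to [-4, 2] just below */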
relative_quality = lowenc.getRelativeQuality();
if (ratio<-4)
ratio=-4;
if (ratio>2)
ratio=2;
/*if (ratio>-2)*/
if (vbr_enabled != 0) {
int modeid;
modeid = nb_modes-1;
relative_quality+=1.0*(ratio+2);
if (relative_quality<-1) {
relative_quality=-1;
}
while (modeid != 0) {
int v1;
float thresh;
v1=(int)Math.floor(vbr_quality);
if (v1==10)
thresh = Vbr.hb_thresh[modeid][v1];
else
thresh = (vbr_quality-v1) * Vbr.hb_thresh[modeid][v1+1] +
(1+v1-vbr_quality) * Vbr.hb_thresh[modeid][v1];
if (relative_quality >= thresh)
break;
modeid--;
}
setMode(modeid);
if (abr_enabled != 0)
{
int bitrate;
bitrate = getBitRate();
abr_drift+=(bitrate-abr_enabled);
abr_drift2 = .95f*abr_drift2 + .05f*(bitrate-abr_enabled);
abr_count += 1.0;
}
}
else {
/* VAD only */
int modeid;
if (relative_quality<2.0)
modeid=1;
else
modeid=submodeSelect;
/*speex_encoder_ctl(state, SPEEX_SET_MODE, &mode);*/
submodeID=modeid;
}
/*fprintf (stderr, "%f %f\n", ratio, low_qual);*/
}
bits.pack(1, 1);
if (dtx != 0)
bits.pack(0, SB_SUBMODE_BITS);
else
bits.pack(submodeID, SB_SUBMODE_BITS);
/* If null mode (no transmission), just set a couple things to zero*/
if (dtx != 0 || submodes[submodeID] == null)
{
for (i=0; i<frameSize; i++)
excBuf[excIdx+i]=swBuf[i]=VERY_SMALL;
for (i=0; i<lpcSize; i++)
mem_sw[i]=0;
first=1;
/* Final signal synthesis from excitation */
Filters.iir_mem2(excBuf, excIdx, interp_qlpc, high, 0, subframeSize, lpcSize, mem_sp);
/* Reconstruct the original */
filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
for (i=0; i<fullFrameSize; i++)
in[i]=2*(y0[i]-y1[i]);
if (dtx != 0)
return 0;
else
return 1;
}
/* LSP quantization */
submodes[submodeID].lsqQuant.quant(lsp, qlsp, lpcSize, bits);
if (first != 0)
{
for (i=0; i<lpcSize; i++)
old_lsp[i] = lsp[i];
for (i=0; i<lpcSize; i++)
old_qlsp[i] = qlsp[i];
}
mem = new float[lpcSize];
syn_resp = new float[subframeSize];
innov = new float[subframeSize];
for (int sub=0; sub<nbSubframes; sub++) {
float tmp, filter_ratio;
int exc, sp, sw, resp;
int offset;
float rl, rh, eh=0, el=0;
int fold;
offset = subframeSize*sub;
sp=offset;
exc=excIdx+offset;
resp=offset;
sw=offset;
/* LSP interpolation (quantized and unquantized) */
tmp = (1.0f + sub)/nbSubframes;
for (i=0; i<lpcSize; i++)
interp_lsp[i] = (1-tmp)*old_lsp[i] + tmp*lsp[i];
for (i=0; i<lpcSize; i++)
interp_qlsp[i] = (1-tmp)*old_qlsp[i] + tmp*qlsp[i];
Lsp.enforce_margin(interp_lsp, lpcSize, .05f);
Lsp.enforce_margin(interp_qlsp, lpcSize, .05f);
/* Compute interpolated LPCs (quantized and unquantized) */
for (i=0; i<lpcSize; i++)
interp_lsp[i] = (float) Math.cos(interp_lsp[i]);
for (i=0; i<lpcSize; i++)
interp_qlsp[i] = (float) Math.cos(interp_qlsp[i]);
m_lsp.lsp2lpc(interp_lsp, interp_lpc, lpcSize);
m_lsp.lsp2lpc(interp_qlsp, interp_qlpc, lpcSize);
Filters.bw_lpc(gamma1, interp_lpc, bw_lpc1, lpcSize);
Filters.bw_lpc(gamma2, interp_lpc, bw_lpc2, lpcSize);
/* Compute mid-band (4000 Hz for wideband) response of low-band and high-band
filters */
rl=rh=0;
tmp=1;
pi_gain[sub]=0;
for (i=0; i<=lpcSize; i++) {
rh += tmp*interp_qlpc[i];
tmp = -tmp;
pi_gain[sub]+=interp_qlpc[i];
}
rl = low_pi_gain[sub];
rl=1/(Math.abs(rl)+.01f);
rh=1/(Math.abs(rh)+.01f);
/* Compute ratio, will help predict the gain */
filter_ratio=Math.abs(.01f+rh)/(.01f+Math.abs(rl));
fold = filter_ratio<5 ? 1 : 0;
/*printf ("filter_ratio %f\n", filter_ratio);*/
fold=0;
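/* Note: fold is forced to 0 and not read again in this loop; the folding vs. stochastic
   choice is actually taken from submodes[submodeID].innovation below. */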
/* Compute "real excitation" */
Filters.fir_mem2(high, sp, interp_qlpc, excBuf, exc, subframeSize, lpcSize, mem_sp2);
/* Compute energy of low-band and high-band excitation */
for (i=0; i<subframeSize; i++)
eh+=excBuf[exc+i]*excBuf[exc+i];
if (submodes[submodeID].innovation == null) {/* 1 for spectral folding excitation, 0 for stochastic */
float g;
/*speex_bits_pack(bits, 1, 1);*/
for (i=0; i<subframeSize; i++)
el+=low_innov[offset+i]*low_innov[offset+i];
/* Gain to use if we want to use the low-band excitation for high-band */
g=eh/(.01f+el);
g=(float) Math.sqrt(g);
g *= filter_ratio;
/*print_vec(&g, 1, "gain factor");*/
/* Gain quantization */
{
int quant = (int) Math.floor(.5 + 10 + 8.0 * Math.log((g+.0001)));
/*speex_warning_int("tata", quant);*/
if (quant<0)
quant=0;
if (quant>31)
quant=31;
bits.pack(quant, 5);
g=(float)(.1*Math.exp(quant/9.4));
}
/*printf ("folding gain: %f\n", g);*/
g /= filter_ratio;
} else {
float gc, scale, scale_1;
for (i=0; i<subframeSize; i++)
el+=low_exc[offset+i]*low_exc[offset+i];
/*speex_bits_pack(bits, 0, 1);*/
gc = (float) (Math.sqrt(1+eh)*filter_ratio/Math.sqrt((1+el)*subframeSize));
{
int qgc = (int)Math.floor(.5+3.7*(Math.log(gc)+2));
if (qgc<0)
qgc=0;
if (qgc>15)
qgc=15;
bits.pack(qgc, 4);
gc = (float) Math.exp((1/3.7)*qgc-2);
}
scale = gc*(float)Math.sqrt(1+el)/filter_ratio;
scale_1 = 1/scale;
for (i=0; i<subframeSize; i++)
excBuf[exc+i]=0;
excBuf[exc]=1;
Filters.syn_percep_zero(excBuf, exc, interp_qlpc, bw_lpc1, bw_lpc2, syn_resp, subframeSize, lpcSize);
/* Reset excitation */
for (i=0; i<subframeSize; i++)
excBuf[exc+i]=0;
/* Compute zero response (ringing) of A(z/g1) / ( A(z/g2) * Aq(z) ) */
for (i=0; i<lpcSize; i++)
mem[i]=mem_sp[i];
Filters.iir_mem2(excBuf, exc, interp_qlpc, excBuf, exc, subframeSize, lpcSize, mem);
for (i=0; i<lpcSize; i++)
mem[i]=mem_sw[i];
Filters.filter_mem2(excBuf, exc, bw_lpc1, bw_lpc2, res, resp, subframeSize, lpcSize, mem, 0);
/* Compute weighted signal */
for (i=0; i<lpcSize; i++)
mem[i]=mem_sw[i];
Filters.filter_mem2(high, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem, 0);
/* Compute target signal */
for (i=0; i<subframeSize; i++)
target[i]=swBuf[sw+i]-res[resp+i];
for (i=0; i<subframeSize; i++)
excBuf[exc+i]=0;
for (i=0; i<subframeSize; i++)
target[i]*=scale_1;
/* Reset excitation */
for (i=0; i<subframeSize; i++)
innov[i]=0;
/*print_vec(target, st->subframeSize, "\ntarget");*/
submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2,
lpcSize, subframeSize, innov, 0, syn_resp,
bits, (complexity+1)>>1);
/*print_vec(target, st->subframeSize, "after");*/
for (i=0; i<subframeSize; i++)
excBuf[exc+i] += innov[i]*scale;
if (submodes[submodeID].double_codebook != 0) {
float[] innov2 = new float[subframeSize];
for (i=0; i<subframeSize; i++)
innov2[i]=0;
for (i=0; i<subframeSize; i++)
target[i]*=2.5;
submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2,
lpcSize, subframeSize, innov2, 0, syn_resp,
bits, (complexity+1)>>1);
for (i=0; i<subframeSize; i++)
innov2[i]*=scale*(1/2.5);
for (i=0; i<subframeSize; i++)
excBuf[exc+i] += innov2[i];
}
}
/*Keep the previous memory*/
for (i=0; i<lpcSize; i++)
mem[i]=mem_sp[i];
/* Final signal synthesis from excitation */
Filters.iir_mem2(excBuf, exc, interp_qlpc, high, sp, subframeSize, lpcSize, mem_sp);
/* Compute weighted signal again, from synthesized speech (not sure it's the right thing) */
Filters.filter_mem2(high, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem_sw, 0);
}
//#ifndef RELEASE
/* Reconstruct the original */
filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
for (i=0; i<fullFrameSize; i++)
in[i]=2*(y0[i]-y1[i]);
//#endif
for (i=0; i<lpcSize; i++)
old_lsp[i] = lsp[i];
for (i=0; i<lpcSize; i++)
old_qlsp[i] = qlsp[i];
first=0;
return 1;
}
/**
* Returns the size in bits of an audio frame encoded with the current mode.
* @return the size in bits of an audio frame encoded with the current mode.
*/
public int getEncodedFrameSize()
{
int size = SB_FRAME_SIZE[submodeID];
size += lowenc.getEncodedFrameSize();
return size;
}
//---------------------------------------------------------------------------
// Speex Control Functions
//---------------------------------------------------------------------------
/**
* Sets the Quality.
* @param quality
*/
public void setQuality(int quality)
{
if (quality < 0) {
quality = 0;
}
if (quality > 10) {
quality = 10;
}
if (uwb) {
lowenc.setQuality(quality);
this.setMode(UWB_QUALITY_MAP[quality]);
}
else {
lowenc.setMode(NB_QUALITY_MAP[quality]);
this.setMode(WB_QUALITY_MAP[quality]);
}
}
/**
* Sets the Variable Bit Rate Quality.
* @param quality
*/
public void setVbrQuality(float quality)
{
vbr_quality = quality;
float qual = quality + 0.6f;
if (qual>10)
qual=10;
lowenc.setVbrQuality(qual);
int q = (int)Math.floor(.5+quality);
if (q>10)
q=10;
setQuality(q);
}
/**
* Sets whether or not to use Variable Bit Rate encoding.
* @param vbr
*/
public void setVbr(final boolean vbr)
{
// super.setVbr(vbr);
vbr_enabled = vbr ? 1 : 0;
lowenc.setVbr(vbr);
}
/**
* Sets the Average Bit Rate.
* @param abr
*/
public void setAbr(final int abr)
{
lowenc.setVbr(true);
// super.setAbr(abr);
abr_enabled = (abr!=0) ? 1 : 0;
vbr_enabled = 1;
{
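// Search downward from quality 10 for the highest quality setting whose
// bit rate does not exceed the requested average bit rate.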
int i=10, rate, target;
float vbr_qual;
target = abr;
while (i>=0)
{
setQuality(i);
rate = getBitRate();
if (rate <= target)
break;
i--;
}
vbr_qual=i;
if (vbr_qual<0)
vbr_qual=0;
setVbrQuality(vbr_qual);
abr_count=0;
abr_drift=0;
abr_drift2=0;
}
}
/**
* Returns the bitrate.
* @return the bitrate.
*/
public int getBitRate()
{
if (submodes[submodeID] != null)
return lowenc.getBitRate() + sampling_rate*submodes[submodeID].bits_per_frame/frameSize;
else
return lowenc.getBitRate() + sampling_rate*(SB_SUBMODE_BITS+1)/frameSize;
}
/**
* Sets the sampling rate.
* @param rate
*/
public void setSamplingRate(final int rate)
{
// super.setSamplingRate(rate);
sampling_rate = rate;
lowenc.setSamplingRate(rate);
}
/**
* Return LookAhead.
* @return LookAhead.
*/
public int getLookAhead()
{
return 2*lowenc.getLookAhead() + QMF_ORDER - 1;
}
/**
*
*/
// public void resetState()
// {
// }
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
/**
* Sets the encoding submode.
* @param mode
*/
public void setMode(int mode)
{
if (mode < 0) {
mode = 0;
}
submodeID = submodeSelect = mode;
}
/**
* Returns the encoding submode currently in use.
* @return the encoding submode currently in use.
*/
public int getMode()
{
return submodeID;
}
/**
* Sets the bitrate.
* @param bitrate
*/
public void setBitRate(final int bitrate)
{
for (int i=10; i>=0; i--) {
setQuality(i);
if (getBitRate() <= bitrate)
return;
}
}
/**
* Returns whether or not we are using Variable Bit Rate encoding.
* @return whether or not we are using Variable Bit Rate encoding.
*/
public boolean getVbr()
{
return vbr_enabled != 0;
}
/**
* Sets whether or not to use Voice Activity Detection encoding.
* @param vad
*/
public void setVad(final boolean vad)
{
vad_enabled = vad ? 1 : 0;
}
/**
* Returns whether or not we are using Voice Activity Detection encoding.
* @return whether or not we are using Voice Activity Detection encoding.
*/
public boolean getVad()
{
return vad_enabled != 0;
}
/**
* Sets whether or not to use Discontinuous Transmission encoding.
* @param dtx
*/
public void setDtx(final boolean dtx)
{
dtx_enabled = dtx ? 1 : 0;
}
/**
* Returns the Average Bit Rate used (0 if ABR is not turned on).
* @return the Average Bit Rate used (0 if ABR is not turned on).
*/
public int getAbr()
{
return abr_enabled;
}
/**
* Returns the Variable Bit Rate Quality.
* @return the Variable Bit Rate Quality.
*/
public float getVbrQuality()
{
return vbr_quality;
}
/**
* Sets the algorithmic complexity.
* @param complexity
*/
public void setComplexity(int complexity)
{
if (complexity < 0)
complexity = 0;
if (complexity > 10)
complexity = 10;
this.complexity = complexity;
}
/**
* Returns the algorithmic complexity.
* @return the algorithmic complexity.
*/
public int getComplexity()
{
return complexity;
}
/**
* Returns the sampling rate.
* @return the sampling rate.
*/
public int getSamplingRate()
{
return sampling_rate;
}
/**
* Returns the relative quality.
* @return the relative quality.
*/
public float getRelativeQuality()
{
return relative_quality;
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SpeexDecoder.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SpeexDecoder.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: SpeexDecoder.java,v 1.4 2005/05/27 13:15:54 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
import java.io.StreamCorruptedException;
/**
* Main Speex Decoder class.
* This class decodes the given Speex packets into PCM 16bit samples.
*
* <p>Here's an example that decodes and recovers one Speex packet, after the decoder
* has been initialised (the initialisation values shown below are illustrative).
* <pre>
* SpeexDecoder speexDecoder = new SpeexDecoder();
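* speexDecoder.init(1, 16000, 1, false); // wideband, 16000 Hz, mono, no enhancement (illustrative values)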
* speexDecoder.processData(data, packetOffset, packetSize);
* byte[] decoded = new byte[speexDecoder.getProcessedDataByteSize()];
* speexDecoder.getProcessedData(decoded, 0);
* </pre>
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.4 $
*/
public class SpeexDecoder
{
/**
* Version of the Speex Decoder
*/
public static final String VERSION = "Java Speex Decoder v0.9.7 ($Revision: 1.4 $)";
private int sampleRate;
private int channels;
private float[] decodedData;
private short[] outputData;
private int outputSize;
private Bits bits;
private Decoder decoder;
private int frameSize;
/**
* Constructor
*/
public SpeexDecoder()
{
bits = new Bits();
sampleRate = 0;
channels = 0;
}
/**
* Initialise the Speex Decoder.
* @param mode the mode of the decoder (0=NB, 1=WB, 2=UWB).
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @param enhanced whether to enable perceptual enhancement or not.
* @return true if initialisation successful.
*/
public boolean init(final int mode,
final int sampleRate,
final int channels,
final boolean enhanced)
{
switch (mode) {
case 0:
decoder = new NbDecoder();
((NbDecoder)decoder).nbinit();
break;
//Wideband
case 1:
decoder = new SbDecoder();
((SbDecoder)decoder).wbinit();
break;
case 2:
decoder = new SbDecoder();
((SbDecoder)decoder).uwbinit();
break;
//*/
default:
return false;
}
/* initialize the speex decoder */
decoder.setPerceptualEnhancement(enhanced);
/* set decoder format and properties */
this.frameSize = decoder.getFrameSize();
this.sampleRate = sampleRate;
this.channels = channels;
int secondSize = sampleRate*channels;
decodedData = new float[secondSize*2];
outputData = new short[secondSize*2];
outputSize = 0;
bits.init();
return true;
}
/**
* Returns the sample rate.
* @return the sample rate.
*/
public int getSampleRate()
{
return sampleRate;
}
/**
* Returns the number of channels.
* @return the number of channels.
*/
public int getChannels()
{
return channels;
}
/**
* Pull the decoded data out into a byte array at the given offset
* and returns the number of bytes processed and just read.
* @param data
* @param offset
* @return the number of bytes processed and just read.
*/
public int getProcessedData(final byte[] data, final int offset)
{
if (outputSize<=0) {
return outputSize;
}
for (int i=0; i<outputSize; i++) {
int dx = offset + (i<<1);
data[dx] = (byte) (outputData[i] & 0xff);
data[dx+1] = (byte) ((outputData[i] >> 8) & 0xff );
}
int size = outputSize*2;
outputSize = 0;
return size;
}
/**
* Pull the decoded data out into a short array at the given offset
* and returns the number of shorts processed and just read.
* @param data
* @param offset
* @return the number of samples processed and just read.
*/
public int getProcessedData(final short[] data, final int offset)
{
if (outputSize<=0) {
return outputSize;
}
System.arraycopy(outputData, 0, data, offset, outputSize);
int size = outputSize;
outputSize = 0;
return size;
}
/**
* Returns the number of bytes processed and ready to be read.
* @return the number of bytes processed and ready to be read.
*/
public int getProcessedDataByteSize()
{
return (outputSize*2);
}
/**
* This is where the actual decoding takes place
* @param data - the Speex data (frame) to decode.
* If it is null, the packet is supposed lost.
* @param offset - the offset from which to start reading the data.
* @param len - the length of data to read (Speex frame size).
* @throws StreamCorruptedException If the input stream is invalid.
*/
public void processData(final byte[] data,
final int offset,
final int len)
throws StreamCorruptedException
{
if (data == null) {
processData(true);
}
else {
/* read packet bytes into bitstream */
bits.read_from(data, offset, len);
processData(false);
}
}
/**
* This is where the actual decoding takes place.
* @param lost - true if the Speex packet has been lost.
* @throws StreamCorruptedException If the input stream is invalid.
*/
public void processData(final boolean lost)
throws StreamCorruptedException
{
int i;
/* decode the bitstream */
if (lost)
decoder.decode(null, decodedData);
else
decoder.decode(bits, decodedData);
if (channels == 2)
decoder.decodeStereo(decodedData, frameSize);
/* PCM saturation */
for (i=0; i<frameSize*channels; i++) {
if (decodedData[i]>32767.0f)
decodedData[i]=32767.0f;
else if (decodedData[i]<-32768.0f)
decodedData[i]=-32768.0f;
}
/* convert to short and save to buffer */
for (i=0; i<frameSize*channels; i++, outputSize++) {
outputData[outputSize] = (decodedData[i]>0) ?
(short) (decodedData[i]+.5) :
(short) (decodedData[i]-.5);
}
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SpeexEncoder.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SpeexEncoder.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 9th April 2003 *
* *
******************************************************************************/
/* $Id: SpeexEncoder.java,v 1.6 2005/05/27 13:15:54 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Main Speex Encoder class.
* This class encodes the given PCM 16bit samples into Speex packets.
*
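* <p>A minimal usage sketch, mirroring the decoder example in SpeexDecoder;
* the mode/quality/rate values and the <code>pcmFrame</code> buffer below are
* illustrative assumptions, not requirements of this class:
* <pre>
* SpeexEncoder speexEncoder = new SpeexEncoder();
* speexEncoder.init(1, 8, 16000, 1); // wideband, quality 8, 16000 Hz, mono
* // pcmFrame holds one frame of 16-bit little-endian PCM
* // (2 * speexEncoder.getFrameSize() bytes for mono audio)
* speexEncoder.processData(pcmFrame, 0, pcmFrame.length);
* byte[] encoded = new byte[speexEncoder.getProcessedDataByteSize()];
* speexEncoder.getProcessedData(encoded, 0);
* </pre>
*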
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.6 $
*/
public class SpeexEncoder
{
/**
* Version of the Speex Encoder
*/
public static final String VERSION = "Java Speex Encoder v0.9.7 ($Revision: 1.6 $)";
private Encoder encoder;
private Bits bits;
private float[] rawData;
private int sampleRate;
private int channels;
private int frameSize;
/**
* Constructor
*/
public SpeexEncoder()
{
bits = new Bits();
}
/**
* Initialisation
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param quality the quality setting of the encoder (between 0 and 10).
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @return true if initialisation successful.
*/
public boolean init(final int mode,
final int quality,
final int sampleRate,
final int channels)
{
switch (mode) {
case 0:
encoder = new NbEncoder();
((NbEncoder)encoder).nbinit();
break;
//Wideband
case 1:
encoder = new SbEncoder();
((SbEncoder)encoder).wbinit();
break;
case 2:
encoder = new SbEncoder();
((SbEncoder)encoder).uwbinit();
break;
//*/
default:
return false;
}
/* initialize the speex decoder */
encoder.setQuality(quality);
/* set decoder format and properties */
this.frameSize = encoder.getFrameSize();
this.sampleRate = sampleRate;
this.channels = channels;
rawData = new float[channels*frameSize];
bits.init();
return true;
}
/**
* Returns the Encoder being used (Narrowband, Wideband or Ultrawideband).
* @return the Encoder being used (Narrowband, Wideband or Ultrawideband).
*/
public Encoder getEncoder()
{
return encoder;
}
/**
* Returns the sample rate.
* @return the sample rate.
*/
public int getSampleRate()
{
return sampleRate;
}
/**
* Returns the number of channels.
* @return the number of channels.
*/
public int getChannels()
{
return channels;
}
/**
* Returns the size of a frame.
* @return the size of a frame.
*/
public int getFrameSize()
{
return frameSize;
}
/**
* Pull the decoded data out into a byte array at the given offset
* and returns the number of bytes of encoded data just read.
* @param data
* @param offset
* @return the number of bytes of encoded data just read.
*/
public int getProcessedData(final byte[] data, final int offset)
{
int size = bits.getBufferSize();
System.arraycopy(bits.getBuffer(), 0, data, offset, size);
bits.init();
return size;
}
/**
* Returns the number of bytes of encoded data ready to be read.
* @return the number of bytes of encoded data ready to be read.
*/
public int getProcessedDataByteSize()
{
return bits.getBufferSize();
}
/**
* This is where the actual encoding takes place
* @param data
* @param offset
* @param len
* @return true if successful.
*/
public boolean processData(final byte[] data,
final int offset,
final int len)
{
// convert raw bytes into float samples
mapPcm16bitLittleEndian2Float(data, offset, rawData, 0, len/2);
// encode the bitstream
return processData(rawData, len/2);
}
/**
* Encode an array of shorts.
* @param data
* @param offset
* @param numShorts
* @return true if successful.
*/
public boolean processData(final short[] data,
final int offset,
final int numShorts)
{
int numSamplesRequired = channels * frameSize;
if (numShorts != numSamplesRequired) {
throw new IllegalArgumentException("SpeexEncoder requires " + numSamplesRequired + " samples to process a Frame, not " + numShorts);
}
// convert shorts into float samples,
for (int i=0; i<numShorts; i++) {
rawData[i] = (float) data[offset + i ];
}
// encode the bitstream
return processData(rawData, numShorts);
}
/**
* Encode an array of floats.
* @param data
* @param numSamples
* @return true if successful.
*/
public boolean processData(final float[] data, final int numSamples)
{
int numSamplesRequired = channels * frameSize;
if (numSamples != numSamplesRequired) {
throw new IllegalArgumentException("SpeexEncoder requires " + numSamplesRequired + " samples to process a Frame, not " + numSamples );
}
// encode the bitstream
if (channels==2) {
Stereo.encode(bits, data, frameSize);
}
encoder.encode(bits, data);
return true;
}
/**
* Converts a 16 bit linear PCM stream (in the form of a byte array)
* into a floating point PCM stream (in the form of an float array).
* Here are some important details about the encoding:
* <ul>
* <li> Java uses big-endian byte order for shorts and ints, whereas the PCM data is little-endian.
* Therefore, shorts and ints must be read as sequences of bytes and
* combined with shifting operations.
* </ul>
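* <p>For example, the little-endian byte pair {0x34, 0x12} combines to the
* sample value 0x1234 = 4660.0f (low byte first; the high byte carries the sign).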
* @param pcm16bitBytes - byte array of linear 16-bit PCM formatted audio.
* @param offsetInput
* @param samples - float array to receive the 16-bit linear audio samples.
* @param offsetOutput
* @param length
*/
public static void mapPcm16bitLittleEndian2Float(final byte[] pcm16bitBytes,
final int offsetInput,
final float[] samples,
final int offsetOutput,
final int length)
{
if (pcm16bitBytes.length - offsetInput < 2 * length) {
throw new IllegalArgumentException("Insufficient Samples to convert to floats");
}
if (samples.length - offsetOutput < length) {
throw new IllegalArgumentException("Insufficient float buffer to convert the samples");
}
for (int i = 0; i < length; i++) {
samples[offsetOutput+i] = (float)((pcm16bitBytes[offsetInput+2*i] & 0xff) | (pcm16bitBytes[offsetInput+2*i+1] << 8)); // no & 0xff at the end to keep the sign
}
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SplitShapeSearch.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SplitShapeSearch.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: SplitShapeSearch.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Split shape codebook search
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class SplitShapeSearch
extends CbSearch
{
/** */
public static final int MAX_COMPLEXITY = 10;
private int subframesize;
private int subvect_size;
private int nb_subvect;
private int[] shape_cb;
private int shape_cb_size;
private int shape_bits;
private int have_sign;
private int[] ind;
private int[] signs;
// Variables used by the encoder
private float[] t, e, E, r2;
private float[][] ot, nt;
private int[][] nind, oind;
/**
* Constructor
* @param subframesize
* @param subvect_size
* @param nb_subvect
* @param shape_cb
* @param shape_bits
* @param have_sign
*/
public SplitShapeSearch(final int subframesize,
final int subvect_size,
final int nb_subvect,
final int[] shape_cb,
final int shape_bits,
final int have_sign)
{
this.subframesize = subframesize;
this.subvect_size = subvect_size;
this.nb_subvect = nb_subvect;
this.shape_cb = shape_cb;
this.shape_bits = shape_bits;
this.have_sign = have_sign;
this.ind = new int[nb_subvect];
this.signs = new int[nb_subvect];
shape_cb_size = 1<<shape_bits;
ot=new float[MAX_COMPLEXITY][subframesize];
nt=new float[MAX_COMPLEXITY][subframesize];
oind=new int[MAX_COMPLEXITY][nb_subvect];
nind=new int[MAX_COMPLEXITY][nb_subvect];
t = new float[subframesize];
e = new float[subframesize];
r2 = new float[subframesize];
E = new float[shape_cb_size];
}
/**
* Codebook Search Quantification (Split Shape).
* @param target target vector
* @param ak LPCs for this subframe
* @param awk1 Weighted LPCs for this subframe
* @param awk2 Weighted LPCs for this subframe
* @param p number of LPC coeffs
* @param nsf number of samples in subframe
* @param exc excitation array.
* @param es position in excitation array.
* @param r
* @param bits Speex bits buffer.
* @param complexity
*/
public final void quant(float[] target, float[] ak, float[] awk1, float[] awk2,
int p, int nsf, float[] exc, int es, float[] r,
Bits bits, int complexity)
{
int i,j,k,m,n,q;
float[] resp;
float[] ndist, odist;
int[] best_index;
float[] best_dist;
int N=complexity;
if (N>10)
N=10;
resp = new float[shape_cb_size*subvect_size];
best_index = new int[N];
best_dist = new float[N];
ndist = new float[N];
odist = new float[N];
for (i=0;i<N;i++) {
for (j=0;j<nb_subvect;j++)
nind[i][j]=oind[i][j]=-1;
}
for (j=0;j<N;j++)
for (i=0;i<nsf;i++)
ot[j][i]=target[i];
// System.arraycopy(target, 0, t, 0, nsf);
/* Pre-compute codewords response and energy */
for (i=0; i<shape_cb_size; i++) {
int res;
int shape;
res = i*subvect_size;
shape = i*subvect_size;
/* Compute codeword response using convolution with impulse response */
for (j=0; j<subvect_size; j++) {
resp[res+j]=0;
for (k=0;k<=j;k++)
resp[res+j] += 0.03125*shape_cb[shape+k]*r[j-k];
}
/* Compute codeword energy */
E[i]=0;
for (j=0; j<subvect_size; j++)
E[i]+=resp[res+j]*resp[res+j];
}
for (j=0; j<N; j++)
odist[j]=0;
/*For all subvectors*/
for (i=0; i<nb_subvect; i++) {
int offset = i*subvect_size;
/*"erase" nbest list*/
for (j=0; j<N; j++)
ndist[j]=-1;
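/* A value of -1 marks an empty slot, which is why the tests below accept an entry whenever ndist[...] < -.5 */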
/*For all n-bests of previous subvector*/
for (j=0; j<N; j++) {
/*Find new n-best based on previous n-best j*/
if (have_sign != 0)
VQ.nbest_sign(ot[j], offset, resp, subvect_size, shape_cb_size, E, N, best_index, best_dist);
else
VQ.nbest(ot[j], offset, resp, subvect_size, shape_cb_size, E, N, best_index, best_dist);
/*For all new n-bests*/
for (k=0; k<N; k++) {
float[] ct;
float err=0;
ct = ot[j];
/*update target*/
/*previous target*/
for (m=offset; m<offset+subvect_size; m++)
t[m]=ct[m];
/* New code: update only enough of the target to calculate error*/
{
int rind;
int res;
float sign=1;
rind = best_index[k];
if (rind>=shape_cb_size) {
sign = -1;
rind -= shape_cb_size;
}
res = rind*subvect_size;
if (sign>0)
for (m=0;m<subvect_size;m++)
t[offset+m] -= resp[res+m];
else
for (m=0;m<subvect_size;m++)
t[offset+m] += resp[res+m];
}
/*compute error (distance)*/
err=odist[j];
for (m=offset;m<offset+subvect_size;m++)
err += t[m]*t[m];
/*update n-best list*/
if (err<ndist[N-1] || ndist[N-1]<-.5) {
/*previous target (we don't care what happened before)*/
for (m=offset+subvect_size; m<nsf; m++)
t[m] = ct[m];
/* New code: update the rest of the target only if it's worth it */
for (m=0; m<subvect_size; m++) {
float g;
int rind;
float sign = 1;
rind = best_index[k];
if (rind>=shape_cb_size) {
sign = -1;
rind -= shape_cb_size;
}
g = sign*0.03125f*shape_cb[rind*subvect_size+m];
q = subvect_size-m;
for (n=offset+subvect_size; n<nsf; n++, q++)
t[n] -= g*r[q];
}
for (m=0; m<N; m++) {
if (err < ndist[m] || ndist[m]<-.5) {
for (n=N-1; n>m; n--) {
for (q=offset+subvect_size; q<nsf; q++)
nt[n][q] = nt[n-1][q];
for (q=0; q<nb_subvect; q++)
nind[n][q] = nind[n-1][q];
ndist[n] = ndist[n-1];
}
for (q=offset+subvect_size; q<nsf; q++)
nt[m][q] = t[q];
for (q=0; q<nb_subvect; q++)
nind[m][q] = oind[j][q];
nind[m][i] = best_index[k];
ndist[m] = err;
break;
}
}
}
}
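/* For the first subvector all N starting targets are identical, so a single pass over one previous n-best entry is sufficient. */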
if (i==0)
break;
}
/*update old-new data*/
/* just swap pointers instead of a long copy */
{
float[][] tmp2;
tmp2=ot;
ot=nt;
nt=tmp2;
}
for (j=0; j<N; j++)
for (m=0; m<nb_subvect; m++)
oind[j][m] = nind[j][m];
for (j=0; j<N; j++)
odist[j] = ndist[j];
}
/*save indices*/
for (i=0; i<nb_subvect; i++) {
ind[i] = nind[0][i];
bits.pack(ind[i], shape_bits+have_sign);
}
/* Put everything back together */
for (i=0; i<nb_subvect; i++) {
int rind;
float sign = 1;
rind = ind[i];
if (rind >= shape_cb_size) {
sign = -1;
rind -= shape_cb_size;
}
for (j=0; j<subvect_size; j++)
e[subvect_size*i+j] = sign*0.03125f*shape_cb[rind*subvect_size+j];
}
/* Update excitation */
for (j=0; j<nsf; j++)
exc[es+j] += e[j];
/* Update target */
Filters.syn_percep_zero(e, 0, ak, awk1, awk2, r2, nsf, p);
for (j=0; j<nsf; j++)
target[j] -= r2[j];
}
/**
* Codebook Search Unquantization (Split Shape).
* @param exc - excitation array.
* @param es - position in excitation array.
* @param nsf - number of samples in subframe.
* @param bits - Speex bits buffer.
*/
public final void unquant(float[] exc, int es, int nsf, Bits bits)
{
int i,j;
/* Decode codewords and gains */
for (i=0; i<nb_subvect; i++) {
if (have_sign!=0)
signs[i] = bits.unpack(1);
else
signs[i] = 0;
ind[i] = bits.unpack(shape_bits);
}
/* Compute decoded excitation */
for (i=0; i<nb_subvect; i++) {
float s=1.0f;
if (signs[i]!=0)
s=-1.0f;
for (j=0; j<subvect_size; j++){
exc[es+subvect_size*i+j]+=s*0.03125f*(float)shape_cb[ind[i]*subvect_size+j];
}
}
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/Stereo.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: Stereo.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 22nd April 2003 *
* *
******************************************************************************/
/* $Id: Stereo.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
File: stereo.c
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Stereo
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class Stereo
{
/** Inband code number for Stereo */
public static final int SPEEX_INBAND_STEREO = 9;
/** */
public static final float[] e_ratio_quant = {.25f, .315f, .397f, .5f};
/** Left/right balance info */
private float balance = 1f;
/** Ratio of energies: E(left+right)/[E(left)+E(right)] */
private float e_ratio = 0.5f;
/** Smoothed left channel gain */
private float smooth_left = 1f;
/** Smoothed right channel gain */
private float smooth_right = 1f;
// private float reserved1; /** Reserved for future use */
// private float reserved2; /** Reserved for future use */
/**
* Transforms a stereo frame into a mono frame and stores intensity stereo
* info in 'bits'.
* @param bits - Speex bits buffer.
* @param data
* @param frameSize
*/
public static void encode(final Bits bits,
final float[] data,
final int frameSize)
{
int i, tmp;
float e_left=0, e_right=0, e_tot=0;
float balance, e_ratio;
for (i=0;i<frameSize;i++) {
e_left += data[2*i]*data[2*i];
e_right += data[2*i+1]*data[2*i+1];
data[i] = .5f*(data[2*i]+data[2*i+1]);
e_tot += data[i]*data[i];
}
balance=(e_left+1)/(e_right+1);
e_ratio = e_tot/(1+e_left+e_right);
/*Quantization*/
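/* Side information layout: the in-band signalling prefix (14 in 5 bits, then SPEEX_INBAND_STEREO in 4 bits), a sign bit plus a 5-bit magnitude of 4*ln of the left/right energy ratio clamped to 31, and a 2-bit index into e_ratio_quant. */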
bits.pack(14, 5);
bits.pack(SPEEX_INBAND_STEREO, 4);
balance=(float)(4*Math.log(balance));
/*Pack balance*/
if (balance>0)
bits.pack(0, 1);
else
bits.pack(1, 1);
balance=(float) Math.floor(.5f+Math.abs(balance));
if (balance>30)
balance=31;
bits.pack((int)balance, 5);
/*Quantize energy ratio*/
tmp=VQ.index(e_ratio, e_ratio_quant, 4);
bits.pack(tmp, 2);
}
/**
* Transforms a mono frame into a stereo frame using intensity stereo info.
* @param data - float array of size 2*frameSize, that contains the mono
* audio samples in the first half. When the function has completed, the
* array will contain the interlaced stereo audio samples.
* @param frameSize - the size of a frame of mono audio samples.
*/
public void decode(final float[] data, final int frameSize)
{
int i;
float e_tot=0, e_left, e_right, e_sum;
for (i=frameSize-1; i>=0; i--) {
e_tot += data[i]*data[i];
}
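/* Reconstruction: e_ratio scales the decoded mono energy up to the combined left+right energy, which balance then splits between the channels; the resulting per-channel gains are smoothed with a one-pole (0.98/0.02) filter below. */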
e_sum=e_tot/e_ratio;
e_left = e_sum*balance / (1+balance);
e_right = e_sum-e_left;
e_left = (float)Math.sqrt(e_left/(e_tot+.01f));
e_right = (float)Math.sqrt(e_right/(e_tot+.01f));
for (i=frameSize-1;i>=0;i--) {
float ftmp=data[i];
smooth_left = .98f*smooth_left + .02f*e_left;
smooth_right = .98f*smooth_right + .02f*e_right;
data[2*i] = smooth_left*ftmp;
data[2*i+1] = smooth_right*ftmp;
}
}
/**
* Callback handler for intensity stereo info
* @param bits - Speex bits buffer.
*/
public void init(Bits bits)
{
float sign=1;
int tmp;
if (bits.unpack(1) != 0)
sign=-1;
tmp = bits.unpack(5);
balance = (float) Math.exp(sign*.25*tmp);
tmp = bits.unpack(2);
e_ratio = e_ratio_quant[tmp];
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/SubMode.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: SubMode.java *
* *
* Author: James LAWRENCE *
* Modified by: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: March 2003 *
* *
******************************************************************************/
/* $Id: SubMode.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Speex SubMode
*
* @author Jim Lawrence, helloNetwork.com
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class SubMode
{
/** Set to -1 for "normal" modes, otherwise encode pitch using a global pitch and allowing a +- lbr_pitch variation (for low bit-rates) */
public int lbr_pitch;
/** Use the same (forced) pitch gain for all sub-frames */
public int forced_pitch_gain;
/** Number of bits to use as sub-frame innovation gain */
public int have_subframe_gain;
/** Apply innovation quantization twice for higher quality (and higher bit-rate)*/
public int double_codebook;
/** LSP quantization/unquantization function */
public LspQuant lsqQuant;
/** Long-term predictor (pitch) un-quantizer */
public Ltp ltp;
/** Codebook Search un-quantizer*/
public CbSearch innovation;
/** Enhancer constant */
public float lpc_enh_k1;
/** Enhancer constant */
public float lpc_enh_k2;
/** Gain of enhancer comb filter */
public float comb_gain;
/** Number of bits per frame after encoding*/
public int bits_per_frame;
/**
* Constructor
*/
public SubMode(final int lbr_pitch,
final int forced_pitch_gain,
final int have_subframe_gain,
final int double_codebook,
final LspQuant lspQuant,
final Ltp ltp,
final CbSearch innovation,
final float lpc_enh_k1,
final float lpc_enh_k2,
final float comb_gain,
final int bits_per_frame)
{
this.lbr_pitch = lbr_pitch;
this.forced_pitch_gain = forced_pitch_gain;
this.have_subframe_gain = have_subframe_gain;
this.double_codebook = double_codebook;
this.lsqQuant = lspQuant;
this.ltp = ltp;
this.innovation = innovation;
this.lpc_enh_k1 = lpc_enh_k1;
this.lpc_enh_k2 = lpc_enh_k2;
this.comb_gain = comb_gain;
this.bits_per_frame = bits_per_frame;
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/VQ.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: VQ.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 15th April 2003 *
* *
******************************************************************************/
/* $Id: VQ.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
File: vq.c
Vector quantization
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* Vector Quantization.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class VQ
{
/**
* Finds the index of the entry in a codebook that best matches the input.
* @param in - the value to compare.
* @param codebook - the list of values to search through for the best match.
* @param entries - the size of the codebook.
* @return the index of the entry in a codebook that best matches the input.
*/
public static final int index(final float in,
final float[] codebook,
final int entries)
{
int i;
float min_dist=0;
int best_index=0;
for (i=0;i<entries;i++)
{
float dist = in-codebook[i];
dist = dist*dist;
if (i==0 || dist<min_dist)
{
min_dist=dist;
best_index=i;
}
}
return best_index;
}
/**
* Finds the index of the entry in a codebook that best matches the input.
* @param in - the vector to compare.
* @param codebook - the list of values to search through for the best match.
* @param len - the size of the vector.
* @param entries - the size of the codebook.
* @return the index of the entry in a codebook that best matches the input.
*/
public static final int index(final float[] in,
final float[] codebook,
final int len,
final int entries)
{
int i,j,k=0;
float min_dist=0;
int best_index=0;
for (i=0;i<entries;i++)
{
float dist=0;
for (j=0;j<len;j++)
{
float tmp = in[j]-codebook[k++];
dist += tmp*tmp;
}
if (i==0 || dist<min_dist)
{
min_dist=dist;
best_index=i;
}
}
return best_index;
}
/**
* Finds the indices of the n-best entries in a codebook
* @param in
* @param offset
* @param codebook
* @param len
* @param entries
* @param E
* @param N
* @param nbest
* @param best_dist
*/
public static final void nbest(final float[] in,
final int offset,
final float[] codebook,
final int len,
final int entries,
final float[] E,
final int N,
final int[] nbest,
final float[] best_dist)
{
int i, j, k, l=0, used=0;
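/* Since ||x - c||^2 = ||x||^2 - 2<x,c> + ||c||^2 and ||x||^2 is the same for every codeword, ranking by .5*E[i] - <x,c> (E[i] holding the pre-computed codeword energy) is equivalent to ranking by the true squared distance. */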
for (i=0;i<entries;i++)
{
float dist=.5f*E[i];
for (j=0;j<len;j++)
dist -= in[offset+j]*codebook[l++];
if (i<N || dist<best_dist[N-1]) {
for (k=N-1; (k >= 1) && (k > used || dist < best_dist[k-1]); k--) {
best_dist[k] = best_dist[k-1];
nbest[k] = nbest[k-1];
}
best_dist[k]=dist;
nbest[k]=i;
used++;
}
}
}
/**
* Finds the indices of the n-best entries in a codebook with sign
* @param in
* @param offset
* @param codebook
* @param len
* @param entries
* @param E
* @param N
* @param nbest
* @param best_dist
*/
public static final void nbest_sign(final float[] in,
final int offset,
final float[] codebook,
final int len,
final int entries,
final float[] E,
final int N,
final int[] nbest,
final float[] best_dist)
{
int i, j, k, l=0, sign, used=0;
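/* The correlation sign is folded in: when <x,c> is negative the flipped codeword (-c) matches better, so the search ranks by -|<x,c>| + .5*E[i] and offsets the returned index by 'entries' to signal the flip to the caller. */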
for (i=0;i<entries;i++) {
float dist=0;
for (j=0;j<len;j++)
dist -= in[offset+j]*codebook[l++];
if (dist>0) {
sign=1;
dist=-dist;
}
else {
sign=0;
}
dist += .5*E[i];
if (i<N || dist<best_dist[N-1]) {
for (k=N-1; (k >= 1) && (k > used || dist < best_dist[k-1]); k--)
{
best_dist[k]=best_dist[k-1];
nbest[k] = nbest[k-1];
}
best_dist[k]=dist;
nbest[k]=i;
used++;
if (sign != 0)
nbest[k]+=entries;
}
}
}
}
|
0
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph
|
java-sources/ai/olami/olami-java-client/1.5.0/org/xiph/speex/Vbr.java
|
/******************************************************************************
* *
* Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. *
* *
* COPYRIGHT: *
* This software is the property of Wimba S.A. *
* This software is redistributed under the Xiph.org variant of *
* the BSD license. *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* - Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* - Neither the name of Wimba, the Xiph.org Foundation nor the names of *
* its contributors may be used to endorse or promote products derived *
* from this software without specific prior written permission. *
* *
* WARRANTIES: *
* This software is made available by the authors in the hope *
* that it will be useful, but without any warranty. *
* Wimba S.A. is not liable for any consequence related to the *
* use of the provided software. *
* *
* Class: Vbr.java *
* *
* Author: Marc GIMPEL *
* Based on code by: Jean-Marc VALIN *
* *
* Date: 19th April 2003 *
* *
******************************************************************************/
/* $Id: Vbr.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */
/* Copyright (C) 2002 Jean-Marc Valin
File: vbr.c
VBR-related routines
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.xiph.speex;
/**
* This class analyses the signal to help determine what bitrate to use when
* the Variable BitRate option has been selected.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.2 $
*/
public class Vbr
{
/** */
public static final int VBR_MEMORY_SIZE = 5;
/** */
public static final int MIN_ENERGY = 6000;
/** */
public static final float NOISE_POW = 0.3f;
/**
* Narrowband threshold table.
*/
public static final float[][] nb_thresh = { //[9][11]
{-1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f}, /* CNG */
{ 3.5f, 2.5f, 2.0f, 1.2f, 0.5f, 0.0f, -0.5f, -0.7f, -0.8f, -0.9f, -1.0f}, /* 2 kbps */
{10.0f, 6.5f, 5.2f, 4.5f, 3.9f, 3.5f, 3.0f, 2.5f, 2.3f, 1.8f, 1.0f}, /* 6 kbps */
{11.0f, 8.8f, 7.5f, 6.5f, 5.0f, 3.9f, 3.9f, 3.9f, 3.5f, 3.0f, 1.0f}, /* 8 kbps */
{11.0f, 11.0f, 9.9f, 9.0f, 8.0f, 7.0f, 6.5f, 6.0f, 5.0f, 4.0f, 2.0f}, /* 11 kbps */
{11.0f, 11.0f, 11.0f, 11.0f, 9.5f, 9.0f, 8.0f, 7.0f, 6.5f, 5.0f, 3.0f}, /* 15 kbps */
{11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 9.5f, 8.5f, 8.0f, 6.5f, 4.0f}, /* 18 kbps */
{11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 9.8f, 7.5f, 5.5f}, /* 24 kbps */
{ 8.0f, 5.0f, 3.7f, 3.0f, 2.5f, 2.0f, 1.8f, 1.5f, 1.0f, 0.0f, 0.0f} /* 4 kbps */
};
/**
* Wideband threshold table.
*/
public static final float[][] hb_thresh = { //[5][11]
{-1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f}, /* silence */
{-1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f}, /* 2 kbps */
{11.0f, 11.0f, 9.5f, 8.5f, 7.5f, 6.0f, 5.0f, 3.9f, 3.0f, 2.0f, 1.0f}, /* 6 kbps */
{11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 9.5f, 8.7f, 7.8f, 7.0f, 6.5f, 4.0f}, /* 10 kbps */
{11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 11.0f, 9.8f, 7.5f, 5.5f} /* 18 kbps */
};
/**
* Ultra-wideband threshold table.
*/
public static final float[][] uhb_thresh = { // [2][11]
{-1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f}, /* silence */
{ 3.9f, 2.5f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f} /* 2 kbps */
};
private float energy_alpha;
private float average_energy;
private float last_energy;
private float[] last_log_energy;
private float accum_sum;
private float last_pitch_coef;
private float soft_pitch;
private float last_quality;
private float noise_level;
private float noise_accum;
private float noise_accum_count;
private int consec_noise;
/**
* Constructor
*/
public Vbr()
{
average_energy = 0;
last_energy = 1;
accum_sum = 0;
energy_alpha = .1f;
soft_pitch = 0;
last_pitch_coef = 0;
last_quality = 0;
noise_accum = (float) (.05*Math.pow(MIN_ENERGY, NOISE_POW));
noise_accum_count = .05f;
noise_level = noise_accum/noise_accum_count;
consec_noise = 0;
last_log_energy = new float[VBR_MEMORY_SIZE];
for (int i=0; i<VBR_MEMORY_SIZE; i++)
last_log_energy[i] = (float) Math.log(MIN_ENERGY);
}
/**
* This function should analyse the signal and decide how critical the
* coding error will be perceptually. The following factors should be
* taken into account:
* <ul>
* <li>Attacks (positive energy derivative) should be coded with more bits
* <li>Stationary voiced segments should receive more bits
* <li>Segments with (very) low absolute energy should receive less bits
* (maybe only shaped noise?)
* <li>DTX for near-zero energy?
* <li>Stationary fricative segments should have less bits
* <li>Temporal masking: when energy slope is decreasing, decrease the bit-rate
* <li>Decrease bit-rate for males (low pitch)?
* <li>(wideband only) less bits in the high-band when signal is very
* non-stationary (harder to notice high-frequency noise)???
* </ul>
* @param sig - signal.
* @param len - signal length.
* @param pitch - signal pitch.
* @param pitch_coef - pitch coefficient.
* @return quality
*/
public float analysis(final float[] sig,
final int len,
final int pitch,
final float pitch_coef)
{
int i;
float ener=0, ener1=0, ener2=0;
float qual=7;
int va;
float log_energy;
float non_st=0;
float voicing;
float pow_ener;
for (i=0; i<len>>1; i++)
ener1 += sig[i]*sig[i];
for (i=len>>1; i<len; i++)
ener2 += sig[i]*sig[i];
ener=ener1+ener2;
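/* The frame energy is computed over the two halves separately so that a rising second half (ener2 > 1.6*ener1) can later be treated as an attack and rewarded with extra quality. */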
log_energy = (float) Math.log(ener+MIN_ENERGY);
for (i=0; i<VBR_MEMORY_SIZE; i++)
non_st += (log_energy-last_log_energy[i])*(log_energy-last_log_energy[i]);
non_st = non_st/(30*VBR_MEMORY_SIZE);
if (non_st>1)
non_st=1;
voicing = 3*(pitch_coef-.4f)*Math.abs(pitch_coef-.4f);
average_energy = (1-energy_alpha)*average_energy + energy_alpha*ener;
noise_level=noise_accum/noise_accum_count;
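/* noise_level is a running estimate of the background-noise floor on a power-compressed (NOISE_POW) energy scale; it is only adapted after several consecutive noise-like frames or when a frame's energy falls below the current floor. */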
pow_ener = (float) Math.pow(ener,NOISE_POW);
if (noise_accum_count<.06f && ener>MIN_ENERGY)
noise_accum = .05f*pow_ener;
if ((voicing<.3f && non_st < .2f && pow_ener < 1.2f*noise_level)
|| (voicing<.3f && non_st < .05f && pow_ener < 1.5f*noise_level)
|| (voicing<.4f && non_st < .05f && pow_ener < 1.2f*noise_level)
|| (voicing<0 && non_st < .05f))
{
float tmp;
va = 0;
consec_noise++;
if (pow_ener > 3*noise_level)
tmp = 3*noise_level;
else
tmp = pow_ener;
if (consec_noise>=4) {
noise_accum = .95f*noise_accum + .05f*tmp;
noise_accum_count = .95f*noise_accum_count + .05f;
}
} else {
va = 1;
consec_noise=0;
}
if (pow_ener < noise_level && ener>MIN_ENERGY) {
noise_accum = .95f*noise_accum + .05f*pow_ener;
noise_accum_count = .95f*noise_accum_count + .05f;
}
/* Checking for very low absolute energy */
if (ener < 30000)
{
qual -= .7f;
if (ener < 10000)
qual-=.7f;
if (ener < 3000)
qual-=.7f;
} else {
float short_diff, long_diff;
short_diff = (float) Math.log((ener+1)/(1+last_energy));
long_diff = (float) Math.log((ener+1)/(1+average_energy));
/*fprintf (stderr, "%f %f\n", short_diff, long_diff);*/
if (long_diff<-5)
long_diff=-5;
if (long_diff>2)
long_diff=2;
if (long_diff>0)
qual += .6f*long_diff;
if (long_diff<0)
qual += .5f*long_diff;
if (short_diff>0)
{
if (short_diff>5)
short_diff=5;
qual += .5f*short_diff;
}
/* Checking for energy increases */
if (ener2 > 1.6f*ener1)
qual += .5f;
}
last_energy = ener;
soft_pitch = .6f*soft_pitch + .4f*pitch_coef;
qual += 2.2f*((pitch_coef-.4) + (soft_pitch-.4));
if (qual < last_quality)
qual = .5f*qual + .5f*last_quality;
if (qual<4)
qual=4;
if (qual>10)
qual=10;
/*
if (consec_noise>=2)
qual-=1.3f;
if (consec_noise>=5)
qual-=1.3f;
if (consec_noise>=12)
qual-=1.3f;
*/
if (consec_noise>=3)
qual=4;
if (consec_noise != 0)
qual -= (float)(1.0 * (Math.log(3.0 + consec_noise)-Math.log(3)));
if (qual<0)
qual=0;
if (ener<60000)
{
if (consec_noise>2)
qual-=(float)(0.5*(Math.log(3.0 + consec_noise)-Math.log(3)));
if (ener<10000&&consec_noise>2)
qual-=(float)(0.5*(Math.log(3.0 + consec_noise)-Math.log(3)));
if (qual<0)
qual=0;
qual += (float)(.3*Math.log(ener/60000.0));
}
if (qual<-1)
qual=-1;
last_pitch_coef = pitch_coef;
last_quality = qual;
for (i=VBR_MEMORY_SIZE-1; i>0; i--)
last_log_energy[i] = last_log_energy[i-1];
last_log_energy[0] = log_energy;
return qual;
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/Main.java
|
package ai.onehouse;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Guice;
import com.google.inject.Injector;
import ai.onehouse.api.AsyncHttpClientWithRetry;
import ai.onehouse.cli_parser.CliParser;
import ai.onehouse.config.Config;
import ai.onehouse.config.ConfigLoader;
import ai.onehouse.config.ConfigProvider;
import ai.onehouse.config.ConfigRefresher;
import ai.onehouse.config.models.configv1.ConfigV1;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import ai.onehouse.metadata_extractor.TableDiscoveryAndUploadJob;
import ai.onehouse.metrics.MetricsModule;
import ai.onehouse.metrics.MetricsServer;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.RuntimeModule.TableDiscoveryObjectStorageAsyncClient;
import com.google.inject.Key;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import software.amazon.awssdk.awscore.exception.AwsServiceException;
import static ai.onehouse.storage.S3AsyncStorageClient.ACCESS_DENIED_ERROR_CODE;
@Slf4j
public class Main {
private TableDiscoveryAndUploadJob job;
private AsyncHttpClientWithRetry asyncHttpClientWithRetry;
private MetricsServer metricsServer;
private final CliParser parser;
private final ConfigLoader configLoader;
private ConfigRefresher configRefresher;
private LakeViewExtractorMetrics lakeViewExtractorMetrics;
public Main(CliParser parser, ConfigLoader configLoader) {
this.parser = parser;
this.configLoader = configLoader;
}
public static void main(String[] args) {
CliParser parser = new CliParser();
ConfigLoader configLoader = new ConfigLoader();
Main main = new Main(parser, configLoader);
main.start(args);
}
public void start(String[] args) {
log.info("Starting LakeView extractor service");
Config config = null;
try {
parser.parse(args);
if (parser.isHelpRequested()) {
return;
}
String configFilePath = parser.getConfigFilePath();
String configYamlString = parser.getConfigYamlString();
config = loadConfig(configFilePath, configYamlString);
} catch (ParseException e) {
log.error("Failed to parse command line arguments", e);
System.exit(1);
}
Injector injector = Guice.createInjector(new RuntimeModule(config), new MetricsModule());
job = injector.getInstance(TableDiscoveryAndUploadJob.class);
asyncHttpClientWithRetry = injector.getInstance(AsyncHttpClientWithRetry.class);
ConfigProvider configProvider = injector.getInstance(ConfigProvider.class);
metricsServer = injector.getInstance(MetricsServer.class);
lakeViewExtractorMetrics = injector.getInstance(LakeViewExtractorMetrics.class);
// If metadata extractor config is provided externally, then override and refresh config
// periodically.
if (StringUtils.isNotBlank(config.getMetadataExtractorConfigPath())) {
AsyncStorageClient storageClient = injector.getInstance(Key.get(AsyncStorageClient.class, TableDiscoveryObjectStorageAsyncClient.class));
try {
String baseConfigYaml = configLoader.convertConfigToString(config);
configRefresher =
new ConfigRefresher(
baseConfigYaml,
config.getMetadataExtractorConfigPath(),
storageClient,
configLoader,
configProvider);
configRefresher.start();
} catch (Exception ex) {
log.error("Failed to override metadata extractor config", ex);
lakeViewExtractorMetrics.incrementFailedOverrideConfigCounter();
}
}
runJob(configProvider.getConfig());
}
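// When both are provided, the config file path takes precedence over the inline YAML string.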
private Config loadConfig(String configFilePath, String configYamlString) {
if (configFilePath != null) {
return configLoader.loadConfigFromConfigFile(configFilePath);
} else if (configYamlString != null) {
return configLoader.loadConfigFromString(configYamlString);
} else {
log.error("No configuration provided. Please specify either a file path or a YAML string.");
System.exit(1);
}
return null;
}
private void runJob(Config config) {
try {
MetadataExtractorConfig.JobRunMode jobRunMode =
((ConfigV1) config).getMetadataExtractorConfig().getJobRunMode();
if (MetadataExtractorConfig.JobRunMode.CONTINUOUS.equals(jobRunMode)) {
job.runInContinuousMode(config);
} else {
job.runOnce(config);
shutdown(config);
}
} catch (AwsServiceException e) {
log.info("Failed to run job with errorCode : {} and errorMessage : {}",
e.awsErrorDetails().errorCode(), e.awsErrorDetails().errorMessage());
if (e.awsErrorDetails().errorCode().equalsIgnoreCase(ACCESS_DENIED_ERROR_CODE)) {
lakeViewExtractorMetrics
.incrementTableDiscoveryFailureCounter(MetricsConstants.MetadataUploadFailureReasons.ACCESS_DENIED);
}
shutdown(config);
} catch (Exception e) {
log.info("Error in runJob message : {}", e.getMessage(), e.getCause());
log.error(e.getMessage(), e);
shutdown(config);
}
}
@VisibleForTesting
void shutdown(Config config) {
if (config.getMetadataExtractorConfig().getJobRunMode().equals(MetadataExtractorConfig.JobRunMode.ONCE)
|| config.getMetadataExtractorConfig().getJobRunMode().equals(MetadataExtractorConfig.JobRunMode.ONCE_WITH_RETRY)) {
log.info(String.format("Scheduling JVM shutdown after %d seconds",
config.getMetadataExtractorConfig().getWaitTimeBeforeShutdown()));
try {
Thread.sleep(config.getMetadataExtractorConfig().getWaitTimeBeforeShutdown() * 1000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
asyncHttpClientWithRetry.shutdownScheduler();
job.shutdown();
metricsServer.shutdown();
if (configRefresher != null) {
configRefresher.shutdown();
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/RuntimeModule.java
|
package ai.onehouse;
import ai.onehouse.env.EnvironmentLookupProvider;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.AbstractModule;
import com.google.inject.BindingAnnotation;
import com.google.inject.Provides;
import ai.onehouse.api.AsyncHttpClientWithRetry;
import ai.onehouse.config.Config;
import ai.onehouse.config.ConfigProvider;
import ai.onehouse.config.models.common.FileSystemConfiguration;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.storage.GCSAsyncStorageClient;
import ai.onehouse.storage.S3AsyncStorageClient;
import ai.onehouse.storage.StorageUtils;
import ai.onehouse.storage.providers.GcsClientProvider;
import ai.onehouse.storage.providers.S3AsyncClientProvider;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.ProxySelector;
import java.net.URI;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinWorkerThread;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import okhttp3.Dispatcher;
import okhttp3.OkHttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Slf4j
public class RuntimeModule extends AbstractModule {
private static final Logger logger = LoggerFactory.getLogger(RuntimeModule.class);
private static final int IO_WORKLOAD_NUM_THREAD_MULTIPLIER = 5;
private static final int HTTP_CLIENT_DEFAULT_TIMEOUT_SECONDS = 15;
private static final int HTTP_CLIENT_MAX_RETRIES = 3;
private static final long HTTP_CLIENT_RETRY_DELAY_MS = 1000;
private final Config config;
public RuntimeModule(Config config) {
this.config = config;
}
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
@interface TableDiscoveryS3ObjectStorageClient {}
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
@interface TableMetadataUploadS3ObjectStorageClient {}
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
public @interface TableDiscoveryObjectStorageAsyncClient {}
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
public @interface TableMetadataUploadObjectStorageAsyncClient {}
@Provides
@Singleton
@TableDiscoveryS3ObjectStorageClient
static S3AsyncClientProvider providesS3AsyncClientProviderForDiscovery(Config config, ExecutorService executorService) {
return new S3AsyncClientProvider(config, executorService);
}
@Provides
@Singleton
@TableMetadataUploadS3ObjectStorageClient
static S3AsyncClientProvider providesS3AsyncClientProviderForUpload(Config config, ExecutorService executorService) {
return new S3AsyncClientProvider(config, executorService);
}
@Provides
@Singleton
static EnvironmentLookupProvider providesEnvironmentLookupProvider() {
return new EnvironmentLookupProvider.System();
}
@Provides
@Singleton
static OkHttpClient providesOkHttpClient(EnvironmentLookupProvider environmentLookupProvider, ExecutorService executorService) {
Dispatcher dispatcher = new Dispatcher(executorService);
OkHttpClient.Builder builder =
new OkHttpClient.Builder()
.readTimeout(HTTP_CLIENT_DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.writeTimeout(HTTP_CLIENT_DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.connectTimeout(HTTP_CLIENT_DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.dispatcher(dispatcher);
String httpProxyEnv = environmentLookupProvider.getValue("HTTP_PROXY");
if (httpProxyEnv != null && !httpProxyEnv.trim().isEmpty()) {
Proxy proxy = buildProxy(httpProxyEnv);
String noProxyEnv = environmentLookupProvider.getValue("NO_PROXY");
if (noProxyEnv != null && !noProxyEnv.trim().isEmpty()) {
builder.proxySelector(new EnvProxySelector(proxy, noProxyEnv));
} else {
builder.proxy(proxy);
}
log.info("Configured OkHttp client to use proxy from HTTP_PROXY env var: {}", httpProxyEnv);
}
return builder.build();
}
@Provides
@Singleton
static AsyncHttpClientWithRetry providesHttpAsyncClient(OkHttpClient okHttpClient) {
return new AsyncHttpClientWithRetry(
HTTP_CLIENT_MAX_RETRIES, HTTP_CLIENT_RETRY_DELAY_MS, okHttpClient);
}
@Provides
@Singleton
@TableDiscoveryObjectStorageAsyncClient
static AsyncStorageClient providesAsyncStorageClientForDiscovery(
Config config,
StorageUtils storageUtils,
@TableDiscoveryS3ObjectStorageClient S3AsyncClientProvider s3AsyncClientProvider,
GcsClientProvider gcsClientProvider,
ExecutorService executorService) {
FileSystemConfiguration fileSystemConfiguration = config.getFileSystemConfiguration();
if (fileSystemConfiguration.getS3Config() != null) {
return new S3AsyncStorageClient(s3AsyncClientProvider, storageUtils, executorService);
} else {
return new GCSAsyncStorageClient(gcsClientProvider, storageUtils, executorService);
}
}
@Provides
@Singleton
@TableMetadataUploadObjectStorageAsyncClient
static AsyncStorageClient providesAsyncStorageClientForUpload(
Config config,
StorageUtils storageUtils,
@TableMetadataUploadS3ObjectStorageClient S3AsyncClientProvider s3AsyncClientProvider,
GcsClientProvider gcsClientProvider,
ExecutorService executorService) {
FileSystemConfiguration fileSystemConfiguration = config.getFileSystemConfiguration();
if (fileSystemConfiguration.getS3Config() != null) {
return new S3AsyncStorageClient(s3AsyncClientProvider, storageUtils, executorService);
} else {
return new GCSAsyncStorageClient(gcsClientProvider, storageUtils, executorService);
}
}
@Provides
@Singleton
static ConfigProvider configProvider(Config config) {
return new ConfigProvider(config);
}
@Provides
@Singleton
static ExecutorService providesExecutorService() {
// more threads, as most operations are IO-intensive workloads
int numThreads = Runtime.getRuntime().availableProcessors() * IO_WORKLOAD_NUM_THREAD_MULTIPLIER;
log.info("Spinning up {} threads", numThreads);
class ApplicationThreadFactory implements ForkJoinPool.ForkJoinWorkerThreadFactory {
private static final String THREAD_GROUP_NAME_TEMPLATE = "metadata-extractor-%d";
private final AtomicInteger counter = new AtomicInteger(1);
@Override
public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
return new ForkJoinWorkerThread(pool) {
{
setName(String.format(THREAD_GROUP_NAME_TEMPLATE, counter.getAndIncrement()));
}
};
}
}
return new ForkJoinPool(
numThreads,
new ApplicationThreadFactory(),
(thread, throwable) -> {
if (throwable != null) {
logger.error(
String.format("Uncaught exception in a thread (%s)", thread.getName()), throwable);
}
},
// NOTE: It's critically important to make sure
// that `asyncMode` is true in async applications
true);
}
@VisibleForTesting
long getHttpClientRetryDelayMs() {
return HTTP_CLIENT_RETRY_DELAY_MS;
}
@VisibleForTesting
int getHttpClientMaxRetries() {
return HTTP_CLIENT_MAX_RETRIES;
}
@Override
protected void configure() {
bind(Config.class).toInstance(config);
}
/** Builds a java.net.Proxy object from the HTTP_PROXY environment variable */
private static Proxy buildProxy(String proxyEnv) {
try {
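// Accepts values with or without a scheme prefix and falls back to port 80 when none is specified.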
String proxyUrl = proxyEnv.matches("^[a-zA-Z]+://.*") ? proxyEnv : "http://" + proxyEnv;
URI uri = URI.create(proxyUrl);
String host = uri.getHost();
int port = uri.getPort() == -1 ? 80 : uri.getPort();
return new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
} catch (Exception e) {
logger.error("Failed to parse proxy url: {}", proxyEnv, e);
return Proxy.NO_PROXY;
}
}
/** ProxySelector that respects the NO_PROXY environment variable */
private static class EnvProxySelector extends ProxySelector {
private final Proxy proxy;
private final List<String> noProxyHosts;
EnvProxySelector(Proxy proxy, String noProxyEnv) {
this.proxy = proxy;
this.noProxyHosts = parseNoProxy(noProxyEnv);
}
private static List<String> parseNoProxy(String noProxyEnv) {
String[] parts = Optional.ofNullable(noProxyEnv).orElse("").trim().split(",");
List<String> list = new ArrayList<>();
for (String part : parts) {
list.add(part.trim());
}
return list;
}
@Override
public List<Proxy> select(URI uri) {
String host = uri.getHost();
if (host != null) {
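// A NO_PROXY entry matches when the host equals it or ends with it (plain suffix match, no wildcard handling).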
for (String pattern : noProxyHosts) {
if (pattern.isEmpty()) {
continue;
}
if (host.equals(pattern) || host.endsWith(pattern)) {
return Collections.singletonList(Proxy.NO_PROXY);
}
}
}
return Collections.singletonList(proxy);
}
@Override
public void connectFailed(URI uri, java.net.SocketAddress sa, java.io.IOException ioe) {
logger.error("Proxy connection failed to {} via {}", uri, sa, ioe);
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/AsyncHttpClientWithRetry.java
|
package ai.onehouse.api;
import static ai.onehouse.constants.ApiConstants.ACCEPTABLE_HTTP_FAILURE_STATUS_CODES;
import com.google.common.annotations.VisibleForTesting;
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.HttpUrl;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
@Slf4j
public class AsyncHttpClientWithRetry {
private final ScheduledExecutorService scheduler;
private final int maxRetries;
private final long retryDelayMillis;
private final OkHttpClient okHttpClient;
private static final long MAX_RETRY_DELAY_MILLIS = 10000; // 10 seconds
private static final Random random = new Random();
public AsyncHttpClientWithRetry(
int maxRetries, long retryDelayMillis, OkHttpClient okHttpClient) {
this.maxRetries = maxRetries;
this.retryDelayMillis = retryDelayMillis;
this.scheduler = Executors.newSingleThreadScheduledExecutor();
this.okHttpClient = okHttpClient;
}
public CompletableFuture<Response> makeRequestWithRetry(Request request) {
return attemptRequest(request, 1);
}
private CompletableFuture<Response> attemptRequest(Request request, int tryCount) {
CompletableFuture<Response> future = new CompletableFuture<>();
okHttpClient
.newCall(request)
.enqueue(
new Callback() {
@Override
public void onFailure(@Nonnull Call call, @Nonnull IOException e) {
if (tryCount < maxRetries) {
Request request = call.request();
HttpUrl url = request.url();
String method = request.method();
log.warn(
"API Request failed with error: {}, attempt: {}, url: {}, method: {}",
e.getMessage(),
tryCount,
url,
method);
scheduleRetry(request, tryCount, future);
} else {
future.completeExceptionally(e);
}
}
@Override
public void onResponse(@Nonnull Call call, @Nonnull Response response) {
if (!response.isSuccessful()
&& !ACCEPTABLE_HTTP_FAILURE_STATUS_CODES.contains(response.code())
&& tryCount < maxRetries) {
Request request = call.request();
HttpUrl url = request.url();
String method = request.method();
int statusCode = response.code();
log.warn(
"API Request failed with HTTP status: {}, attempt: {}, url: {}, method: {}",
statusCode,
tryCount,
url,
method);
response.close();
scheduleRetry(request, tryCount, future);
} else {
future.complete(response);
}
}
});
return future;
}
private void scheduleRetry(Request request, int tryCount, CompletableFuture<Response> future) {
scheduler.schedule(
() -> {
log.info("Scheduling request with attempt: {}", (tryCount + 1));
attemptRequest(request, tryCount + 1)
.whenComplete(
(resp, throwable) -> {
if (throwable != null) {
future.completeExceptionally(throwable);
} else {
future.complete(resp);
}
});
},
calculateDelay(tryCount),
TimeUnit.MILLISECONDS);
}
private long calculateDelay(int tryCount) {
// Exponential backoff with jitter and upper bound
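// The base delay doubles with every attempt, a random jitter of up to +/-50% of that delay is added, and the result is capped at MAX_RETRY_DELAY_MILLIS.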
long delay = (long) (retryDelayMillis * Math.pow(2, tryCount));
long jitter = (long) (random.nextDouble() * delay) - (delay / 2);
return Math.min(delay + jitter, MAX_RETRY_DELAY_MILLIS);
}
public void shutdownScheduler() {
scheduler.shutdown();
okHttpClient.connectionPool().evictAll();
okHttpClient.dispatcher().executorService().shutdown();
}
@VisibleForTesting
public long getRetryDelayMillis() {
return retryDelayMillis;
}
@VisibleForTesting
public int getMaxRetries() {
return maxRetries;
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/OnehouseApiClient.java
|
package ai.onehouse.api;
import static ai.onehouse.constants.ApiConstants.ACCEPTABLE_HTTP_FAILURE_STATUS_CODES;
import static ai.onehouse.constants.ApiConstants.GENERATE_COMMIT_METADATA_UPLOAD_URL;
import static ai.onehouse.constants.ApiConstants.GET_TABLE_METRICS_CHECKPOINT;
import static ai.onehouse.constants.ApiConstants.INITIALIZE_TABLE_METRICS_CHECKPOINT;
import static ai.onehouse.constants.ApiConstants.LINK_UID_KEY;
import static ai.onehouse.constants.ApiConstants.ONEHOUSE_API_ENDPOINT;
import static ai.onehouse.constants.ApiConstants.ONEHOUSE_API_KEY;
import static ai.onehouse.constants.ApiConstants.ONEHOUSE_API_SECRET_KEY;
import static ai.onehouse.constants.ApiConstants.ONEHOUSE_REGION_KEY;
import static ai.onehouse.constants.ApiConstants.ONEHOUSE_USER_UUID_KEY;
import static ai.onehouse.constants.ApiConstants.PROJECT_UID_KEY;
import static ai.onehouse.constants.ApiConstants.UNAUTHORIZED_ERROR_MESSAGE;
import static ai.onehouse.constants.ApiConstants.UPSERT_TABLE_METRICS_CHECKPOINT;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import ai.onehouse.api.models.request.GenerateCommitMetadataUploadUrlRequest;
import ai.onehouse.api.models.request.InitializeTableMetricsCheckpointRequest;
import ai.onehouse.api.models.request.UpsertTableMetricsCheckpointRequest;
import ai.onehouse.api.models.response.ApiResponse;
import ai.onehouse.api.models.response.GenerateCommitMetadataUploadUrlResponse;
import ai.onehouse.api.models.response.GetTableMetricsCheckpointResponse;
import ai.onehouse.api.models.response.InitializeTableMetricsCheckpointResponse;
import ai.onehouse.api.models.response.UpsertTableMetricsCheckpointResponse;
import ai.onehouse.config.Config;
import ai.onehouse.config.models.common.OnehouseClientConfig;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.reflect.InvocationTargetException;
import java.text.MessageFormat;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nonnull;
import lombok.SneakyThrows;
import okhttp3.Headers;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.apache.commons.lang3.StringUtils;
public class OnehouseApiClient {
private final AsyncHttpClientWithRetry asyncClient;
private final Headers headers;
private final LakeViewExtractorMetrics hudiMetadataExtractorMetrics;
private final ObjectMapper mapper;
@Inject
public OnehouseApiClient(
@Nonnull AsyncHttpClientWithRetry asyncClient,
@Nonnull Config config,
@Nonnull LakeViewExtractorMetrics hudiMetadataExtractorMetrics) {
this.asyncClient = asyncClient;
this.headers = getHeaders(config.getOnehouseClientConfig());
this.hudiMetadataExtractorMetrics = hudiMetadataExtractorMetrics;
this.mapper = new ObjectMapper();
}
@SneakyThrows
public CompletableFuture<InitializeTableMetricsCheckpointResponse>
initializeTableMetricsCheckpoint(InitializeTableMetricsCheckpointRequest request) {
return asyncPost(
INITIALIZE_TABLE_METRICS_CHECKPOINT,
mapper.writeValueAsString(request),
InitializeTableMetricsCheckpointResponse.class);
}
@SneakyThrows
public CompletableFuture<GetTableMetricsCheckpointResponse> getTableMetricsCheckpoints(
List<String> tableIds) {
HttpUrl.Builder urlBuilder =
HttpUrl.parse(ONEHOUSE_API_ENDPOINT + GET_TABLE_METRICS_CHECKPOINT).newBuilder();
for (String tableId : tableIds) {
urlBuilder.addQueryParameter("tableIds", tableId);
}
String url = urlBuilder.build().toString();
return asyncGet(url, GetTableMetricsCheckpointResponse.class);
}
@SneakyThrows
public CompletableFuture<UpsertTableMetricsCheckpointResponse> upsertTableMetricsCheckpoint(
UpsertTableMetricsCheckpointRequest request) {
return asyncPost(
MessageFormat.format(UPSERT_TABLE_METRICS_CHECKPOINT, request.getTableId()),
mapper.writeValueAsString(request),
UpsertTableMetricsCheckpointResponse.class);
}
@SneakyThrows
public CompletableFuture<GenerateCommitMetadataUploadUrlResponse> generateCommitMetadataUploadUrl(
GenerateCommitMetadataUploadUrlRequest request) {
return asyncPost(
MessageFormat.format(GENERATE_COMMIT_METADATA_UPLOAD_URL, request.getTableId()),
mapper.writeValueAsString(request),
GenerateCommitMetadataUploadUrlResponse.class);
}
@VisibleForTesting
Headers getHeaders(OnehouseClientConfig onehouseClientConfig) {
Headers.Builder headersBuilder = new Headers.Builder();
headersBuilder.add(PROJECT_UID_KEY, onehouseClientConfig.getProjectId());
headersBuilder.add(ONEHOUSE_API_KEY, onehouseClientConfig.getApiKey());
headersBuilder.add(ONEHOUSE_API_SECRET_KEY, onehouseClientConfig.getApiSecret());
headersBuilder.add(ONEHOUSE_USER_UUID_KEY, onehouseClientConfig.getUserId());
if (StringUtils.isNotEmpty(onehouseClientConfig.getRequestId())) {
headersBuilder.add(LINK_UID_KEY, onehouseClientConfig.getRequestId());
}
if (StringUtils.isNotEmpty(onehouseClientConfig.getRegion())) {
headersBuilder.add(ONEHOUSE_REGION_KEY, onehouseClientConfig.getRegion());
}
return headersBuilder.build();
}
@VisibleForTesting
<T> CompletableFuture<T> asyncGet(String url, Class<T> typeReference) {
Request request = new Request.Builder().url(url).headers(headers).build();
return asyncClient
.makeRequestWithRetry(request)
.thenApply(response -> handleResponse(response, typeReference));
}
@VisibleForTesting
<T> CompletableFuture<T> asyncPost(String apiEndpoint, String json, Class<T> typeReference) {
RequestBody body = RequestBody.create(MediaType.parse("application/json; charset=utf-8"), json);
Request request =
new Request.Builder()
.url(ONEHOUSE_API_ENDPOINT + apiEndpoint)
.post(body)
.headers(headers)
.build();
return asyncClient
.makeRequestWithRetry(request)
.thenApply(response -> handleResponse(response, typeReference));
}
private <T> T handleResponse(Response response, Class<T> typeReference) {
if (response.isSuccessful()) {
try {
if (response.body() != null) {
return mapper.readValue(response.body().string(), typeReference);
}
return null;
} catch (IOException jsonProcessingException) {
throw new UncheckedIOException("Failed to deserialize", jsonProcessingException);
}
} else {
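// Error path: build an empty response object reflectively (requires a no-arg constructor) and, when it is an ApiResponse, mark it as failed with the HTTP status so callers get a typed error instead of an exception.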
try {
T errorResponse = typeReference.getDeclaredConstructor().newInstance();
if (errorResponse instanceof ApiResponse) {
if (response.code() == 401) {
((ApiResponse) errorResponse).setError(response.code(), UNAUTHORIZED_ERROR_MESSAGE);
} else {
((ApiResponse) errorResponse).setError(response.code(), response.message());
}
}
response.close();
emmitApiErrorMetric(response.code());
return errorResponse;
} catch (InstantiationException
| IllegalAccessException
| NoSuchMethodException
| InvocationTargetException e) {
throw new RuntimeException("Failed to instantiate error response object", e);
}
}
}
private void emmitApiErrorMetric(int apiStatusCode) {
if (ACCEPTABLE_HTTP_FAILURE_STATUS_CODES.contains(apiStatusCode)) {
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
MetricsConstants.MetadataUploadFailureReasons.API_FAILURE_USER_ERROR);
} else {
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
MetricsConstants.MetadataUploadFailureReasons.API_FAILURE_SYSTEM_ERROR);
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/request/CommitTimelineType.java
|
package ai.onehouse.api.models.request;
public enum CommitTimelineType {
COMMIT_TIMELINE_TYPE_ACTIVE,
COMMIT_TIMELINE_TYPE_ARCHIVED
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/request/GenerateCommitMetadataUploadUrlRequest.java
|
package ai.onehouse.api.models.request;
import java.util.List;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Jacksonized
@Value
public class GenerateCommitMetadataUploadUrlRequest {
@NonNull private final String tableId;
@NonNull private final CommitTimelineType commitTimelineType;
@NonNull private final List<String> commitInstants;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/request/InitializeTableMetricsCheckpointRequest.java
|
package ai.onehouse.api.models.request;
import java.util.List;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Value
@Jacksonized
public class InitializeTableMetricsCheckpointRequest {
@Builder
@Value
@Jacksonized
public static class InitializeSingleTableMetricsCheckpointRequest {
@NonNull String tableId;
@NonNull String tableName;
@NonNull TableType tableType;
String lakeName;
String databaseName;
String tableBasePath;
}
List<InitializeSingleTableMetricsCheckpointRequest> tables;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/request/TableType.java
|
package ai.onehouse.api.models.request;
public enum TableType {
MERGE_ON_READ,
COPY_ON_WRITE
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/request/UploadedFile.java
|
package ai.onehouse.api.models.request;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Jacksonized
@Value
public class UploadedFile {
private String name;
private long lastModifiedAt;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/request/UpsertTableMetricsCheckpointRequest.java
|
package ai.onehouse.api.models.request;
import java.util.List;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Jacksonized
@Value
public class UpsertTableMetricsCheckpointRequest {
@NonNull private final String tableId;
@NonNull private final String checkpoint;
@NonNull private final List<String> filesUploaded;
@NonNull private final List<UploadedFile> uploadedFiles;
@NonNull private final CommitTimelineType commitTimelineType;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/response/ApiResponse.java
|
package ai.onehouse.api.models.response;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public abstract class ApiResponse {
private boolean isFailure = false;
private int statusCode = 200;
private String cause = "";
public void setError(int statusCode, String cause) {
this.isFailure = true;
this.statusCode = statusCode;
this.cause = cause;
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/response/GenerateCommitMetadataUploadUrlResponse.java
|
package ai.onehouse.api.models.response;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
@Builder
@Getter
@AllArgsConstructor
@NoArgsConstructor
public class GenerateCommitMetadataUploadUrlResponse extends ApiResponse {
private List<String> uploadUrls;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/response/GetTableMetricsCheckpointResponse.java
|
package ai.onehouse.api.models.response;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
@Builder
@Getter
@AllArgsConstructor
@NoArgsConstructor
public class GetTableMetricsCheckpointResponse extends ApiResponse {
@Builder
@Getter
@AllArgsConstructor
@NoArgsConstructor
public static class TableMetadataCheckpoint {
String tableId;
String checkpoint;
}
private List<TableMetadataCheckpoint> checkpoints;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/response/InitializeTableMetricsCheckpointResponse.java
|
package ai.onehouse.api.models.response;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
@Builder
@Getter
@AllArgsConstructor
@NoArgsConstructor
@ToString
public class InitializeTableMetricsCheckpointResponse extends ApiResponse {
@Builder
@Getter
@AllArgsConstructor
@NoArgsConstructor
@ToString
public static class InitializeSingleTableMetricsCheckpointResponse {
String tableId;
String error;
}
List<InitializeSingleTableMetricsCheckpointResponse> response;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/api/models/response/UpsertTableMetricsCheckpointResponse.java
|
package ai.onehouse.api.models.response;
import lombok.Builder;
import lombok.NoArgsConstructor;
@Builder
@NoArgsConstructor
public class UpsertTableMetricsCheckpointResponse extends ApiResponse {}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/cli_parser/CliParser.java
|
package ai.onehouse.cli_parser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
public class CliParser {
private String configFilePath;
private String configYamlString;
private static final String PATH_OPTION = "p";
private static final String CONFIG_OPTION = "c";
private static final String HELP_OPTION = "h";
private boolean helpRequested = false;
public void parse(String[] args) throws ParseException {
Options options = new Options();
Option pathOption =
Option.builder(PATH_OPTION)
.longOpt("path")
.hasArg()
.desc("The file path to the configuration file")
.build();
options.addOption(pathOption);
Option configOption =
Option.builder(CONFIG_OPTION)
.longOpt("config")
.hasArg()
.desc("The YAML configuration string")
.build();
options.addOption(configOption);
Option helpOption =
Option.builder(HELP_OPTION).longOpt("help").desc("Display help information").build();
options.addOption(helpOption);
CommandLineParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, args);
if (cmd.hasOption(HELP_OPTION)) {
HelpFormatter formatter = new HelpFormatter();
helpRequested = true;
formatter.printHelp("Onehouse LakeView", options);
return;
}
if (cmd.hasOption(PATH_OPTION) && cmd.hasOption(CONFIG_OPTION)) {
throw new ParseException("Cannot specify both file path and config string.");
}
if (cmd.hasOption(PATH_OPTION)) {
configFilePath = cmd.getOptionValue(PATH_OPTION);
}
if (cmd.hasOption(CONFIG_OPTION)) {
configYamlString = cmd.getOptionValue(CONFIG_OPTION);
}
}
public boolean isHelpRequested() {
return helpRequested;
}
public String getConfigFilePath() {
return configFilePath;
}
public String getConfigYamlString() {
return configYamlString;
}
}
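// Illustrative sketch (added for clarity, not part of the original source): a hypothetical entry
// point showing how CliParser is intended to be used. Only the CliParser calls themselves come
// from the class above; the surrounding wiring is an assumption.
class CliParserUsageSketch {
public static void main(String[] args) throws ParseException {
CliParser cliParser = new CliParser();
// e.g. args = {"-p", "/path/to/config.yaml"} or args = {"-c", "<inline yaml>"}
cliParser.parse(args);
if (cliParser.isHelpRequested()) {
return; // help text has already been printed by parse()
}
if (cliParser.getConfigFilePath() != null) {
System.out.println("Loading config from file: " + cliParser.getConfigFilePath());
} else if (cliParser.getConfigYamlString() != null) {
System.out.println("Loading config from inline YAML string");
}
}
}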
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/Config.java
|
package ai.onehouse.config;
import ai.onehouse.config.models.common.FileSystemConfiguration;
import ai.onehouse.config.models.common.OnehouseClientConfig;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
public interface Config {
ConfigVersion getVersion();
FileSystemConfiguration getFileSystemConfiguration();
OnehouseClientConfig getOnehouseClientConfig();
String getMetadataExtractorConfigPath();
MetadataExtractorConfig getMetadataExtractorConfig();
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/ConfigLoader.java
|
package ai.onehouse.config;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import ai.onehouse.config.models.common.OnehouseClientConfig;
import ai.onehouse.config.models.configv1.ConfigV1;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import lombok.NonNull;
import org.apache.commons.lang3.StringUtils;
public class ConfigLoader {
private final ObjectMapper MAPPER;
public ConfigLoader() {
this.MAPPER = new ObjectMapper(new YAMLFactory());
MAPPER.registerModule(new Jdk8Module());
}
public Config loadConfigFromConfigFile(String configFilePath) {
try (InputStream in = Files.newInputStream(Paths.get(configFilePath))) {
return loadConfigFromJsonNode(MAPPER.readTree(in));
} catch (Exception e) {
throw new RuntimeException("Failed to load config", e);
}
}
public Config loadConfigFromString(String configYaml) {
try {
return loadConfigFromJsonNode(MAPPER.readTree(configYaml));
} catch (Exception e) {
throw new RuntimeException("Failed to load config", e);
}
}
private Config loadConfigFromJsonNode(JsonNode jsonNode) throws IOException {
ConfigVersion version = ConfigVersion.valueOf(jsonNode.get("version").asText());
switch (version) {
case V1:
ConfigV1 configV1 = MAPPER.treeToValue(jsonNode, ConfigV1.class);
if (StringUtils.isNotBlank(configV1.getOnehouseClientConfig().getFile())) {
String onehouseClientConfigFileContent =
new String(
Files.readAllBytes(Paths.get(configV1.getOnehouseClientConfig().getFile())));
OnehouseClientConfig onehouseClientConfigFromFile =
MAPPER.readValue(onehouseClientConfigFileContent, OnehouseClientConfig.class);
configV1
.getOnehouseClientConfig()
.setProjectId(onehouseClientConfigFromFile.getProjectId());
configV1.getOnehouseClientConfig().setApiKey(onehouseClientConfigFromFile.getApiKey());
configV1
.getOnehouseClientConfig()
.setApiSecret(onehouseClientConfigFromFile.getApiSecret());
configV1.getOnehouseClientConfig().setUserId(onehouseClientConfigFromFile.getUserId());
configV1
.getOnehouseClientConfig()
.setRequestId(onehouseClientConfigFromFile.getRequestId());
configV1.getOnehouseClientConfig().setRegion(onehouseClientConfigFromFile.getRegion());
}
validateOnehouseClientConfig(configV1);
return configV1;
default:
throw new UnsupportedOperationException("Unsupported config version: " + version);
}
}
private void validateOnehouseClientConfig(ConfigV1 configV1) {
@NonNull OnehouseClientConfig onehouseClientConfig = configV1.getOnehouseClientConfig();
List<String> missingFields = new ArrayList<>();
if (StringUtils.isBlank(onehouseClientConfig.getProjectId())) {
missingFields.add("projectId");
}
if (StringUtils.isBlank(onehouseClientConfig.getApiKey())) {
missingFields.add("apiKey");
}
if (StringUtils.isBlank(onehouseClientConfig.getApiSecret())) {
missingFields.add("apiSecret");
}
if (StringUtils.isBlank(onehouseClientConfig.getUserId())) {
missingFields.add("userId");
}
if (!missingFields.isEmpty()) {
throw new IllegalArgumentException(
String.format(
"Missing config params: %s",
missingFields.stream().reduce((a, b) -> a + ", " + b).orElse("")));
}
if (configV1.getMetadataExtractorConfig().getTableDiscoveryIntervalMinutes() < 1) {
throw new IllegalArgumentException(
"tableDiscoveryIntervalMinutes should be a positive integer");
}
if (configV1.getMetadataExtractorConfig().getTableMetadataUploadIntervalMinutes() < 1) {
throw new IllegalArgumentException(
"tableMetadataUploadIntervalMinutes should be a positive integer");
}
if (configV1.getMetadataExtractorConfig().getProcessTableMetadataSyncDurationSeconds() < 1) {
throw new IllegalArgumentException(
"processTableMetadataSyncDurationSeconds should be a positive integer");
}
if (configV1.getMetadataExtractorConfig().getPresignedUrlRequestBatchSizeArchivedTimeline()
< 1) {
throw new IllegalArgumentException(
"presignedUrlRequestBatchSizeArchivedTimeline should be a positive integer");
}
if (configV1.getMetadataExtractorConfig().getPresignedUrlRequestBatchSizeActiveTimeline() < 1) {
throw new IllegalArgumentException(
"presignedUrlRequestBatchSizeActiveTimeline should be a positive integer");
}
}
public String convertConfigToString(Config config) throws JsonProcessingException {
switch (config.getVersion()) {
case V1:
ConfigV1 configV1 = (ConfigV1) config;
return MAPPER.writeValueAsString(configV1);
default:
throw new UnsupportedOperationException(
"Unsupported config version: " + config.getVersion());
}
}
}
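// Illustrative sketch (added for clarity, not part of the original source): loading a V1 config
// from an inline YAML string. The concrete values are made-up placeholders; the field names
// follow the ConfigV1, OnehouseClientConfig and MetadataExtractorConfig models in this package.
class ConfigLoaderUsageSketch {
public static void main(String[] args) {
String yaml =
"version: V1\n"
+ "onehouseClientConfig:\n"
+ "  projectId: example-project-uid\n"
+ "  apiKey: example-api-key\n"
+ "  apiSecret: example-api-secret\n"
+ "  userId: example-user-uuid\n"
+ "fileSystemConfiguration:\n"
+ "  s3Config:\n"
+ "    region: us-west-2\n"
+ "metadataExtractorConfig:\n"
+ "  parserConfig:\n"
+ "    - lake: example-lake\n"
+ "      databases:\n"
+ "        - name: example-database\n"
+ "          basePaths:\n"
+ "            - s3://example-bucket/lakehouse/example-database\n";
Config config = new ConfigLoader().loadConfigFromString(yaml);
System.out.println(config.getVersion()); // prints V1
}
}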
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/ConfigProvider.java
|
package ai.onehouse.config;
import java.util.concurrent.atomic.AtomicReference;
public class ConfigProvider {
private AtomicReference<Config> configRef;
public ConfigProvider(Config config) {
configRef = new AtomicReference<>();
setConfig(config);
}
public Config getConfig() {
return configRef.get();
}
public void setConfig(Config config) {
configRef.set(config);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/ConfigRefresher.java
|
package ai.onehouse.config;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.json.JsonMapper;
import ai.onehouse.storage.AsyncStorageClient;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.config.YamlMapFactoryBean;
import org.springframework.beans.factory.config.YamlProcessor;
import org.springframework.core.io.ByteArrayResource;
@Slf4j
public class ConfigRefresher {
private static final int POLL_PERIOD_MINUTES = 2;
private final String baseConfig;
private final String extractorConfigPath;
private final AsyncStorageClient storageClient;
private final ScheduledExecutorService executorService;
private final ConfigProvider configProvider;
private final ConfigLoader configLoader;
public ConfigRefresher(
String baseConfig,
String extractorConfigPath,
AsyncStorageClient storageClient,
ConfigLoader configLoader,
ConfigProvider configProvider) {
this.baseConfig = baseConfig;
this.extractorConfigPath = extractorConfigPath;
this.storageClient = storageClient;
this.executorService = Executors.newScheduledThreadPool(1);
this.configProvider = configProvider;
this.configLoader = configLoader;
}
public void start() throws Exception {
fetchAndOverrideConfig();
executorService.scheduleAtFixedRate(
() -> {
try {
fetchAndOverrideConfig();
} catch (Exception ex) {
log.error("failed to fetch override config", ex);
}
},
POLL_PERIOD_MINUTES,
POLL_PERIOD_MINUTES,
TimeUnit.MINUTES);
}
public void shutdown() {
if (executorService != null) {
executorService.shutdown();
}
}
private void fetchAndOverrideConfig() throws JsonProcessingException {
byte[] extractorConfigBytes = storageClient.readFileAsBytes(extractorConfigPath).join();
Config newConfigWithOverride =
mergeOverrideConfig(
new ByteArrayResource(baseConfig.getBytes()),
new ByteArrayResource(extractorConfigBytes));
configProvider.setConfig(newConfigWithOverride);
}
private Config mergeOverrideConfig(ByteArrayResource... configs) throws JsonProcessingException {
YamlMapFactoryBean factory = new YamlMapFactoryBean();
factory.setResolutionMethod(YamlProcessor.ResolutionMethod.OVERRIDE);
factory.setResources(configs);
Map<String, Object> configValueMap = factory.getObject();
JsonMapper jsonMapper = new JsonMapper();
String finalConfigString = jsonMapper.writeValueAsString(configValueMap);
return configLoader.loadConfigFromString(finalConfigString);
}
}
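// Note (added for clarity, not part of the original source): with
// YamlProcessor.ResolutionMethod.OVERRIDE the resources are merged in order and later resources
// win on duplicate keys, so the extractor config fetched from storage overrides matching values
// from the base config supplied at construction time.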
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/ConfigVersion.java
|
package ai.onehouse.config;
public enum ConfigVersion {
V1
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/common/FileSystemConfiguration.java
|
package ai.onehouse.config.models.common;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.extern.jackson.Jacksonized;
@Builder
@Jacksonized
@Getter
@EqualsAndHashCode
public class FileSystemConfiguration {
private S3Config s3Config;
private GCSConfig gcsConfig;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/common/GCSConfig.java
|
package ai.onehouse.config.models.common;
import java.util.Optional;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.extern.jackson.Jacksonized;
@Builder
@Jacksonized
@Getter
@EqualsAndHashCode
public class GCSConfig {
@Builder.Default private Optional<String> projectId = Optional.empty();
@Builder.Default private Optional<String> gcpServiceAccountKeyPath = Optional.empty();
@Builder.Default private Optional<String> serviceAccountToImpersonate = Optional.empty();
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/common/OnehouseClientConfig.java
|
package ai.onehouse.config.models.common;
import javax.annotation.Nullable;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.jackson.Jacksonized;
@Builder
@Getter
@Setter
@Jacksonized
@EqualsAndHashCode
public class OnehouseClientConfig {
@Nullable private String projectId;
@Nullable private String requestId;
@Nullable private String region;
@Nullable private String apiKey;
@Nullable private String apiSecret;
@Nullable private String userId;
@Nullable private String file;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/common/S3Config.java
|
package ai.onehouse.config.models.common;
import java.util.List;
import java.util.Optional;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.jackson.Jacksonized;
@Builder
@Getter
@Jacksonized
@EqualsAndHashCode
public class S3Config {
@NonNull private String region;
// optional to be used for quick testing
@Builder.Default private Optional<String> accessKey = Optional.empty();
@Builder.Default private Optional<String> accessSecret = Optional.empty();
@Builder.Default private Optional<String> arnToImpersonate = Optional.empty();
@Builder.Default private Optional<List<String>> arnsKmsKeys = Optional.empty();
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/configv1/ConfigV1.java
|
package ai.onehouse.config.models.configv1;
import ai.onehouse.config.Config;
import ai.onehouse.config.ConfigVersion;
import ai.onehouse.config.models.common.FileSystemConfiguration;
import ai.onehouse.config.models.common.OnehouseClientConfig;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.jackson.Jacksonized;
@Builder
@Getter
@Jacksonized
@EqualsAndHashCode
public class ConfigV1 implements Config {
@NonNull private String version;
@NonNull private OnehouseClientConfig onehouseClientConfig;
@NonNull private FileSystemConfiguration fileSystemConfiguration;
// If metadataExtractorConfigPath is provided, it overrides metadataExtractorConfig. If not
// provided, it is mandatory to pass metadataExtractorConfig.
private String metadataExtractorConfigPath;
private MetadataExtractorConfig metadataExtractorConfig;
@Override
public ConfigVersion getVersion() {
return ConfigVersion.valueOf(version);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/configv1/Database.java
|
package ai.onehouse.config.models.configv1;
import java.util.List;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Value
@Jacksonized
public class Database {
String name;
@NonNull List<String> basePaths;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/configv1/MetadataExtractorConfig.java
|
package ai.onehouse.config.models.configv1;
import static ai.onehouse.constants.MetadataExtractorConstants.DEFAULT_FILE_UPLOAD_STREAM_BATCH_SIZE;
import static ai.onehouse.constants.MetadataExtractorConstants.PRESIGNED_URL_REQUEST_BATCH_SIZE_ACTIVE_TIMELINE;
import static ai.onehouse.constants.MetadataExtractorConstants.PRESIGNED_URL_REQUEST_BATCH_SIZE_ARCHIVED_TIMELINE;
import static ai.onehouse.constants.MetadataExtractorConstants.PROCESS_TABLE_METADATA_SYNC_DURATION_SECONDS;
import static ai.onehouse.constants.MetadataExtractorConstants.TABLE_DISCOVERY_INTERVAL_MINUTES;
import static ai.onehouse.constants.MetadataExtractorConstants.TABLE_METADATA_UPLOAD_INTERVAL_MINUTES;
import static ai.onehouse.constants.MetadataExtractorConstants.WAIT_TIME_BEFORE_SHUTDOWN;
import java.util.List;
import java.util.Optional;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.jackson.Jacksonized;
@Builder
@Getter
@Jacksonized
@EqualsAndHashCode
public class MetadataExtractorConfig {
@NonNull private List<ParserConfig> parserConfig;
@Builder.Default private Optional<List<String>> pathExclusionPatterns = Optional.empty();
@Builder.Default private JobRunMode jobRunMode = JobRunMode.CONTINUOUS;
// This is used to estimate next run time for the job in pull model
@Builder.Default private String cronScheduleForPullModel = "0 */1 * * *";
@Builder.Default private Integer maxRunCountForPullModel = 5;
@Builder.Default private Integer minIntervalMinutes = 10;
@Builder.Default
private UploadStrategy uploadStrategy = UploadStrategy.BLOCK_ON_INCOMPLETE_COMMIT;
@Builder.Default
private int presignedUrlRequestBatchSizeActiveTimeline =
PRESIGNED_URL_REQUEST_BATCH_SIZE_ACTIVE_TIMELINE;
@Builder.Default
private int presignedUrlRequestBatchSizeArchivedTimeline =
PRESIGNED_URL_REQUEST_BATCH_SIZE_ARCHIVED_TIMELINE;
@Builder.Default
private int processTableMetadataSyncDurationSeconds =
PROCESS_TABLE_METADATA_SYNC_DURATION_SECONDS;
@Builder.Default private int tableDiscoveryIntervalMinutes = TABLE_DISCOVERY_INTERVAL_MINUTES;
@Builder.Default
private int tableMetadataUploadIntervalMinutes = TABLE_METADATA_UPLOAD_INTERVAL_MINUTES;
@Builder.Default private int fileUploadStreamBatchSize = DEFAULT_FILE_UPLOAD_STREAM_BATCH_SIZE;
@Builder.Default private int waitTimeBeforeShutdown = WAIT_TIME_BEFORE_SHUTDOWN;
@Builder.Default private int objectStoreNumRetries = 10;
@Builder.Default private int nettyMaxConcurrency = 50;
@Builder.Default private long nettyConnectionTimeoutSeconds = 60L;
public enum JobRunMode {
CONTINUOUS,
ONCE,
ONCE_WITH_RETRY
}
public enum UploadStrategy {
BLOCK_ON_INCOMPLETE_COMMIT,
CONTINUE_ON_INCOMPLETE_COMMIT
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/config/models/configv1/ParserConfig.java
|
package ai.onehouse.config.models.configv1;
import java.util.List;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Value
@Jacksonized
public class ParserConfig {
String lake;
@NonNull List<Database> databases;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/constants/ApiConstants.java
|
package ai.onehouse.constants;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class ApiConstants {
private ApiConstants() {}
public static final String ONEHOUSE_API_ENDPOINT =
System.getenv().getOrDefault("ONEHOUSE_API_ENDPOINT", "https://api.onehouse.ai");
// API Endpoints
public static final String INITIALIZE_TABLE_METRICS_CHECKPOINT =
"/v1/community/initialize-tables";
public static final String UPSERT_TABLE_METRICS_CHECKPOINT = "/v1/community/{0}/checkpoint";
public static final String GET_TABLE_METRICS_CHECKPOINT = "/v1/community/checkpoints";
public static final String GENERATE_COMMIT_METADATA_UPLOAD_URL = "/v1/community/{0}/upload-urls";
// Header constants
public static final String PROJECT_UID_KEY = "x-onehouse-project-uid";
public static final String ONEHOUSE_API_KEY = "x-onehouse-api-key";
public static final String ONEHOUSE_API_SECRET_KEY = "x-onehouse-api-secret";
public static final String LINK_UID_KEY = "x-onehouse-link-uid";
public static final String ONEHOUSE_REGION_KEY = "x-onehouse-region";
public static final String ONEHOUSE_USER_UUID_KEY = "x-onehouse-uuid";
// using mapping from:
// https://chromium.googlesource.com/external/github.com/grpc/grpc/+/refs/tags/v1.21.4-pre1/doc/statuscodes.md
public static final List<Integer> ACCEPTABLE_HTTP_FAILURE_STATUS_CODES =
Collections.unmodifiableList(new ArrayList<>(Arrays.asList(404, 400, 403, 401, 409)));
public static final String UNAUTHORIZED_ERROR_MESSAGE =
"Confirm that your API token is valid and has not expired.";
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/constants/MetadataExtractorConstants.java
|
package ai.onehouse.constants;
import ai.onehouse.metadata_extractor.models.Checkpoint;
import ai.onehouse.storage.models.File;
import java.time.Instant;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
public class MetadataExtractorConstants {
private MetadataExtractorConstants() {}
public static final String HOODIE_FOLDER_NAME = ".hoodie";
public static final String ARCHIVED_FOLDER_NAME = "archived";
public static final String HOODIE_PROPERTIES_FILE = "hoodie.properties";
public static final String HOODIE_TABLE_NAME_KEY = "hoodie.table.name";
public static final String HOODIE_TABLE_TYPE_KEY = "hoodie.table.type";
// The default number of instants in one archived commit metadata file is 10,
// so we want to ingest 10x as many active instants as archived instants in one batch
public static final int PRESIGNED_URL_REQUEST_BATCH_SIZE_ACTIVE_TIMELINE = 20;
public static final int PRESIGNED_URL_REQUEST_BATCH_SIZE_ARCHIVED_TIMELINE = 2;
// process table metadata will be called every 30 seconds,
// but metadata will be uploaded only if TABLE_METADATA_UPLOAD_INTERVAL_MINUTES amount of time has
// passed since last run
public static final int PROCESS_TABLE_METADATA_SYNC_DURATION_SECONDS = 30;
// Wait time so Prometheus (if any) is able to scrape metrics in single-run mode
public static final int WAIT_TIME_BEFORE_SHUTDOWN = 120;
public static final int TABLE_PROCESSING_BATCH_SIZE =
Math.min(
50,
Integer.parseInt(
System.getenv().getOrDefault("EXTRACTOR_TABLE_PROCESSING_BATCH_SIZE", "20")));
public static final int TABLE_DISCOVERY_INTERVAL_MINUTES = 30;
public static final int TABLE_METADATA_UPLOAD_INTERVAL_MINUTES = 5;
// Default batch size will be 5 MB
public static final int DEFAULT_FILE_UPLOAD_STREAM_BATCH_SIZE =
Integer.parseInt(System.getenv().getOrDefault("FILE_UPLOAD_STREAM_BATCH_SIZE", "5242880"));
public static final Pattern ARCHIVED_COMMIT_INSTANT_PATTERN =
Pattern.compile("\\.commits_\\.archive\\.\\d+_\\d+-\\d+-\\d+");
public static final Pattern ACTIVE_COMMIT_INSTANT_PATTERN =
Pattern.compile("\\d+(\\.[a-z]{1,20}){1,2}");
public static final Checkpoint INITIAL_CHECKPOINT =
Checkpoint.builder()
.batchId(0)
.checkpointTimestamp(Instant.EPOCH)
.lastUploadedFile("")
.firstIncompleteCommitFile("")
.archivedCommitsProcessed(false)
.build();
// hardcoding last modified at to prevent this from causing issues with our checkpoint logic
public static final File HOODIE_PROPERTIES_FILE_OBJ =
File.builder()
.filename(HOODIE_PROPERTIES_FILE)
.isDirectory(false)
.lastModifiedAt(Instant.EPOCH)
.build();
public static final String SAVEPOINT_ACTION = "savepoint";
public static final String ROLLBACK_ACTION = "rollback";
public static final Set<String> VALID_SAVEPOINT_ROLLBACK_ACTIONS =
new HashSet<>(Arrays.asList(SAVEPOINT_ACTION, ROLLBACK_ACTION));
public static final List<String> WHITELISTED_ACTION_TYPES =
Arrays.asList(
"commit",
"deltacommit",
ROLLBACK_ACTION,
SAVEPOINT_ACTION,
"restore",
"clean",
"compaction",
"replacecommit");
}
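// Illustrative sketch (added for clarity, not part of the original source): sample timeline
// filenames that the two commit-instant patterns above are expected to match. The concrete names
// are assumptions based on common Hudi timeline naming, not taken from this repository.
class CommitInstantPatternSketch {
public static void main(String[] args) {
// Active-timeline instants: "<timestamp>.<action>" or "<timestamp>.<action>.<state>"
System.out.println(
MetadataExtractorConstants.ACTIVE_COMMIT_INSTANT_PATTERN
.matcher("20240101120000123.commit")
.matches()); // true
System.out.println(
MetadataExtractorConstants.ACTIVE_COMMIT_INSTANT_PATTERN
.matcher("20240101120000123.deltacommit.requested")
.matches()); // true
// Archived-timeline files: ".commits_.archive.<batch>_<writer-token>"
System.out.println(
MetadataExtractorConstants.ARCHIVED_COMMIT_INSTANT_PATTERN
.matcher(".commits_.archive.13_1-0-1")
.matches()); // true
}
}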
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/constants/MetricsConstants.java
|
package ai.onehouse.constants;
public class MetricsConstants {
public static final int PROMETHEUS_METRICS_SCRAPING_DISABLED = 0;
public static final int PROMETHEUS_METRICS_SCRAPE_PORT =
Integer.parseInt(
System.getenv()
.getOrDefault(
"PROMETHEUS_METRICS_SCRAPE_PORT",
String.valueOf(PROMETHEUS_METRICS_SCRAPING_DISABLED)));
public enum MetadataUploadFailureReasons {
API_FAILURE_USER_ERROR,
API_FAILURE_SYSTEM_ERROR,
HOODIE_PROPERTY_NOT_FOUND_OR_CORRUPTED,
PRESIGNED_URL_UPLOAD_FAILURE,
RATE_LIMITING,
ACCESS_DENIED,
NO_TABLES_TO_INITIALIZE,
UNKNOWN,
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/constants/StorageConstants.java
|
package ai.onehouse.constants;
import java.util.regex.Pattern;
public class StorageConstants {
private StorageConstants() {}
// typical s3 path: "s3://bucket-name/path/to/object"
// gcs path format: "gs://bucket-name/path/to/file"
public static final Pattern OBJECT_STORAGE_URI_PATTERN =
Pattern.compile("^(s3://|gs://)([^/]+)(/.*)?");
// https://cloud.google.com/compute/docs/naming-resources#resource-name-format
public static final String GCP_RESOURCE_NAME_FORMAT = "^[a-z]([-a-z0-9]*[a-z0-9])$";
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/env/EnvironmentLookupProvider.java
|
package ai.onehouse.env;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@FunctionalInterface
public interface EnvironmentLookupProvider {
@Nullable
String getValue(@Nonnull String key);
class System implements EnvironmentLookupProvider {
@Nullable @Override
public String getValue(@Nonnull String key) {
return java.lang.System.getenv(key);
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/exceptions/AccessDeniedException.java
|
package ai.onehouse.exceptions;
public class AccessDeniedException extends RuntimeException {
public AccessDeniedException(String message) {
super(message);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/exceptions/FileUploadException.java
|
package ai.onehouse.exceptions;
import java.io.IOException;
public class FileUploadException extends RuntimeException {
public FileUploadException(String message) {
super(message);
}
public FileUploadException(IOException e) {
super(e);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/exceptions/ObjectStorageClientException.java
|
package ai.onehouse.exceptions;
public class ObjectStorageClientException extends RuntimeException {
public ObjectStorageClientException(Throwable cause) {
super(cause);
}
public ObjectStorageClientException(String message) {
super(message);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/exceptions/RateLimitException.java
|
package ai.onehouse.exceptions;
public class RateLimitException extends RuntimeException{
public RateLimitException(String message) {
super(message);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/ActiveTimelineInstantBatcher.java
|
package ai.onehouse.metadata_extractor;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_PROPERTIES_FILE;
import static ai.onehouse.constants.MetadataExtractorConstants.ROLLBACK_ACTION;
import static ai.onehouse.constants.MetadataExtractorConstants.SAVEPOINT_ACTION;
import static ai.onehouse.constants.MetadataExtractorConstants.VALID_SAVEPOINT_ROLLBACK_ACTIONS;
import static ai.onehouse.constants.MetadataExtractorConstants.WHITELISTED_ACTION_TYPES;
import ai.onehouse.config.Config;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import ai.onehouse.metadata_extractor.models.Checkpoint;
import ai.onehouse.storage.models.File;
import com.google.inject.Inject;
import java.math.BigInteger;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.Builder;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
public class ActiveTimelineInstantBatcher {
private final MetadataExtractorConfig extractorConfig;
@Inject
public ActiveTimelineInstantBatcher(@Nonnull Config config) {
this.extractorConfig = config.getMetadataExtractorConfig();
}
/**
* Creates batches of Hudi instants, ensuring related instants are grouped together.
*
* @param instants The list of Hudi instants.
* @param maxBatchSize the maximum number of instants per batch.
* @param checkpoint the last uploaded checkpoint, used to filter already-uploaded instants and
*     to seed the first-incomplete-commit marker.
* @return A pair of the first incomplete commit checkpoint and the list of batches, each batch
*     being a list of instants.
*/
public Pair<String, List<List<File>>> createBatches(
List<File> instants, int maxBatchSize, Checkpoint checkpoint) {
if (maxBatchSize < 3) {
throw new IllegalArgumentException("max batch size cannot be less than 3");
}
List<File> sortedInstants;
if (extractorConfig
.getUploadStrategy()
.equals(MetadataExtractorConfig.UploadStrategy.CONTINUE_ON_INCOMPLETE_COMMIT)) {
// Group instants belonging to the same commit and keep a group only if any of its files was
// modified after the timestamp of the last uploaded checkpoint
sortedInstants = sortAndFilterInstants(instants, checkpoint.getCheckpointTimestamp());
} else {
sortedInstants = sortAndFilterInstants(instants);
}
List<List<File>> batches = new ArrayList<>();
List<File> currentBatch = new ArrayList<>();
String firstIncompleteCheckpoint = checkpoint.getFirstIncompleteCommitFile();
int startIndex = 0;
if (!sortedInstants.isEmpty()
&& sortedInstants.get(0).getFilename().equals(HOODIE_PROPERTIES_FILE)) {
startIndex = 1;
currentBatch.add(sortedInstants.get(0));
}
// Stop threshold is set to sortedInstants.size() - 2 to ensure we don't miss the case
// when timeline ends with a completed savepoint action
int index = startIndex;
while (index <= sortedInstants.size() - 2) {
ActiveTimelineInstant instant1 =
getActiveTimeLineInstant(sortedInstants.get(index).getFilename());
int groupSize = 3;
boolean areInstantsInGrpRelated;
boolean shouldStopIteration = false;
if (instant1.action.equals(ROLLBACK_ACTION)) {
// For rollback action, requested or inflight commits will be present unless there is
// some error while restoring. Since rollback is not used when calculating metrics,
// we don't want to be blocked by unusual rollback status.
if (index + 2 >= sortedInstants.size()) {
// If the latest rollback is not complete or there is a single completed rollback at the
// end.
// For the second case, we can upload in the following batch as rollback doesn't affect
// metrics.
areInstantsInGrpRelated = false;
shouldStopIteration = true;
} else {
// First try to check for 3-file pattern
ActiveTimelineInstant instant2 =
getActiveTimeLineInstant(sortedInstants.get(index + 1).getFilename());
ActiveTimelineInstant instant3 =
getActiveTimeLineInstant(sortedInstants.get(index + 2).getFilename());
areInstantsInGrpRelated = areRelatedInstants(instant1, instant2, instant3);
if (!areInstantsInGrpRelated) {
// If 3-file pattern doesn't match, check for 2-file pattern
areInstantsInGrpRelated = areRelatedSavepointOrRollbackInstants(instant1, instant2);
groupSize = 2;
// If neither pattern matches but it's a completed rollback, process it individually
if (!areInstantsInGrpRelated && instant1.getState().equals("completed")) {
groupSize = 1;
areInstantsInGrpRelated = true;
}
}
}
} else if (instant1.action.equals(SAVEPOINT_ACTION)) {
if (index + 1 >= sortedInstants.size()) {
// If the latest commit is not complete
areInstantsInGrpRelated = false;
shouldStopIteration = true;
} else {
ActiveTimelineInstant instant2 =
getActiveTimeLineInstant(sortedInstants.get(index + 1).getFilename());
areInstantsInGrpRelated = areRelatedSavepointOrRollbackInstants(instant1, instant2);
groupSize = 2;
}
} else {
if (index + 2 >= sortedInstants.size()) {
// If the latest commit is not complete
areInstantsInGrpRelated = false;
shouldStopIteration = true;
} else {
ActiveTimelineInstant instant2 =
getActiveTimeLineInstant(sortedInstants.get(index + 1).getFilename());
ActiveTimelineInstant instant3 =
getActiveTimeLineInstant(sortedInstants.get(index + 2).getFilename());
areInstantsInGrpRelated = areRelatedInstants(instant1, instant2, instant3);
}
}
if (areInstantsInGrpRelated) {
if (currentBatch.size() + groupSize <= maxBatchSize) {
// Add the next group of related instants to the current batch
currentBatch.addAll(sortedInstants.subList(index, index + groupSize));
} else {
// Current batch size limit reached, start a new batch
batches.add(new ArrayList<>(currentBatch));
currentBatch.clear();
currentBatch.addAll(sortedInstants.subList(index, index + groupSize));
}
} else if (!shouldStopIteration) {
if (extractorConfig
.getUploadStrategy()
.equals(MetadataExtractorConfig.UploadStrategy.CONTINUE_ON_INCOMPLETE_COMMIT)) {
// Instead of blocking batch creation, skip the incomplete commit file and move the first
// incomplete checkpoint (startAfter) to one unit before the incomplete instant
String firstIncompleteCheckpointUpdated =
getFirstIncompleteCheckpoint(instant1.getTimestamp());
if (StringUtils.isBlank(firstIncompleteCheckpoint)
|| firstIncompleteCheckpointUpdated.compareTo(firstIncompleteCheckpoint) < 0) {
firstIncompleteCheckpoint = firstIncompleteCheckpointUpdated;
}
groupSize = 1;
} else {
shouldStopIteration = true;
}
}
if (shouldStopIteration) {
if (!currentBatch.isEmpty()) {
batches.add(new ArrayList<>(currentBatch));
currentBatch.clear();
}
break;
}
index += groupSize;
}
// Add any remaining instants in the current batch
if (!currentBatch.isEmpty()) {
batches.add(currentBatch);
}
return Pair.of(firstIncompleteCheckpoint, batches);
}
private static String getFirstIncompleteCheckpoint(String numericString) {
BigInteger number = new BigInteger(numericString);
BigInteger decrementedNumber = number.subtract(BigInteger.ONE);
return decrementedNumber.toString();
}
private List<File> sortAndFilterInstants(List<File> instants) {
return instants.stream()
.filter(this::filterFile)
.sorted(getFileComparator())
.collect(Collectors.toList());
}
private List<File> sortAndFilterInstants(List<File> instants, Instant lastModifiedFilter) {
return instants.stream()
.filter(this::filterFile)
.collect(Collectors.groupingBy(file -> file.getFilename().split("\\.", 3)[0]))
.values()
.stream()
.filter(
group ->
group.stream()
.anyMatch(
file ->
file.getFilename().equals(HOODIE_PROPERTIES_FILE)
|| lastModifiedFilter.isBefore(file.getLastModifiedAt())))
.flatMap(List::stream)
.sorted(getFileComparator())
.collect(Collectors.toList());
}
private boolean filterFile(File file) {
return file.getFilename().equals(HOODIE_PROPERTIES_FILE)
|| WHITELISTED_ACTION_TYPES.contains(
getActiveTimeLineInstant(file.getFilename()).getAction());
}
private Comparator<File> getFileComparator() {
return Comparator.comparing(
File::getFilename,
(name1, name2) -> {
if (HOODIE_PROPERTIES_FILE.equals(name1)) {
return -1;
} else if (HOODIE_PROPERTIES_FILE.equals(name2)) {
return 1;
}
return name1.compareTo(name2);
});
}
static boolean areRelatedInstants(
ActiveTimelineInstant instant1,
ActiveTimelineInstant instant2,
ActiveTimelineInstant instant3) {
if (!instant1.getTimestamp().equals(instant2.getTimestamp())
|| !instant2.getTimestamp().equals(instant3.getTimestamp())) {
return false;
}
// Check if all three states are present
Set<String> states =
new HashSet<>(Arrays.asList(instant1.getState(), instant2.getState(), instant3.getState()));
return states.containsAll(Arrays.asList("inflight", "requested", "completed"));
}
// Savepoint and Rollback (Hudi v0.8) instants only have inflight and final commit
static boolean areRelatedSavepointOrRollbackInstants(
ActiveTimelineInstant instant1, ActiveTimelineInstant instant2) {
if (!instant1.getTimestamp().equals(instant2.getTimestamp())) {
return false;
}
Set<String> states = new HashSet<>(Arrays.asList(instant1.getState(), instant2.getState()));
return states.containsAll(Arrays.asList("inflight", "completed")) &&
instant1.getAction().equals(instant2.getAction()) &&
VALID_SAVEPOINT_ROLLBACK_ACTIONS.contains(instant1.getAction());
}
static ActiveTimelineInstant getActiveTimeLineInstant(String instant) {
String[] parts = instant.split("\\.", 3);
String action;
String state;
// For commit action, metadata file in inflight state is in the format of XYZ.inflight
if (parts.length == 2 && parts[1].equals("inflight")) {
action = "commit";
state = "inflight";
} else {
action = parts[1];
state = parts.length == 3 ? parts[2] : "completed";
}
return ActiveTimelineInstant.builder().timestamp(parts[0]).action(action).state(state).build();
}
@Builder
@Getter
static class ActiveTimelineInstant {
private final String timestamp;
private final String action;
private final String state;
}
}
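// Illustrative sketch (added for clarity, not part of the original source): how active-timeline
// filenames decompose into (timestamp, action, state) via getActiveTimeLineInstant above. The
// sample filenames are assumptions based on typical Hudi naming.
class ActiveTimelineInstantParsingSketch {
public static void main(String[] args) {
// Three-part name: "<timestamp>.<action>.<state>"
ActiveTimelineInstantBatcher.ActiveTimelineInstant requested =
ActiveTimelineInstantBatcher.getActiveTimeLineInstant("20240101120000123.commit.requested");
// Two-part ".inflight" name is normalized to action=commit, state=inflight
ActiveTimelineInstantBatcher.ActiveTimelineInstant inflight =
ActiveTimelineInstantBatcher.getActiveTimeLineInstant("20240101120000123.inflight");
// Two-part name without ".inflight" is treated as a completed instant
ActiveTimelineInstantBatcher.ActiveTimelineInstant completed =
ActiveTimelineInstantBatcher.getActiveTimeLineInstant("20240101120000123.commit");
System.out.println(requested.getState()); // requested
System.out.println(inflight.getAction() + "/" + inflight.getState()); // commit/inflight
System.out.println(completed.getState()); // completed
}
}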
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/HoodiePropertiesReader.java
|
package ai.onehouse.metadata_extractor;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_TABLE_NAME_KEY;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_TABLE_TYPE_KEY;
import static ai.onehouse.metadata_extractor.MetadataExtractorUtils.getMetadataExtractorFailureReason;
import com.google.inject.Inject;
import ai.onehouse.api.models.request.TableType;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.metadata_extractor.models.ParsedHudiProperties;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.RuntimeModule.TableMetadataUploadObjectStorageAsyncClient;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class HoodiePropertiesReader {
private final AsyncStorageClient asyncStorageClient;
private final LakeViewExtractorMetrics hudiMetadataExtractorMetrics;
@Inject
public HoodiePropertiesReader(
@TableMetadataUploadObjectStorageAsyncClient AsyncStorageClient asyncStorageClient,
LakeViewExtractorMetrics hudiMetadataExtractorMetrics) {
this.asyncStorageClient = asyncStorageClient;
this.hudiMetadataExtractorMetrics = hudiMetadataExtractorMetrics;
}
public CompletableFuture<ParsedHudiProperties> readHoodieProperties(String path) {
log.debug("parsing {} file", path);
return asyncStorageClient
.streamFileAsync(path)
.thenApplyAsync(
fileStreamData -> {
Properties properties = new Properties();
try (InputStream is = fileStreamData.getInputStream()) {
properties.load(is);
} catch (IOException e) {
throw new RuntimeException("Failed to load properties file", e);
}
return ParsedHudiProperties.builder()
.tableName(properties.getProperty(HOODIE_TABLE_NAME_KEY))
.tableType(TableType.valueOf(properties.getProperty(HOODIE_TABLE_TYPE_KEY)))
.build();
})
.exceptionally(
throwable -> {
log.error("Error encountered when reading hoodie properties file", throwable);
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
getMetadataExtractorFailureReason(
throwable,
MetricsConstants.MetadataUploadFailureReasons.HOODIE_PROPERTY_NOT_FOUND_OR_CORRUPTED)
);
return null;
});
}
}
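// Note (added for clarity, not part of the original source): a hypothetical minimal
// hoodie.properties file only needs the two keys read above, e.g.
//   hoodie.table.name=example_table
//   hoodie.table.type=COPY_ON_WRITE
// where hoodie.table.type must be one of the TableType enum values (COPY_ON_WRITE, MERGE_ON_READ).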
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/MetadataExtractorUtils.java
|
package ai.onehouse.metadata_extractor;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.exceptions.AccessDeniedException;
import ai.onehouse.exceptions.RateLimitException;
public final class MetadataExtractorUtils {
private MetadataExtractorUtils(){}
public static MetricsConstants.MetadataUploadFailureReasons getMetadataExtractorFailureReason(
Throwable ex,
MetricsConstants.MetadataUploadFailureReasons defaultReason){
if (ex.getCause() instanceof RateLimitException){
return MetricsConstants.MetadataUploadFailureReasons.RATE_LIMITING;
}
if (ex.getCause() instanceof AccessDeniedException){
return MetricsConstants.MetadataUploadFailureReasons.ACCESS_DENIED;
}
return defaultReason;
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/TableDiscoveryAndUploadJob.java
|
package ai.onehouse.metadata_extractor;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.RuntimeModule.TableDiscoveryObjectStorageAsyncClient;
import com.cronutils.model.Cron;
import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import ai.onehouse.config.Config;
import ai.onehouse.metadata_extractor.models.Table;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import static ai.onehouse.metadata_extractor.MetadataExtractorUtils.getMetadataExtractorFailureReason;
@Slf4j
public class TableDiscoveryAndUploadJob {
private final TableDiscoveryService tableDiscoveryService;
private final TableMetadataUploaderService tableMetadataUploaderService;
private final ScheduledExecutorService scheduler;
private final Object lock = new Object();
private final LakeViewExtractorMetrics hudiMetadataExtractorMetrics;
private final AsyncStorageClient asyncStorageClient;
private Set<Table> tablesToProcess;
private Instant previousTableMetadataUploadRunStartTime = Instant.EPOCH;
private final Instant firstCronRunStartTime;
@Inject
public TableDiscoveryAndUploadJob(
@Nonnull TableDiscoveryService tableDiscoveryService,
@Nonnull TableMetadataUploaderService tableMetadataUploaderService,
@Nonnull LakeViewExtractorMetrics hudiMetadataExtractorMetrics,
@Nonnull @TableDiscoveryObjectStorageAsyncClient AsyncStorageClient asyncStorageClient) {
this.scheduler = getScheduler();
this.tableDiscoveryService = tableDiscoveryService;
this.tableMetadataUploaderService = tableMetadataUploaderService;
this.hudiMetadataExtractorMetrics = hudiMetadataExtractorMetrics;
this.firstCronRunStartTime = Instant.now();
this.asyncStorageClient = asyncStorageClient;
}
/*
* runs discovery and upload periodically at fixed intervals in a continuous fashion
*/
public void runInContinuousMode(Config config) {
log.debug("Running metadata-extractor in continuous mode");
asyncStorageClient.initializeClient();
// Schedule table discovery
scheduler.scheduleAtFixedRate(
this::discoverTables,
0,
config.getMetadataExtractorConfig().getTableDiscoveryIntervalMinutes(),
TimeUnit.MINUTES);
// Schedule table processing
scheduler.scheduleAtFixedRate(
() -> processTables(config),
0,
config.getMetadataExtractorConfig().getProcessTableMetadataSyncDurationSeconds(),
TimeUnit.SECONDS);
}
/*
* Runs table discovery followed by metadata uploader once
*/
public void runOnce() {
asyncStorageClient.initializeClient();
runOnce(null, 1);
}
public void runOnce(Config config) {
asyncStorageClient.initializeClient();
runOnce(config, 1);
}
public void runOnce(Config config, int runCounter) {
log.info("Running metadata-extractor starting at: {}", firstCronRunStartTime);
Boolean isSucceeded =
tableDiscoveryService
.discoverTables()
.thenCompose(tableMetadataUploaderService::uploadInstantsInTables)
.join();
if (Boolean.TRUE.equals(isSucceeded)) {
log.info("Run Completed");
} else {
log.error("Run failed");
/*
* The retry is done in the following known scenarios:
* 1. Session token expiry for a pull-model customer
* 2. Temporary network issues, where an external API call occasionally fails despite client retries
* 3. Issues related to throttling of calls to S3/GCS
* */
if (config != null &&
config.getMetadataExtractorConfig().getJobRunMode().equals(MetadataExtractorConfig.JobRunMode.ONCE_WITH_RETRY)
&& shouldRunAgainForRunOnceConfiguration(config)
&& runCounter < config.getMetadataExtractorConfig().getMaxRunCountForPullModel()) {
log.info("Retrying job: Attempt {}/{}",
runCounter + 1,
config.getMetadataExtractorConfig().getMaxRunCountForPullModel());
// Handle client session timeout errors if any
asyncStorageClient.refreshClient();
runOnce(config, runCounter + 1);
}
}
}
@VisibleForTesting
boolean shouldRunAgainForRunOnceConfiguration(Config config) {
MetadataExtractorConfig metadataExtractorConfig = config.getMetadataExtractorConfig();
Cron cron = new CronParser((CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX)))
.parse(metadataExtractorConfig.getCronScheduleForPullModel());
ExecutionTime executionTime = ExecutionTime.forCron(cron);
Optional<ZonedDateTime> nextExecutionTime =
executionTime.nextExecution(firstCronRunStartTime.atZone(ZoneOffset.UTC));
if (nextExecutionTime.isPresent() && Duration.between(firstCronRunStartTime,
nextExecutionTime.get().toInstant()).toMinutes() < metadataExtractorConfig.getMinIntervalMinutes()) {
log.info("Stopping the job as next scheduled run is less than 10 minutes away");
return false;
}
return true;
}
private void discoverTables() {
log.info("Discovering tables in provided paths");
tableDiscoveryService
.discoverTables()
.thenApply(
tables -> {
synchronized (lock) {
tablesToProcess = tables;
}
hudiMetadataExtractorMetrics.setDiscoveredTablesPerRound(tables.size());
return null;
})
.exceptionally(
ex -> {
log.error("Error discovering tables: ", ex);
hudiMetadataExtractorMetrics
.incrementTableDiscoveryFailureCounter(getMetadataExtractorFailureReason(
ex,
MetricsConstants.MetadataUploadFailureReasons.UNKNOWN));
return null;
})
.join();
}
private void processTables(Config config) {
log.debug("Polling to see if metadata needs to be uploaded");
Instant tableMetadataUploadRunStartTime = Instant.now();
if (Duration.between(previousTableMetadataUploadRunStartTime, tableMetadataUploadRunStartTime)
.toMinutes()
>= config.getMetadataExtractorConfig().getTableMetadataUploadIntervalMinutes()) {
Set<Table> tables = null;
synchronized (lock) {
if (tablesToProcess != null) {
tables = new HashSet<>(tablesToProcess);
}
}
if (tables != null && !tables.isEmpty()) {
log.debug("Uploading table metadata for discovered tables");
hudiMetadataExtractorMetrics.resetTableProcessedGauge();
AtomicBoolean hasError = new AtomicBoolean(false);
tableMetadataUploaderService
.uploadInstantsInTables(tables)
.exceptionally(
ex -> {
log.error("Error uploading instants in tables: ", ex);
hasError.set(true);
hudiMetadataExtractorMetrics.incrementTableSyncFailureCounter();
return null;
})
.join();
if (!hasError.get()) {
hudiMetadataExtractorMetrics.incrementTableSyncSuccessCounter();
}
previousTableMetadataUploadRunStartTime = tableMetadataUploadRunStartTime;
}
}
}
public void shutdown() {
scheduler.shutdown();
}
@VisibleForTesting
ScheduledExecutorService getScheduler() {
return Executors.newScheduledThreadPool(2);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/TableDiscoveryService.java
|
package ai.onehouse.metadata_extractor;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_FOLDER_NAME;
import static ai.onehouse.metadata_extractor.MetadataExtractorUtils.getMetadataExtractorFailureReason;
import static java.util.Collections.emptySet;
import com.google.inject.Inject;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.config.ConfigProvider;
import ai.onehouse.config.models.configv1.Database;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import ai.onehouse.config.models.configv1.ParserConfig;
import ai.onehouse.metadata_extractor.models.Table;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.storage.StorageUtils;
import ai.onehouse.storage.models.File;
import ai.onehouse.RuntimeModule.TableDiscoveryObjectStorageAsyncClient;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
/*
* Discovers Hudi tables by parsing all folders (including nested folders) in the provided base
* paths; excluded paths are skipped.
*/
@Slf4j
public class TableDiscoveryService {
private static final String TABLE_ID_SEPARATOR = "#";
private final AsyncStorageClient asyncStorageClient;
private final StorageUtils storageUtils;
private final ExecutorService executorService;
private final ConfigProvider configProvider;
private final LakeViewExtractorMetrics lakeviewExtractorMetrics;
@Inject
public TableDiscoveryService(
@Nonnull @TableDiscoveryObjectStorageAsyncClient AsyncStorageClient asyncStorageClient,
@Nonnull StorageUtils storageUtils,
@Nonnull ConfigProvider configProvider,
@Nonnull ExecutorService executorService,
@Nonnull LakeViewExtractorMetrics lakeviewExtractorMetrics) {
this.asyncStorageClient = asyncStorageClient;
this.storageUtils = storageUtils;
this.executorService = executorService;
this.configProvider = configProvider;
this.lakeviewExtractorMetrics = lakeviewExtractorMetrics;
}
public CompletableFuture<Set<Table>> discoverTables() {
MetadataExtractorConfig metadataExtractorConfig =
configProvider.getConfig().getMetadataExtractorConfig();
List<String> excludedPathPatterns =
metadataExtractorConfig.getPathExclusionPatterns().orElse(new ArrayList<>());
log.info("Starting table discover service, excluding {}", excludedPathPatterns);
List<Pair<String, CompletableFuture<Set<Table>>>> pathToDiscoveredTablesFuturePairList =
new ArrayList<>();
for (ParserConfig parserConfig : metadataExtractorConfig.getParserConfig()) {
for (Database database : parserConfig.getDatabases()) {
for (String basePathConfig : database.getBasePaths()) {
String basePath = extractBasePath(basePathConfig);
if (isExcluded(basePath, excludedPathPatterns)) {
log.warn("Provided base-path has also been passed under paths to exclude {}", basePath);
}
pathToDiscoveredTablesFuturePairList.add(
Pair.of(
basePathConfig,
discoverTablesInPath(
basePath, parserConfig.getLake(), database.getName(), excludedPathPatterns)));
}
}
}
return CompletableFuture.allOf(
pathToDiscoveredTablesFuturePairList.stream()
.map(Pair::getRight)
.toArray(CompletableFuture[]::new))
.thenApply(
ignored -> {
Set<Table> allTablePaths = ConcurrentHashMap.newKeySet();
for (Pair<String, CompletableFuture<Set<Table>>> pathToDiscoveredTablesPair :
pathToDiscoveredTablesFuturePairList) {
Set<Table> discoveredTables = pathToDiscoveredTablesPair.getRight().join();
String basePathConfig = pathToDiscoveredTablesPair.getLeft();
String tableId = extractTableId(basePathConfig);
if (StringUtils.isNotBlank(tableId)) {
if (discoveredTables.size() != 1) {
log.debug(
String.format(
"For tableId %s, there must be exactly one table in path %s",
tableId, extractBasePath(basePathConfig)));
continue;
}
Table table = discoveredTables.iterator().next();
table = table.toBuilder().tableId(tableId).build();
discoveredTables = Collections.singleton(table);
}
allTablePaths.addAll(discoveredTables);
}
return allTablePaths;
});
}
private String extractBasePath(String basePathConfig) {
String[] basePathConfigParts = basePathConfig.split(TABLE_ID_SEPARATOR);
return basePathConfigParts[0];
}
private String extractTableId(String basePathConfig) {
String[] basePathConfigParts = basePathConfig.split(TABLE_ID_SEPARATOR);
return basePathConfigParts.length > 1 ? basePathConfigParts[1] : "";
}
private CompletableFuture<Set<Table>> discoverTablesInPath(
String path, String lakeName, String databaseName, List<String> excludedPathPatterns) {
try {
log.info(String.format("Discovering tables in %s", path));
return asyncStorageClient
.listAllFilesInDir(path)
.thenComposeAsync(
listedFiles -> {
Set<Table> tablePaths = ConcurrentHashMap.newKeySet();
List<CompletableFuture<Void>> recursiveFutures = new ArrayList<>();
if (isHudiTableFolder(listedFiles)) {
Table table =
Table.builder()
.absoluteTableUri(path)
.databaseName(databaseName)
.lakeName(lakeName)
.build();
if (!isExcluded(table.getAbsoluteTableUri(), excludedPathPatterns)) {
tablePaths.add(table);
}
return CompletableFuture.completedFuture(tablePaths);
}
List<File> directories =
listedFiles.stream().filter(File::isDirectory).collect(Collectors.toList());
for (File file : directories) {
String filePath = storageUtils.constructFileUri(path, file.getFilename());
if (!isExcluded(filePath, excludedPathPatterns)) {
CompletableFuture<Void> recursiveFuture =
discoverTablesInPath(filePath, lakeName, databaseName, excludedPathPatterns)
.thenAccept(tablePaths::addAll);
recursiveFutures.add(recursiveFuture);
}
}
return CompletableFuture.allOf(recursiveFutures.toArray(new CompletableFuture[0]))
.thenApplyAsync(ignored -> tablePaths, executorService);
},
executorService)
.exceptionally(
e -> {
log.error("Failed to discover tables in path: {}", path, e);
lakeviewExtractorMetrics.incrementTableDiscoveryFailureCounter(
getMetadataExtractorFailureReason(
e,
MetricsConstants.MetadataUploadFailureReasons.UNKNOWN)
);
return emptySet();
});
} catch (Exception e) {
log.error("Failed to discover tables in path: {}", path, e);
return CompletableFuture.completedFuture(emptySet());
}
}
  /*
   * Checks the contents of a folder to determine whether it is a Hudi table.
   * A folder is a Hudi table if it contains a .hoodie folder within it.
   */
private static boolean isHudiTableFolder(List<File> listedFiles) {
return listedFiles.stream().anyMatch(file -> file.getFilename().startsWith(HOODIE_FOLDER_NAME));
}
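  // Exclusion patterns are evaluated as Java regular expressions against the full path
  // (String#matches), so a hypothetical pattern like ".*/temp(/.*)?" would exclude any "temp" folder.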
private boolean isExcluded(String filePath, List<String> excludedPathPatterns) {
return excludedPathPatterns.stream().anyMatch(filePath::matches);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/TableMetadataUploaderService.java
|
package ai.onehouse.metadata_extractor;
import static ai.onehouse.constants.MetadataExtractorConstants.ARCHIVED_COMMIT_INSTANT_PATTERN;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_FOLDER_NAME;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_PROPERTIES_FILE;
import static ai.onehouse.constants.MetadataExtractorConstants.INITIAL_CHECKPOINT;
import static ai.onehouse.constants.MetadataExtractorConstants.TABLE_PROCESSING_BATCH_SIZE;
import static ai.onehouse.metadata_extractor.MetadataExtractorUtils.getMetadataExtractorFailureReason;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import ai.onehouse.api.OnehouseApiClient;
import ai.onehouse.api.models.request.CommitTimelineType;
import ai.onehouse.api.models.request.InitializeTableMetricsCheckpointRequest;
import ai.onehouse.api.models.response.GetTableMetricsCheckpointResponse;
import ai.onehouse.api.models.response.InitializeTableMetricsCheckpointResponse;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.metadata_extractor.models.Checkpoint;
import ai.onehouse.metadata_extractor.models.Table;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
/*
 * Uploads instants in the active and archived timelines for the tables that were discovered.
 */
@Slf4j
public class TableMetadataUploaderService {
private final HoodiePropertiesReader hoodiePropertiesReader;
private final OnehouseApiClient onehouseApiClient;
private final TimelineCommitInstantsUploader timelineCommitInstantsUploader;
private final LakeViewExtractorMetrics hudiMetadataExtractorMetrics;
private final ExecutorService executorService;
private final ObjectMapper mapper;
@Inject
public TableMetadataUploaderService(
@Nonnull HoodiePropertiesReader hoodiePropertiesReader,
@Nonnull OnehouseApiClient onehouseApiClient,
@Nonnull TimelineCommitInstantsUploader timelineCommitInstantsUploader,
@Nonnull LakeViewExtractorMetrics hudiMetadataExtractorMetrics,
@Nonnull ExecutorService executorService) {
this.hoodiePropertiesReader = hoodiePropertiesReader;
this.onehouseApiClient = onehouseApiClient;
this.timelineCommitInstantsUploader = timelineCommitInstantsUploader;
this.hudiMetadataExtractorMetrics = hudiMetadataExtractorMetrics;
this.executorService = executorService;
this.mapper = new ObjectMapper();
mapper.registerModule(new JavaTimeModule());
}
public CompletableFuture<Boolean> uploadInstantsInTables(Set<Table> tablesToProcess) {
log.info("Uploading metadata of following tables: " + tablesToProcess);
List<Table> tableWithIds =
tablesToProcess.stream().map(this::updateTableIdIfNotPresent).collect(Collectors.toList());
List<List<Table>> tableBatches =
Lists.partition(new ArrayList<>(tableWithIds), TABLE_PROCESSING_BATCH_SIZE);
CompletableFuture<Boolean> processTableBatchFuture = CompletableFuture.completedFuture(true);
// process batches one after another
for (List<Table> tableBatch : tableBatches) {
processTableBatchFuture =
processTableBatchFuture.thenComposeAsync(
previousResult ->
uploadInstantsInTableBatch(tableBatch)
.thenApply(currentResult -> previousResult && currentResult),
executorService);
}
return processTableBatchFuture;
}
private Table updateTableIdIfNotPresent(Table table) {
if (StringUtils.isNotBlank(table.getTableId())) {
return table;
}
return table
.toBuilder()
.tableId(getTableIdFromAbsolutePathUrl(table.getAbsoluteTableUri()).toString())
.build();
}
private CompletableFuture<Boolean> uploadInstantsInTableBatch(List<Table> tables) {
log.info("Fetching checkpoint for tables: " + tables);
return onehouseApiClient
.getTableMetricsCheckpoints(
tables.stream().map(Table::getTableId).collect(Collectors.toList()))
.thenComposeAsync(
getTableMetricsCheckpointResponse -> {
if (getTableMetricsCheckpointResponse.isFailure()) {
log.error(
"Error encountered when fetching checkpoint, skipping table processing. status code: {}. message: {}",
getTableMetricsCheckpointResponse.getStatusCode(),
getTableMetricsCheckpointResponse.getCause());
return CompletableFuture.completedFuture(false);
}
Set<String> tableIdsWithCheckpoint =
getTableMetricsCheckpointResponse.getCheckpoints().stream()
.map(GetTableMetricsCheckpointResponse.TableMetadataCheckpoint::getTableId)
.collect(Collectors.toSet());
List<Table> tablesToInitialise =
tables.stream()
.filter(table -> !tableIdsWithCheckpoint.contains(table.getTableId()))
.collect(Collectors.toList());
List<CompletableFuture<Boolean>> processTablesFuture = new ArrayList<>();
Map<String, GetTableMetricsCheckpointResponse.TableMetadataCheckpoint>
tableCheckpointMap =
getTableMetricsCheckpointResponse.getCheckpoints().stream()
.collect(
Collectors.toMap(
GetTableMetricsCheckpointResponse.TableMetadataCheckpoint
::getTableId,
Function.identity()));
for (Table table : tables) {
if (tableCheckpointMap.containsKey(table.getTableId())) {
try {
// checkpoints found, continue from previous checkpoint
String checkpointString =
tableCheckpointMap.get(table.getTableId()).getCheckpoint();
processTablesFuture.add(
uploadNewInstantsSinceCheckpoint(
table.getTableId(),
table,
StringUtils.isNotBlank(checkpointString)
? mapper.readValue(checkpointString, Checkpoint.class)
: INITIAL_CHECKPOINT));
} catch (JsonProcessingException e) {
log.error(
"Error deserializing checkpoint value for table: {}, skipping table",
table,
e);
}
}
}
CompletableFuture<List<CompletableFuture<Boolean>>>
initialiseAndProcessNewlyDiscoveredTablesFuture =
initialiseAndProcessNewlyDiscoveredTables(tablesToInitialise);
return initialiseAndProcessNewlyDiscoveredTablesFuture.thenComposeAsync(
discoveredTablesProcessingFuture -> {
processTablesFuture.addAll(discoveredTablesProcessingFuture);
return CompletableFuture.allOf(
processTablesFuture.toArray(new CompletableFuture[0]))
.thenApply(
ignored ->
processTablesFuture.stream()
.map(CompletableFuture::join)
.allMatch(Boolean.TRUE::equals));
}, // return false if processing any table failed
executorService);
},
executorService)
.exceptionally(
throwable -> {
log.error("Encountered exception when uploading instants", throwable);
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
getMetadataExtractorFailureReason(
throwable,
MetricsConstants.MetadataUploadFailureReasons.UNKNOWN));
return false;
});
}
private CompletableFuture<List<CompletableFuture<Boolean>>>
initialiseAndProcessNewlyDiscoveredTables(List<Table> tablesToInitialise) {
List<CompletableFuture<Boolean>> processTablesFuture = new ArrayList<>();
CompletableFuture<List<CompletableFuture<Boolean>>>
initialiseAndProcessNewlyDiscoveredTablesFuture =
CompletableFuture.completedFuture(
Collections.singletonList(CompletableFuture.completedFuture(true)));
if (!tablesToInitialise.isEmpty()) {
log.info("Initializing following tables {}", tablesToInitialise);
List<
CompletableFuture<
InitializeTableMetricsCheckpointRequest
.InitializeSingleTableMetricsCheckpointRequest>>
initializeSingleTableMetricsCheckpointRequestFutureList = new ArrayList<>();
for (Table table : tablesToInitialise) {
initializeSingleTableMetricsCheckpointRequestFutureList.add(
hoodiePropertiesReader
.readHoodieProperties(getHoodiePropertiesFilePath(table))
.thenApply(
properties -> {
if (properties == null) {
log.error(
"Encountered exception when reading hoodie.properties file for table: {}, skipping this table",
table);
return null; // will be filtered out later
}
return InitializeTableMetricsCheckpointRequest
.InitializeSingleTableMetricsCheckpointRequest.builder()
.tableId(table.getTableId())
.tableName(properties.getTableName())
.tableType(properties.getTableType())
.databaseName(table.getDatabaseName())
.lakeName(table.getLakeName())
.tableBasePath(table.getAbsoluteTableUri())
.build();
}));
}
initialiseAndProcessNewlyDiscoveredTablesFuture =
CompletableFuture.allOf(
initializeSingleTableMetricsCheckpointRequestFutureList.toArray(
new CompletableFuture[0]))
.thenComposeAsync(
ignored -> {
List<
InitializeTableMetricsCheckpointRequest
.InitializeSingleTableMetricsCheckpointRequest>
initializeSingleTableMetricsCheckpointRequestList =
initializeSingleTableMetricsCheckpointRequestFutureList.stream()
.map(CompletableFuture::join)
.filter(Objects::nonNull)
.collect(Collectors.toList());
if (initializeSingleTableMetricsCheckpointRequestList.isEmpty()) {
log.error("No valid table to initialise");
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
MetricsConstants.MetadataUploadFailureReasons.NO_TABLES_TO_INITIALIZE);
return CompletableFuture.completedFuture(null);
}
return onehouseApiClient.initializeTableMetricsCheckpoint(
InitializeTableMetricsCheckpointRequest.builder()
.tables(initializeSingleTableMetricsCheckpointRequestList)
.build());
},
executorService)
.thenComposeAsync(
initializeTableMetricsCheckpointResponse -> {
if (initializeTableMetricsCheckpointResponse == null) {
return CompletableFuture.completedFuture(
Collections.singletonList(CompletableFuture.completedFuture(false)));
}
if (initializeTableMetricsCheckpointResponse.isFailure()) {
log.error(
"Error encountered when initialising tables, skipping table processing.status code: {} message {}",
initializeTableMetricsCheckpointResponse.getStatusCode(),
initializeTableMetricsCheckpointResponse.getCause());
return CompletableFuture.completedFuture(
Collections.singletonList(CompletableFuture.completedFuture(false)));
}
Map<
String,
InitializeTableMetricsCheckpointResponse
.InitializeSingleTableMetricsCheckpointResponse>
initialiseTableMetricsCheckpointMap =
initializeTableMetricsCheckpointResponse.getResponse().stream()
.collect(
Collectors.toMap(
InitializeTableMetricsCheckpointResponse
.InitializeSingleTableMetricsCheckpointResponse
::getTableId,
Function.identity(),
(oldValue, newValue) -> {
log.warn(
"duplicate found! old value: {} new value: {}",
oldValue,
newValue);
return newValue;
}));
for (Table table : tablesToInitialise) {
InitializeTableMetricsCheckpointResponse
.InitializeSingleTableMetricsCheckpointResponse
response =
initialiseTableMetricsCheckpointMap.getOrDefault(
table.getTableId(), null);
if (response == null) {
// table not initialised due to errors in previous steps
processTablesFuture.add(CompletableFuture.completedFuture(false));
continue;
}
if (!StringUtils.isBlank(response.getError())) {
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
MetricsConstants.MetadataUploadFailureReasons.API_FAILURE_USER_ERROR);
log.error(
"Error initialising table: {} error: {}, skipping table processing",
table,
response.getError());
continue;
}
processTablesFuture.add(
uploadNewInstantsSinceCheckpoint(
table.getTableId(), table, INITIAL_CHECKPOINT));
}
return CompletableFuture.completedFuture(processTablesFuture);
},
executorService);
}
return initialiseAndProcessNewlyDiscoveredTablesFuture;
}
private CompletableFuture<Boolean> uploadNewInstantsSinceCheckpoint(
String tableId, Table table, Checkpoint checkpoint) {
if (!checkpoint.isArchivedCommitsProcessed()) {
      /*
       * If archived commits have not been uploaded yet, we upload those first before moving to the
       * active timeline; commits in the archived timeline are uploaded only once.
       */
return timelineCommitInstantsUploader
.batchUploadWithCheckpoint(
tableId, table, checkpoint, CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED)
.thenComposeAsync(
archivedTimelineCheckpoint -> {
if (archivedTimelineCheckpoint == null) {
// do not upload instants in active timeline if there was failure
log.warn(
"Skipping uploading instants in active timeline due to failures in uploading archived timeline instants for table {}",
table.getAbsoluteTableUri());
return CompletableFuture.completedFuture(false);
}
return timelineCommitInstantsUploader
.paginatedBatchUploadWithCheckpoint(
tableId,
table,
resetCheckpoint(archivedTimelineCheckpoint),
CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE)
.thenApply(Objects::nonNull);
},
executorService);
}
    /*
     * If the last processed file in the retrieved checkpoint is an archived commit, we reset the
     * checkpoint timestamp and continuation token; otherwise we use the retrieved checkpoint as-is.
     * This allows us to continue from the previous batch id.
     */
Checkpoint activeTimelineCheckpoint =
ARCHIVED_COMMIT_INSTANT_PATTERN.matcher(checkpoint.getLastUploadedFile()).matches()
? resetCheckpoint(checkpoint)
: checkpoint;
return timelineCommitInstantsUploader
.paginatedBatchUploadWithCheckpoint(
tableId,
table,
activeTimelineCheckpoint,
CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE)
.thenApply(Objects::nonNull);
}
private String getHoodiePropertiesFilePath(Table table) {
String basePath = table.getAbsoluteTableUri();
return String.format(
"%s/%s/%s",
basePath.endsWith("/") ? basePath.substring(0, basePath.length() - 1) : basePath,
HOODIE_FOLDER_NAME,
HOODIE_PROPERTIES_FILE);
}
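  // Derives a deterministic, name-based UUID from the table's absolute path, so repeated discovery
  // runs assign the same id to the same table.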
private static UUID getTableIdFromAbsolutePathUrl(String tableAbsolutePathUrl) {
return UUID.nameUUIDFromBytes(tableAbsolutePathUrl.getBytes());
}
private static Checkpoint resetCheckpoint(Checkpoint checkpoint) {
return checkpoint.toBuilder().checkpointTimestamp(Instant.EPOCH).lastUploadedFile("").build();
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/TimelineCommitInstantsUploader.java
|
package ai.onehouse.metadata_extractor;
import static ai.onehouse.constants.MetadataExtractorConstants.ACTIVE_COMMIT_INSTANT_PATTERN;
import static ai.onehouse.constants.MetadataExtractorConstants.ARCHIVED_COMMIT_INSTANT_PATTERN;
import static ai.onehouse.constants.MetadataExtractorConstants.ARCHIVED_FOLDER_NAME;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_FOLDER_NAME;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_PROPERTIES_FILE;
import static ai.onehouse.constants.MetadataExtractorConstants.HOODIE_PROPERTIES_FILE_OBJ;
import static ai.onehouse.constants.MetadataExtractorConstants.ROLLBACK_ACTION;
import static ai.onehouse.constants.MetadataExtractorConstants.SAVEPOINT_ACTION;
import static ai.onehouse.metadata_extractor.ActiveTimelineInstantBatcher.areRelatedInstants;
import static ai.onehouse.metadata_extractor.ActiveTimelineInstantBatcher.areRelatedSavepointOrRollbackInstants;
import static ai.onehouse.metadata_extractor.ActiveTimelineInstantBatcher.getActiveTimeLineInstant;
import static ai.onehouse.metadata_extractor.MetadataExtractorUtils.getMetadataExtractorFailureReason;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import ai.onehouse.api.OnehouseApiClient;
import ai.onehouse.api.models.request.CommitTimelineType;
import ai.onehouse.api.models.request.GenerateCommitMetadataUploadUrlRequest;
import ai.onehouse.api.models.request.UploadedFile;
import ai.onehouse.api.models.request.UpsertTableMetricsCheckpointRequest;
import ai.onehouse.config.Config;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.metadata_extractor.models.Checkpoint;
import ai.onehouse.metadata_extractor.models.Table;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.storage.PresignedUrlFileUploader;
import ai.onehouse.storage.StorageUtils;
import ai.onehouse.storage.models.File;
import ai.onehouse.RuntimeModule.TableMetadataUploadObjectStorageAsyncClient;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
/*
 * Contains the core logic for listing and uploading commit instants in a given timeline.
 */
@Slf4j
public class TimelineCommitInstantsUploader {
private final AsyncStorageClient asyncStorageClient;
private final StorageUtils storageUtils;
private final PresignedUrlFileUploader presignedUrlFileUploader;
private final OnehouseApiClient onehouseApiClient;
private final ExecutorService executorService;
private final ObjectMapper mapper;
private final ActiveTimelineInstantBatcher activeTimelineInstantBatcher;
private final LakeViewExtractorMetrics hudiMetadataExtractorMetrics;
private final MetadataExtractorConfig extractorConfig;
@Inject
public TimelineCommitInstantsUploader(
@Nonnull @TableMetadataUploadObjectStorageAsyncClient AsyncStorageClient asyncStorageClient,
@Nonnull PresignedUrlFileUploader presignedUrlFileUploader,
@Nonnull OnehouseApiClient onehouseApiClient,
@Nonnull StorageUtils storageUtils,
@Nonnull ExecutorService executorService,
@Nonnull ActiveTimelineInstantBatcher activeTimelineInstantBatcher,
@Nonnull LakeViewExtractorMetrics hudiMetadataExtractorMetrics,
@Nonnull Config config) {
this.asyncStorageClient = asyncStorageClient;
this.presignedUrlFileUploader = presignedUrlFileUploader;
this.onehouseApiClient = onehouseApiClient;
this.storageUtils = storageUtils;
this.executorService = executorService;
this.activeTimelineInstantBatcher = activeTimelineInstantBatcher;
this.hudiMetadataExtractorMetrics = hudiMetadataExtractorMetrics;
this.extractorConfig = config.getMetadataExtractorConfig();
this.mapper = new ObjectMapper();
mapper.registerModule(new JavaTimeModule());
}
/**
* Performs a batch upload of instants from a specified timeline. Initially, it lists all instants
* in the timeline and filters out those that have already been processed, based on the provided
* checkpoint. It then uploads the remaining, new instants. This function is useful in scenarios
* where instants in the timeline are not ordered by their filenames, such as in archived
* timelines.
*
* @param tableId Unique identifier of the table.
* @param table The table object.
* @param checkpoint Checkpoint object used to track already processed instants.
* @param commitTimelineType Type of the commit timeline.
* @return A future that completes with a new checkpoint after the
* upload is finished.
*/
public CompletableFuture<Checkpoint> batchUploadWithCheckpoint(
String tableId, Table table, Checkpoint checkpoint, CommitTimelineType commitTimelineType) {
log.info("uploading instants in table: {} timeline: {}", table, commitTimelineType);
String timelineUri =
storageUtils.constructFileUri(
table.getAbsoluteTableUri(), getPathSuffixForTimeline(commitTimelineType));
return executeFullBatchUpload(tableId, table, timelineUri, checkpoint, commitTimelineType);
}
/**
* Uploads instants in a timeline in a paginated manner. It lists a page of instants from the
* timeline, uploads those that have not been previously processed based on the provided
* checkpoint in batches, and then continues to the next page. This process repeats until the last
* page is reached. This approach is recommended when instants are ordered by their filenames,
* which is typical in active timelines.
*
* @param tableId Unique identifier of the table.
* @param table The table object.
* @param checkpoint Checkpoint object used to track already processed instants.
* @param commitTimelineType Type of the commit timeline.
* @return A future that completes with a new checkpoint after each
* paginated upload.
*/
public CompletableFuture<Checkpoint> paginatedBatchUploadWithCheckpoint(
String tableId, Table table, Checkpoint checkpoint, CommitTimelineType commitTimelineType) {
log.info("uploading instants in table: {} timeline: {}", table, commitTimelineType);
String bucketName = storageUtils.getBucketNameFromUri(table.getAbsoluteTableUri());
String prefix =
storageUtils.getPathFromUrl(
storageUtils.constructFileUri(
table.getAbsoluteTableUri(), getPathSuffixForTimeline(commitTimelineType)));
    // startAfter is used only in the first call to list objects; after that the continuation token
    // is used.
    // Reset firstIncompleteCommitFile so that we do not process from the same commit again: all
    // commit files after firstIncompleteCommitFile will be processed and the checkpoint updated
    // accordingly.
String startAfter = getStartAfterString(prefix, checkpoint, true);
return executePaginatedBatchUpload(
tableId,
table,
bucketName,
prefix,
checkpoint.toBuilder().firstIncompleteCommitFile("").build(),
commitTimelineType,
startAfter);
}
private CompletableFuture<Checkpoint> executeFullBatchUpload(
String tableId,
Table table,
String timelineUri,
Checkpoint checkpoint,
CommitTimelineType commitTimelineType) {
return asyncStorageClient
.listAllFilesInDir(timelineUri)
.thenComposeAsync(
files -> {
List<File> filesToUpload =
getFilesToUploadBasedOnPreviousCheckpoint(
files, checkpoint, commitTimelineType, false);
return filesToUpload.isEmpty()
? CompletableFuture.completedFuture(checkpoint)
: uploadInstantsInSequentialBatches(
tableId, table, filesToUpload, checkpoint, commitTimelineType);
},
executorService)
.exceptionally(
throwable -> {
log.error(
"Encountered exception when uploading instants for table {} timeline {}",
table,
commitTimelineType,
throwable);
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
getMetadataExtractorFailureReason(
throwable,
MetricsConstants.MetadataUploadFailureReasons.UNKNOWN));
return null; // handled in uploadNewInstantsSinceCheckpoint function
});
}
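  // Lists one page of objects, uploads any new instants found on that page, persists the updated
  // checkpoint, and then recurses with that checkpoint until the listing is exhausted or no further
  // batches can be processed.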
private CompletableFuture<Checkpoint> executePaginatedBatchUpload(
String tableId,
Table table,
String bucketName,
String prefix,
Checkpoint checkpoint,
CommitTimelineType commitTimelineType,
String startAfter) {
return asyncStorageClient
.fetchObjectsByPage(bucketName, prefix, null, startAfter)
.thenComposeAsync(
continuationTokenAndFiles -> {
String nextContinuationToken = continuationTokenAndFiles.getLeft();
List<File> filesToUpload =
getFilesToUploadBasedOnPreviousCheckpoint(
continuationTokenAndFiles.getRight(), checkpoint, commitTimelineType, false);
if (!filesToUpload.isEmpty()) {
return uploadInstantsInSequentialBatches(
tableId, table, filesToUpload, checkpoint, commitTimelineType)
.thenComposeAsync(
updatedCheckpoint -> {
if (updatedCheckpoint == null) {
// no batches to process, returning existing checkpoint
hudiMetadataExtractorMetrics.incrementTablesProcessedCounter();
return CompletableFuture.completedFuture(checkpoint);
}
if (StringUtils.isBlank(nextContinuationToken)) {
log.info(
"Reached end of instants in {} for table {}",
commitTimelineType,
table);
hudiMetadataExtractorMetrics.incrementTablesProcessedCounter();
return CompletableFuture.completedFuture(updatedCheckpoint);
}
return executePaginatedBatchUpload(
tableId,
table,
bucketName,
prefix,
updatedCheckpoint,
commitTimelineType,
getStartAfterString(prefix, updatedCheckpoint, false));
},
executorService);
} else {
log.info("Reached end of instants in {} for table {}", commitTimelineType, table);
hudiMetadataExtractorMetrics.incrementTablesProcessedCounter();
return CompletableFuture.completedFuture(checkpoint);
}
},
executorService)
.exceptionally(
throwable -> {
log.error(
"Encountered exception when uploading instants for table {} timeline {}",
table,
commitTimelineType,
throwable);
hudiMetadataExtractorMetrics.incrementTableMetadataProcessingFailureCounter(
getMetadataExtractorFailureReason(
throwable,
MetricsConstants.MetadataUploadFailureReasons.UNKNOWN));
return null; // handled in uploadNewInstantsSinceCheckpoint
});
}
/**
* Executes a sequential upload of files in parallel batches. This function processes multiple
* files in parallel within each batch, ensuring efficient use of resources. However, it maintains
* a sequential order between batches, where a new batch of files is uploaded only after the
* successful completion of the previous batch. This approach balances the benefits of parallel
* processing with the need for sequential control, making it suitable for scenarios where order
* of batch completion is important.
*
* @param tableId Unique identifier for the table associated with the files.
* @param table The table object.
* @param filesToUpload List of files to be uploaded.
* @param checkpoint Checkpoint object used to track already processed instants.
* @param commitTimelineType Type of the commit timeline.
   * @return CompletableFuture<Checkpoint> A future that completes with the checkpoint of the last
   *     successfully processed batch; if a batch upload fails, the future completes with null instead.
*/
private CompletableFuture<Checkpoint> uploadInstantsInSequentialBatches(
String tableId,
Table table,
List<File> filesToUpload,
Checkpoint checkpoint,
CommitTimelineType commitTimelineType) {
List<List<File>> batches;
if (CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED.equals(commitTimelineType)) {
batches =
Lists.partition(
filesToUpload, getUploadBatchSize(CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED));
} else {
Pair<String, List<List<File>>> incompleteCheckpointBatchesPair =
activeTimelineInstantBatcher.createBatches(
filesToUpload,
getUploadBatchSize(CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE),
checkpoint);
batches = incompleteCheckpointBatchesPair.getRight();
checkpoint =
checkpoint
.toBuilder()
.firstIncompleteCommitFile(incompleteCheckpointBatchesPair.getLeft())
.build();
}
int numBatches = batches.size();
if (numBatches == 0) {
      // In case of CONTINUE_ON_INCOMPLETE_COMMIT, the extractor still needs to check subsequent
      // pages, hence we return a non-null checkpoint to continue processing.
if (
CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE.equals(commitTimelineType) &&
extractorConfig
.getUploadStrategy()
.equals(MetadataExtractorConfig.UploadStrategy.CONTINUE_ON_INCOMPLETE_COMMIT)
) {
log.info(
"No batches found in current page for table {} timeline {}",
table,
commitTimelineType);
return CompletableFuture.completedFuture(checkpoint);
}
log.info(
"Could not create batches with completed commits for table {} timeline {}",
table,
commitTimelineType);
return CompletableFuture.completedFuture(null);
}
log.info(
"Processing {} instants in table {} timeline {} sequentially in {} batches",
filesToUpload.size(),
table,
commitTimelineType,
numBatches);
CompletableFuture<Checkpoint> sequentialBatchProcessingFuture =
CompletableFuture.completedFuture(checkpoint);
for (List<File> batch : batches) {
sequentialBatchProcessingFuture =
sequentialBatchProcessingFuture.thenComposeAsync(
updatedCheckpoint -> {
if (updatedCheckpoint == null) {
return CompletableFuture.completedFuture(null);
}
File lastUploadedFile = getLastUploadedFileFromBatch(commitTimelineType, batch);
log.info(
"uploading batch {} for table {} timeline: {}",
updatedCheckpoint.getBatchId() + 1,
table,
commitTimelineType);
return uploadBatch(
tableId,
batch,
commitTimelineType,
storageUtils.constructFileUri(
table.getAbsoluteTableUri(),
getPathSuffixForTimeline(commitTimelineType)))
.thenComposeAsync(
ignored2 ->
updateCheckpointAfterProcessingBatch(
tableId,
updatedCheckpoint,
lastUploadedFile,
batch.stream()
.map(
file ->
UploadedFile.builder()
.name(
getFileNameWithPrefix(file, commitTimelineType))
.lastModifiedAt(
file.getLastModifiedAt().toEpochMilli())
.build())
.collect(Collectors.toList()),
commitTimelineType),
executorService)
.exceptionally(
throwable -> {
hudiMetadataExtractorMetrics
.incrementTableMetadataProcessingFailureCounter(
getMetadataExtractorFailureReason(
throwable,
MetricsConstants.MetadataUploadFailureReasons.UNKNOWN));
log.error(
"error processing batch for table: {}. Skipping processing of further batches of table in current run.",
table.getAbsoluteTableUri(),
throwable);
return null;
});
},
executorService);
}
return sequentialBatchProcessingFuture;
}
private CompletableFuture<Void> uploadBatch(
String tableId,
List<File> batch,
CommitTimelineType commitTimelineType,
String directoryUri) {
List<String> commitInstants =
batch.stream()
.map(file -> getFileNameWithPrefix(file, commitTimelineType))
.collect(Collectors.toList());
return onehouseApiClient
.generateCommitMetadataUploadUrl(
GenerateCommitMetadataUploadUrlRequest.builder()
.tableId(tableId)
.commitInstants(commitInstants)
.commitTimelineType(commitTimelineType)
.build())
.thenComposeAsync(
generateCommitMetadataUploadUrlResponse -> {
if (generateCommitMetadataUploadUrlResponse.isFailure()) {
throw new RuntimeException(
String.format(
"failed to generate presigned urls: status_code: %d exception: %s",
generateCommitMetadataUploadUrlResponse.getStatusCode(),
generateCommitMetadataUploadUrlResponse.getCause()));
}
List<CompletableFuture<Void>> uploadFutures = new ArrayList<>();
for (int i = 0; i < batch.size(); i++) {
uploadFutures.add(
presignedUrlFileUploader.uploadFileToPresignedUrl(
generateCommitMetadataUploadUrlResponse.getUploadUrls().get(i),
constructStorageUri(directoryUri, batch.get(i).getFilename()),
extractorConfig.getFileUploadStreamBatchSize())
.thenApply(result -> {
hudiMetadataExtractorMetrics.incrementMetadataUploadSuccessCounter();
return result;
}));
}
return CompletableFuture.allOf(uploadFutures.toArray(new CompletableFuture[0]));
},
executorService);
}
private CompletableFuture<Checkpoint> updateCheckpointAfterProcessingBatch(
String tableId,
Checkpoint previousCheckpoint,
File lastUploadedFile,
List<UploadedFile> uploadedFiles,
CommitTimelineType commitTimelineType) {
    // Archived instants will already have been processed by the time we process any batch of the
    // active timeline.
boolean archivedCommitsProcessed = false;
int batchId = previousCheckpoint.getBatchId() + 1;
if (CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE.equals(commitTimelineType)) {
      // We have processed at least one batch of the active timeline, hence the archived timeline
      // must be fully processed.
archivedCommitsProcessed = true;
}
Checkpoint updatedCheckpoint =
Checkpoint.builder()
.batchId(batchId)
.lastUploadedFile(lastUploadedFile.getFilename())
.checkpointTimestamp(lastUploadedFile.getLastModifiedAt())
.archivedCommitsProcessed(archivedCommitsProcessed)
.firstIncompleteCommitFile(previousCheckpoint.getFirstIncompleteCommitFile())
.build();
try {
return onehouseApiClient
.upsertTableMetricsCheckpoint(
UpsertTableMetricsCheckpointRequest.builder()
.commitTimelineType(commitTimelineType)
.tableId(tableId)
.checkpoint(mapper.writeValueAsString(updatedCheckpoint))
.filesUploaded(
uploadedFiles.stream()
.map(UploadedFile::getName)
.collect(Collectors.toList()))
.uploadedFiles(uploadedFiles)
.build())
.thenApply(
upsertTableMetricsCheckpointResponse -> {
if (upsertTableMetricsCheckpointResponse.isFailure()) {
throw new RuntimeException(
String.format(
"failed to update checkpoint: status_code: %d, exception: %s",
upsertTableMetricsCheckpointResponse.getStatusCode(),
upsertTableMetricsCheckpointResponse.getCause()));
}
return updatedCheckpoint;
});
} catch (JsonProcessingException e) {
CompletableFuture<Checkpoint> f = new CompletableFuture<>();
f.completeExceptionally(new RuntimeException("failed to serialise checkpoint", e));
return f;
}
}
/**
* Filters out already uploaded files based on checkpoint information and sorts the remaining
* files. This function filters and sorts files from a given list, considering their last modified
* time and filename. It is used to determine which files need to be uploaded in the current run.
*
* @param filesList List of files to be filtered and sorted.
* @param checkpoint Checkpoint object containing information about previously uploaded files.
* @param commitTimelineType Type of the commit timeline (active or archived).
* @param applyLastModifiedAtFilter Flag to apply last modified timestamp filter.
* @return List<File> List of filtered and sorted files ready for upload.
*/
private List<File> getFilesToUploadBasedOnPreviousCheckpoint(
List<File> filesList,
Checkpoint checkpoint,
CommitTimelineType commitTimelineType,
boolean applyLastModifiedAtFilter) {
if (filesList.isEmpty()) {
return filesList;
}
Comparator<File> fileComparator;
if (CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE.equals(commitTimelineType)) {
fileComparator = Comparator.comparing(File::getFilename);
} else {
fileComparator =
Comparator.comparing(file -> getNumericPartFromArchivedCommit(file.getFilename()));
}
List<File> filesToUpload =
filesList.stream()
.filter(
file ->
shouldIncludeFile(
file, checkpoint, applyLastModifiedAtFilter, commitTimelineType))
.sorted(fileComparator)
.collect(Collectors.toList());
if (checkpoint.getBatchId() == 0) {
// for the first batch, always include hoodie properties file
filesToUpload.add(0, HOODIE_PROPERTIES_FILE_OBJ);
}
return filesToUpload;
}
/**
* Determines if a file should be included based on filters.
*/
private boolean shouldIncludeFile(
File file,
Checkpoint checkpoint,
boolean applyLastModifiedAtFilter,
CommitTimelineType commitTimelineType) {
return !file.isDirectory()
&& (!file.getLastModifiedAt().isBefore(checkpoint.getCheckpointTimestamp())
|| !applyLastModifiedAtFilter)
&& isInstantFile(file.getFilename())
&& !isInstantAlreadyUploaded(checkpoint, file, commitTimelineType)
&& !file.getFilename().equals(HOODIE_PROPERTIES_FILE)
&& StringUtils.isNotBlank(file.getFilename());
}
private boolean isInstantAlreadyUploaded(
Checkpoint checkpoint, File file, CommitTimelineType commitTimelineType) {
if (checkpoint.getBatchId() != 0 && isInstantFile(checkpoint.getLastUploadedFile())) {
if (commitTimelineType.equals(CommitTimelineType.COMMIT_TIMELINE_TYPE_ACTIVE)) {
if (extractorConfig
.getUploadStrategy()
.equals(MetadataExtractorConfig.UploadStrategy.CONTINUE_ON_INCOMPLETE_COMMIT)) {
          // The commits can be incomplete even if the condition below is true, hence we do not skip
          // them in non-blocking (CONTINUE_ON_INCOMPLETE_COMMIT) mode.
return false;
}
return getCommitIdFromActiveTimelineInstant(file.getFilename())
.compareTo(getCommitIdFromActiveTimelineInstant(checkpoint.getLastUploadedFile()))
<= 0;
} else {
return getNumericPartFromArchivedCommit(file.getFilename())
<= getNumericPartFromArchivedCommit(checkpoint.getLastUploadedFile());
}
}
return false;
}
private boolean isInstantFile(String fileName) {
return ACTIVE_COMMIT_INSTANT_PATTERN.matcher(fileName).matches()
|| ARCHIVED_COMMIT_INSTANT_PATTERN.matcher(fileName).matches();
}
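  // hoodie.properties always lives directly under the .hoodie folder, so when the directory URI
  // points at the archived folder we strip the trailing "archived/" segment before building its URI.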
private String constructStorageUri(String directoryUri, String fileName) {
if (HOODIE_PROPERTIES_FILE.equals(fileName)) {
String archivedSuffix = ARCHIVED_FOLDER_NAME + '/';
String hoodieDirectoryUri =
directoryUri.endsWith(archivedSuffix)
            ? directoryUri.substring(0, directoryUri.length() - archivedSuffix.length())
: directoryUri;
return storageUtils.constructFileUri(hoodieDirectoryUri, HOODIE_PROPERTIES_FILE);
}
return storageUtils.constructFileUri(directoryUri, fileName);
}
private String getPathSuffixForTimeline(CommitTimelineType commitTimelineType) {
String pathSuffix = HOODIE_FOLDER_NAME + '/';
return CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED.equals(commitTimelineType)
? pathSuffix + ARCHIVED_FOLDER_NAME + '/'
: pathSuffix;
}
private String getFileNameWithPrefix(File file, CommitTimelineType commitTimelineType) {
String archivedPrefix = "archived/";
return CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED.equals(commitTimelineType)
&& !HOODIE_PROPERTIES_FILE.equals(file.getFilename())
? archivedPrefix + file.getFilename()
: file.getFilename();
}
private BigDecimal getCommitIdFromActiveTimelineInstant(String activeTimeLineInstant) {
return new BigDecimal(activeTimeLineInstant.split("\\.")[0]);
}
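  // Extracts the numeric index between ".archive." and "_" from an archived commit filename, e.g. a
  // hypothetical ".commits_.archive.42_1-0-1" yields 42.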
private int getNumericPartFromArchivedCommit(String archivedCommitFileName) {
Pattern pattern = Pattern.compile("\\.archive\\.(\\d+)_");
Matcher matcher = pattern.matcher(archivedCommitFileName);
if (matcher.find()) {
return Integer.parseInt(matcher.group(1));
} else {
throw new IllegalArgumentException("invalid archived commit file type");
}
}
public String getStartAfterString(String prefix, Checkpoint checkpoint, boolean isFirstFetch) {
String lastProcessedFile = checkpoint.getLastUploadedFile();
// Base case to process from the beginning
if (lastProcessedFile.equals(HOODIE_PROPERTIES_FILE)
|| StringUtils.isBlank(lastProcessedFile)) {
return null;
}
// Extractor blocks on incomplete commits, startAfter is the last processed file
if (extractorConfig
.getUploadStrategy()
.equals(MetadataExtractorConfig.UploadStrategy.BLOCK_ON_INCOMPLETE_COMMIT)
|| !isFirstFetch) {
return storageUtils.constructFileUri(prefix, lastProcessedFile);
}
// Extractor does not block on incomplete commits, it resumes from the first incomplete commit
// file if present else takes the lastProcessedFile as the starting point
String firstIncompleteCommitFile = checkpoint.getFirstIncompleteCommitFile();
return StringUtils.isBlank(firstIncompleteCommitFile)
? storageUtils.constructFileUri(prefix, lastProcessedFile)
: storageUtils.constructFileUri(prefix, firstIncompleteCommitFile);
}
  /**
   * Extracts the last uploaded file from a batch. If the commit timeline type is ARCHIVED, we
   * return the last file in the batch. If the batch only contains the hoodie.properties file, we
   * return hoodie.properties. If the batch ends with a savepoint commit, we return the second to
   * last item. If the batch ends with a rollback commit, we return the first file of that rollback
   * sequence (see the cases in the method body). For batches ending with other commit types, we
   * return the third to last item.
   */
public File getLastUploadedFileFromBatch(
CommitTimelineType commitTimelineType, List<File> batch) {
if (commitTimelineType == CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED) {
return batch.get(batch.size() - 1);
}
if (batch.size() == 1 && batch.get(0).getFilename().equals(HOODIE_PROPERTIES_FILE)) {
return batch.get(0);
}
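    // A complete instant on the active timeline is typically represented by three files (requested,
    // inflight, and completed states); with the batch ordered by filename, the completed instant
    // file ends up third from the end, which is why the fallback at the end of this method returns
    // batch.get(batch.size() - 3).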
if (isSavepointCommit(batch.get(batch.size() - 1))) {
return batch.get(batch.size() - 2);
}
if (isRollbackCommit(batch.get(batch.size() - 1))) {
int lastIndex = batch.size() - 1;
ActiveTimelineInstantBatcher.ActiveTimelineInstant lastInstant =
getActiveTimeLineInstant(batch.get(lastIndex).getFilename());
// Case 1: Full rollback sequence (xyz.rollback, xyz.rollback.inflight, xyz.rollback.requested)
if (lastIndex >= 2 &&
areRelatedInstants(lastInstant,
getActiveTimeLineInstant(batch.get(lastIndex-1).getFilename()),
getActiveTimeLineInstant(batch.get(lastIndex-2).getFilename()))) {
return batch.get(lastIndex - 2);
}
// Case 2: Rollback with inflight (xyz.rollback, xyz.inflight)
if (lastIndex >= 1 &&
areRelatedSavepointOrRollbackInstants(lastInstant,
getActiveTimeLineInstant(batch.get(lastIndex-1).getFilename()))) {
return batch.get(lastIndex - 1);
}
// Case 3: Simple rollback (xyz.rollback)
return batch.get(lastIndex);
}
return batch.get(batch.size() - 3);
}
private boolean isSavepointCommit(File file) {
String[] parts = file.getFilename().split("\\.", 3);
if (parts.length < 2) {
return false;
}
return SAVEPOINT_ACTION.equals(parts[1]);
}
private boolean isRollbackCommit(File file) {
String[] parts = file.getFilename().split("\\.", 3);
if (parts.length < 2) {
return false;
}
return ROLLBACK_ACTION.equals(parts[1]);
}
@VisibleForTesting
int getUploadBatchSize(CommitTimelineType commitTimelineType) {
if (commitTimelineType == CommitTimelineType.COMMIT_TIMELINE_TYPE_ARCHIVED) {
return extractorConfig.getPresignedUrlRequestBatchSizeArchivedTimeline();
} else {
return extractorConfig.getPresignedUrlRequestBatchSizeActiveTimeline();
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/models/Checkpoint.java
|
package ai.onehouse.metadata_extractor.models;
import java.io.Serializable;
import java.time.Instant;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder(toBuilder = true)
@Value
@Jacksonized
public class Checkpoint implements Serializable {
int batchId;
@NonNull Instant checkpointTimestamp;
@NonNull String lastUploadedFile;
String firstIncompleteCommitFile;
boolean archivedCommitsProcessed;
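  // Illustrative (hypothetical) example: a checkpoint with batchId 3, lastUploadedFile
  // "20240101000000.commit", an empty firstIncompleteCommitFile, and archivedCommitsProcessed true
  // means the archived timeline is done and active-timeline processing resumes after that commit.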
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/models/ParsedHudiProperties.java
|
package ai.onehouse.metadata_extractor.models;
import ai.onehouse.api.models.request.TableType;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
@Builder
@Value
public class ParsedHudiProperties {
@NonNull String tableName;
@NonNull TableType tableType;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metadata_extractor/models/Table.java
|
package ai.onehouse.metadata_extractor.models;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
@Builder(toBuilder = true)
@Getter
@EqualsAndHashCode
@ToString
public class Table {
@NonNull private final String absoluteTableUri;
private final String databaseName;
private final String lakeName;
private String tableId;
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metrics/LakeViewExtractorMetrics.java
|
package ai.onehouse.metrics;
import ai.onehouse.config.Config;
import ai.onehouse.config.ConfigProvider;
import ai.onehouse.constants.MetricsConstants;
import io.micrometer.core.instrument.Tag;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import lombok.Getter;
public class LakeViewExtractorMetrics {
private final Metrics metrics;
private final Metrics.Gauge tablesDiscoveredGaugeMetric;
private final Metrics.Gauge tablesProcessedGaugeMetric;
private final Config extractorConfig;
static final String METRICS_COMMON_PREFIX = "lakeView_";
// Tag keys
static final String CONFIG_VERSION_TAG_KEY = "config_version";
static final String EXTRACTOR_JOB_RUN_MODE_TAG_KEY = "extractor_job_run_mode";
static final String METADATA_UPLOAD_FAILURE_REASON_TAG_KEY = "metadata_upload_failure_reason";
static final String METADATA_DISCOVER_FAILURE_REASON_TAG_KEY = "metadata_discover_failure_reason";
// Metrics
static final String TABLE_DISCOVERY_SUCCESS_COUNTER =
METRICS_COMMON_PREFIX + "table_discovery_success";
static final String TABLE_DISCOVERY_FAILURE_COUNTER =
METRICS_COMMON_PREFIX + "table_discovery_failure";
static final String TABLE_SYNC_SUCCESS_COUNTER = METRICS_COMMON_PREFIX + "table_sync_success";
static final String TABLE_SYNC_ERROR_COUNTER = METRICS_COMMON_PREFIX + "table_sync_failure";
static final String METADATA_UPLOAD_SUCCESS_COUNTER = METRICS_COMMON_PREFIX + "metadata_upload";
static final String FAILED_OVERRIDE_CONFIG_COUNTER = METRICS_COMMON_PREFIX + "failed_override_config";
static final String TABLE_METADATA_PROCESSING_FAILURE_COUNTER =
METRICS_COMMON_PREFIX + "table_metadata_processing_failure";
@Inject
public LakeViewExtractorMetrics(
@Nonnull Metrics metrics, @Nonnull ConfigProvider configProvider) {
this.metrics = metrics;
this.extractorConfig = configProvider.getConfig();
this.tablesDiscoveredGaugeMetric =
metrics.gauge(
TablesDiscoveredGaugeMetricsMetadata.NAME,
TablesDiscoveredGaugeMetricsMetadata.DESCRIPTION,
getDefaultTags());
this.tablesProcessedGaugeMetric =
metrics.gauge(
TablesProcessedGaugeMetricsMetadata.NAME,
TablesProcessedGaugeMetricsMetadata.DESCRIPTION,
getDefaultTags());
}
public void setDiscoveredTablesPerRound(long numTablesDiscovered) {
tablesDiscoveredGaugeMetric.setValue(numTablesDiscovered);
incrementTableDiscoverySuccessCounter();
}
private void incrementTableDiscoverySuccessCounter() {
metrics.increment(TABLE_DISCOVERY_SUCCESS_COUNTER, getDefaultTags());
}
public void incrementTableDiscoveryFailureCounter() {
incrementTableDiscoveryFailureCounter(MetricsConstants.MetadataUploadFailureReasons.UNKNOWN);
}
public void incrementTableDiscoveryFailureCounter(
MetricsConstants.MetadataUploadFailureReasons metadataUploadFailureReasons) {
List<Tag> tags = getDefaultTags();
tags.add(Tag.of(METADATA_DISCOVER_FAILURE_REASON_TAG_KEY, metadataUploadFailureReasons.name()));
metrics.increment(TABLE_DISCOVERY_FAILURE_COUNTER, tags);
}
public void incrementTableSyncSuccessCounter() {
metrics.increment(TABLE_SYNC_SUCCESS_COUNTER, getDefaultTags());
}
public void incrementTableSyncFailureCounter() {
metrics.increment(TABLE_SYNC_ERROR_COUNTER, getDefaultTags());
}
public void incrementMetadataUploadSuccessCounter() {
metrics.increment(METADATA_UPLOAD_SUCCESS_COUNTER, getDefaultTags());
}
public void incrementFailedOverrideConfigCounter() {
metrics.increment(FAILED_OVERRIDE_CONFIG_COUNTER, getDefaultTags());
}
public void incrementTableMetadataProcessingFailureCounter(
MetricsConstants.MetadataUploadFailureReasons metadataUploadFailureReasons) {
List<Tag> tags = getDefaultTags();
tags.add(Tag.of(METADATA_UPLOAD_FAILURE_REASON_TAG_KEY, metadataUploadFailureReasons.name()));
metrics.increment(TABLE_METADATA_PROCESSING_FAILURE_COUNTER, tags);
}
public void resetTableProcessedGauge() {
tablesProcessedGaugeMetric.setValue(0L);
}
public void incrementTablesProcessedCounter() {
tablesProcessedGaugeMetric.increment();
}
private List<Tag> getDefaultTags() {
List<Tag> tags = new ArrayList<>();
tags.add(Tag.of(CONFIG_VERSION_TAG_KEY, extractorConfig.getVersion().toString()));
tags.add(
Tag.of(
EXTRACTOR_JOB_RUN_MODE_TAG_KEY,
extractorConfig.getMetadataExtractorConfig().getJobRunMode().toString()));
return tags;
}
@Getter
private static class TablesDiscoveredGaugeMetricsMetadata {
public static final String NAME = METRICS_COMMON_PREFIX + "discovered_tables";
public static final String DESCRIPTION = "Number of tables discovered during extractor run";
}
@Getter
private static class TablesProcessedGaugeMetricsMetadata {
public static final String NAME = METRICS_COMMON_PREFIX + "processed_tables";
public static final String DESCRIPTION = "Number of tables processed during extractor run";
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metrics/Metrics.java
|
package ai.onehouse.metrics;
import static io.micrometer.prometheus.PrometheusConfig.DEFAULT;
import com.google.common.base.Preconditions;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.Meter;
import io.micrometer.core.instrument.Tag;
import io.micrometer.prometheus.PrometheusMeterRegistry;
import io.prometheus.client.CollectorRegistry;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
@AllArgsConstructor(access = AccessLevel.PACKAGE)
public class Metrics {
private final PrometheusMeterRegistry meterRegistry;
private static final Metrics INSTANCE =
new Metrics(new PrometheusMeterRegistry(DEFAULT), new HashMap<>());
private Map<String, Gauge> gaugeMap;
public static Metrics getInstance() {
return INSTANCE;
}
public CollectorRegistry getCollectorRegistry() {
return meterRegistry.getPrometheusRegistry();
}
public void increment(String name, List<Tag> tags) {
List<String> tagList = new ArrayList<>();
for (Tag tag : tags) {
tagList.add(tag.getKey());
tagList.add(tag.getValue());
}
createAndIncrementCounter(name, tagList);
}
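  // Gauges are cached by (name, description, tags) so repeated calls with the same arguments return
  // the existing Gauge instance instead of registering a duplicate meter.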
public Gauge gauge(String name, String description, List<Tag> tags) {
String gaugeKey = generateGaugeKey(name, description, tags);
Gauge gauge = gaugeMap.get(gaugeKey);
if (gauge != null) {
return gauge;
}
gauge = new Gauge();
gauge.setMeterId(getGaugeRegisterId(name, description, gauge, tags));
gaugeMap.put(gaugeKey, gauge);
return gauge;
}
Meter.Id getGaugeRegisterId(String name, String description, Gauge gauge, List<Tag> tags) {
return io.micrometer.core.instrument.Gauge.builder(name, gauge)
.tags(tags)
.description(description)
.register(meterRegistry)
.getId();
}
void createAndIncrementCounter(String name, List<String> tagList) {
Counter.builder(name).tags(tagList.toArray(new String[0])).register(meterRegistry).increment();
}
// Generates a unique key based on the name, description, and tags
private String generateGaugeKey(String name, String description, List<Tag> tags) {
StringBuilder keyBuilder = new StringBuilder();
keyBuilder.append(name);
keyBuilder.append("-");
keyBuilder.append(description);
keyBuilder.append("-");
for (Tag tag : tags) {
keyBuilder.append(tag.getKey());
keyBuilder.append(":");
keyBuilder.append(tag.getValue());
keyBuilder.append("-");
}
return keyBuilder.toString();
}
@EqualsAndHashCode
@ToString
public static class Gauge implements Supplier<Number> {
private final AtomicLong value = new AtomicLong(0);
@Getter private Meter.Id meterId;
public void setValue(long val) {
value.set(val);
}
public void increment() {
value.incrementAndGet();
}
public void setMeterId(Meter.Id id) {
Preconditions.checkArgument(this.meterId == null, "MeterId cannot be set more than once");
this.meterId = id;
}
@Override
public Number get() {
return value.get();
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metrics/MetricsModule.java
|
package ai.onehouse.metrics;
import static ai.onehouse.constants.MetricsConstants.PROMETHEUS_METRICS_SCRAPE_PORT;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import javax.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class MetricsModule extends AbstractModule {
@Provides
@Singleton
static Metrics providesMetrics() {
return Metrics.getInstance();
}
@Provides
@Singleton
static MetricsServer providesMetricsServer(Metrics metrics) {
return new MetricsServer(metrics.getCollectorRegistry(), PROMETHEUS_METRICS_SCRAPE_PORT);
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/metrics/MetricsServer.java
|
package ai.onehouse.metrics;
import static ai.onehouse.constants.MetricsConstants.PROMETHEUS_METRICS_SCRAPING_DISABLED;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.exporter.HTTPServer;
import java.io.IOException;
import java.net.InetSocketAddress;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class MetricsServer {
private final HTTPServer server;
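  // A port equal to PROMETHEUS_METRICS_SCRAPING_DISABLED disables the embedded scrape endpoint;
  // in that case no HTTP server is started and shutdown() becomes a no-op.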
public MetricsServer(CollectorRegistry registry, int port) {
if (port != PROMETHEUS_METRICS_SCRAPING_DISABLED) {
try {
log.info("Starting metrics server");
server = initHttpServer(new InetSocketAddress(port), registry);
Runtime.getRuntime().addShutdownHook(new Thread(server::close));
} catch (IOException e) {
throw new RuntimeException("Failed to start metrics server", e);
}
} else {
server = null;
}
}
static HTTPServer initHttpServer(InetSocketAddress socketAddress, CollectorRegistry registry)
throws IOException {
return new HTTPServer(socketAddress, registry);
}
public void shutdown() {
if (server != null) {
log.info("Shutting down metrics server");
server.close();
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/AbstractAsyncStorageClient.java
|
package ai.onehouse.storage;
import ai.onehouse.exceptions.ObjectStorageClientException;
import ai.onehouse.storage.models.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public abstract class AbstractAsyncStorageClient implements AsyncStorageClient {
protected final ExecutorService executorService;
protected final StorageUtils storageUtils;
AbstractAsyncStorageClient(ExecutorService executorService, StorageUtils storageUtils) {
this.executorService = executorService;
this.storageUtils = storageUtils;
}
@Override
public CompletableFuture<List<File>> listAllFilesInDir(String objectStorageUri) {
log.debug("Listing files in {}", objectStorageUri);
String bucketName = storageUtils.getBucketNameFromUri(objectStorageUri);
String prefix = storageUtils.getPathFromUrl(objectStorageUri);
    // Ensure a prefix that is not the root dir always ends with "/".
prefix = prefix.isEmpty() || prefix.endsWith("/") ? prefix : prefix + "/";
return listAllObjectsStorage(bucketName, prefix, null, new ArrayList<>());
}
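  // Recursively pages through the listing: each page's files are accumulated and the next page is
  // fetched until the storage provider returns no continuation token.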
protected CompletableFuture<List<File>> listAllObjectsStorage(
String bucketName, String prefix, String continuationToken, List<File> files) {
return fetchObjectsByPage(bucketName, prefix, continuationToken, null)
.thenComposeAsync(
continuationTokenAndFiles -> {
String newContinuationToken = continuationTokenAndFiles.getLeft();
files.addAll(continuationTokenAndFiles.getRight());
if (newContinuationToken != null) {
return listAllObjectsStorage(bucketName, prefix, newContinuationToken, files);
} else {
return CompletableFuture.completedFuture(files);
}
},
executorService).exceptionally(throwable -> {
log.error("Failed to list objects from storage", throwable);
throw clientException(throwable, "listAllObjectsStorage", bucketName);
});
}
protected abstract RuntimeException clientException(Throwable ex, String operation, String path);
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/AsyncStorageClient.java
|
package ai.onehouse.storage;
import ai.onehouse.storage.models.File;
import ai.onehouse.storage.models.FileStreamData;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import org.apache.commons.lang3.tuple.Pair;
public interface AsyncStorageClient {
CompletableFuture<List<File>> listAllFilesInDir(String path);
CompletableFuture<FileStreamData> streamFileAsync(String path);
CompletableFuture<byte[]> readFileAsBytes(String path);
CompletableFuture<Pair<String, List<File>>> fetchObjectsByPage(
String bucketName, String prefix, String continuationToken, String startAfter);
void refreshClient();
void initializeClient();
}
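// Illustrative caller sketch (added for clarity, not part of the original source):
// lists a directory through any AsyncStorageClient implementation and prints the
// file names once the asynchronous listing completes. The directory URI is an assumption.
class AsyncStorageClientUsageSketch {
  static CompletableFuture<Void> printListing(AsyncStorageClient client) {
    return client
        .listAllFilesInDir("s3://example-bucket/tables/orders/")
        .thenAccept(files -> files.forEach(file -> System.out.println(file.getFilename())));
  }
}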
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/GCSAsyncStorageClient.java
|
package ai.onehouse.storage;
import ai.onehouse.exceptions.AccessDeniedException;
import ai.onehouse.exceptions.ObjectStorageClientException;
import com.google.api.gax.paging.Page;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageException;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import ai.onehouse.storage.models.File;
import ai.onehouse.storage.models.FileStreamData;
import ai.onehouse.storage.providers.GcsClientProvider;
import java.nio.channels.Channels;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
@Slf4j
public class GCSAsyncStorageClient extends AbstractAsyncStorageClient {
private final GcsClientProvider gcsClientProvider;
@Inject
public GCSAsyncStorageClient(
@Nonnull GcsClientProvider gcsClientProvider,
@Nonnull StorageUtils storageUtils,
@Nonnull ExecutorService executorService) {
super(executorService, storageUtils);
this.gcsClientProvider = gcsClientProvider;
}
@Override
public CompletableFuture<Pair<String, List<File>>> fetchObjectsByPage(
String bucketName, String prefix, String continuationToken, String startAfter) {
log.debug(
"fetching files in dir {} continuationToken {} startAfter {}",
prefix,
continuationToken,
startAfter);
return CompletableFuture.supplyAsync(
() -> {
List<Storage.BlobListOption> optionList =
new ArrayList<>(
Arrays.asList(
Storage.BlobListOption.prefix(prefix),
Storage.BlobListOption.delimiter("/")));
if (StringUtils.isNotBlank(continuationToken)) {
optionList.add(Storage.BlobListOption.pageToken(continuationToken));
}
if (StringUtils.isNotBlank(startAfter)) {
optionList.add(Storage.BlobListOption.startOffset(startAfter));
}
Page<Blob> blobs =
gcsClientProvider
.getGcsClient()
.list(bucketName, optionList.toArray(new Storage.BlobListOption[0]));
List<File> files = new ArrayList<>();
for (Blob blob : blobs.getValues()) {
files.add(
File.builder()
.filename(blob.getName().replaceFirst(prefix, ""))
.lastModifiedAt(
Instant.ofEpochMilli(!blob.isDirectory() ? blob.getUpdateTime() : 0))
.isDirectory(blob.isDirectory())
.build());
}
String nextPageToken = blobs.hasNextPage() ? blobs.getNextPageToken() : null;
return Pair.of(nextPageToken, files);
},
executorService).exceptionally(
ex -> {
log.error("Failed to fetch objects by page", ex);
throw clientException(ex, "fetchObjectsByPage", bucketName);
}
);
}
@VisibleForTesting
CompletableFuture<Blob> readBlob(String gcsUri) {
log.debug("Reading GCS file: {}", gcsUri);
return CompletableFuture.supplyAsync(
() -> {
Blob blob =
gcsClientProvider
.getGcsClient()
.get(
BlobId.of(
storageUtils.getBucketNameFromUri(gcsUri),
storageUtils.getPathFromUrl(gcsUri)));
if (blob != null) {
return blob;
} else {
throw new ObjectStorageClientException("Blob not found");
}
},
executorService).exceptionally(
ex -> {
log.error("Failed to read blob", ex);
throw clientException(ex, "readBlob", gcsUri);
}
);
}
@Override
public CompletableFuture<FileStreamData> streamFileAsync(String gcsUri) {
return readBlob(gcsUri)
.thenApply(
blob ->
FileStreamData.builder()
.inputStream(Channels.newInputStream(blob.reader()))
.fileSize(blob.getSize())
.build());
}
@Override
public CompletableFuture<byte[]> readFileAsBytes(String gcsUri) {
return readBlob(gcsUri).thenApply(Blob::getContent);
}
@Override
public void refreshClient() {
gcsClientProvider.refreshClient();
}
@Override
public void initializeClient() {
gcsClientProvider.getGcsClient();
}
@Override
protected RuntimeException clientException(Throwable ex, String operation, String path) {
Throwable wrappedException = ex.getCause();
if (wrappedException instanceof StorageException) {
StorageException storageException = (StorageException) wrappedException;
log.info("Error in GCS operation : {} on path : {} code : {} message : {}",
operation, path, storageException.getCode(), storageException.getMessage());
if (storageException.getCode() == 403 || storageException.getCode() == 401
|| storageException.getMessage().equalsIgnoreCase("Error requesting access token")) {
return new AccessDeniedException(
String.format(
"AccessDenied for operation : %s on path : %s with message : %s",
operation, path, storageException.getMessage()));
}
} else if (wrappedException instanceof AccessDeniedException) {
return (RuntimeException) wrappedException;
}
return new ObjectStorageClientException(ex);
}
}
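// Illustrative usage sketch (added for clarity, not part of the original source):
// reads a hoodie.properties file from GCS as bytes through an already constructed
// GCSAsyncStorageClient. The GCS URI is an assumption; wiring of the provider,
// StorageUtils and executor is done elsewhere (via Guice) in the original code.
class GcsAsyncStorageClientUsageSketch {
  static byte[] readProperties(GCSAsyncStorageClient client) {
    return client
        .readFileAsBytes("gs://example-bucket/tables/orders/.hoodie/hoodie.properties")
        .join();
  }
}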
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/PresignedUrlFileUploader.java
|
package ai.onehouse.storage;
import com.google.inject.Inject;
import ai.onehouse.api.AsyncHttpClientWithRetry;
import ai.onehouse.constants.MetricsConstants;
import ai.onehouse.exceptions.FileUploadException;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import ai.onehouse.storage.models.FileStreamData;
import ai.onehouse.RuntimeModule.TableMetadataUploadObjectStorageAsyncClient;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import okhttp3.MediaType;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okio.BufferedSink;
import org.apache.commons.io.IOUtils;
@Slf4j
public class PresignedUrlFileUploader {
private final AsyncStorageClient asyncStorageClient;
private final AsyncHttpClientWithRetry asyncHttpClientWithRetry;
private final LakeViewExtractorMetrics hudiMetadataExtractorMetrics;
@Inject
public PresignedUrlFileUploader(
@Nonnull @TableMetadataUploadObjectStorageAsyncClient AsyncStorageClient asyncStorageClient,
@Nonnull AsyncHttpClientWithRetry asyncHttpClientWithRetry,
@Nonnull LakeViewExtractorMetrics hudiMetadataExtractorMetrics) {
this.asyncStorageClient = asyncStorageClient;
this.asyncHttpClientWithRetry = asyncHttpClientWithRetry;
this.hudiMetadataExtractorMetrics = hudiMetadataExtractorMetrics;
}
public CompletableFuture<Void> uploadFileToPresignedUrl(
String presignedUrl, String fileUrl, int fileUploadStreamBatchSize) {
log.debug("Uploading {} to retrieved presigned url", fileUrl);
return asyncStorageClient
.streamFileAsync(fileUrl)
.thenCompose(
fileStreamData ->
CompletableFuture.runAsync(
() -> {
Request request =
getRequest(presignedUrl, fileUploadStreamBatchSize, fileStreamData);
asyncHttpClientWithRetry
.makeRequestWithRetry(request)
.thenAccept(
response -> {
try (Response ignored = response) {
if (!response.isSuccessful()) {
int statusCode = response.code();
String message = response.message();
hudiMetadataExtractorMetrics
.incrementTableMetadataProcessingFailureCounter(
MetricsConstants.MetadataUploadFailureReasons
.PRESIGNED_URL_UPLOAD_FAILURE);
throw new FileUploadException(
String.format(
"File upload failed: response code: %s error message: %s",
statusCode, message));
}
}
})
.join(); // Wait for the upload to complete
}));
}
private @Nonnull Request getRequest(
String presignedUrl, int fileUploadStreamBatchSize, FileStreamData fileStreamData) {
Request request;
MediaType mediaType = MediaType.parse("application/octet-stream");
if (fileStreamData.getFileSize() <= fileUploadStreamBatchSize) {
// if the file size does not exceed the stream batch size, upload it in a single buffered request
RequestBody requestBody;
try {
requestBody = RequestBody.create(mediaType, IOUtils.toByteArray(fileStreamData.getInputStream()));
request = new Request.Builder().url(presignedUrl).put(requestBody).build();
} catch (IOException e) {
throw new FileUploadException(e);
}
} else {
request =
new Request.Builder()
.url(presignedUrl)
.put(
// okhttp streaming:
// https://github.com/square/okhttp/blob/master/samples/guide/src/main/java/okhttp3/recipes/PostStreaming.java
new RequestBody() {
@Override
public MediaType contentType() {
return mediaType;
}
@Override
public long contentLength() {
return fileStreamData.getFileSize();
}
@Override
public void writeTo(@Nonnull BufferedSink sink) throws IOException {
try (InputStream is = fileStreamData.getInputStream()) {
byte[] buffer = new byte[fileUploadStreamBatchSize];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
sink.write(buffer, 0, bytesRead);
}
}
}
})
.build();
}
return request;
}
}
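// Illustrative usage sketch (added for clarity, not part of the original source):
// uploads one timeline file to a presigned URL with an assumed 8 MiB streaming
// batch size; files larger than the threshold are streamed instead of buffered.
// The presigned URL, source file URI, and batch size are assumptions.
class PresignedUrlFileUploaderUsageSketch {
  static void uploadOnce(PresignedUrlFileUploader uploader) {
    int streamBatchSizeBytes = 8 * 1024 * 1024;
    uploader
        .uploadFileToPresignedUrl(
            "https://upload.example.com/presigned-put-url",
            "s3://example-bucket/tables/orders/.hoodie/0001.commit",
            streamBatchSizeBytes)
        .join();
  }
}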
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/S3AsyncStorageClient.java
|
package ai.onehouse.storage;
import ai.onehouse.exceptions.AccessDeniedException;
import ai.onehouse.exceptions.ObjectStorageClientException;
import ai.onehouse.exceptions.RateLimitException;
import com.google.inject.Inject;
import ai.onehouse.storage.models.File;
import ai.onehouse.storage.models.FileStreamData;
import ai.onehouse.storage.providers.S3AsyncClientProvider;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import software.amazon.awssdk.awscore.exception.AwsServiceException;
import software.amazon.awssdk.awscore.internal.AwsErrorCode;
import software.amazon.awssdk.core.BytesWrapper;
import software.amazon.awssdk.core.async.AsyncResponseTransformer;
import software.amazon.awssdk.core.exception.SdkClientException;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Response;
@Slf4j
public class S3AsyncStorageClient extends AbstractAsyncStorageClient {
public static final String ACCESS_DENIED_ERROR_CODE = "AccessDenied";
public static final String EXPIRED_TOKEN_ERROR_CODE = "ExpiredToken";
private final S3AsyncClientProvider s3AsyncClientProvider;
@Inject
public S3AsyncStorageClient(
@Nonnull S3AsyncClientProvider s3AsyncClientProvider,
@Nonnull StorageUtils storageUtils,
@Nonnull ExecutorService executorService) {
super(executorService, storageUtils);
this.s3AsyncClientProvider = s3AsyncClientProvider;
}
@Override
public CompletableFuture<Pair<String, List<File>>> fetchObjectsByPage(
String bucketName, String prefix, String continuationToken, String startAfter) {
log.debug(
"fetching files in dir {} continuationToken {} startAfter {}",
prefix,
continuationToken,
startAfter);
ListObjectsV2Request.Builder listObjectsV2RequestBuilder =
ListObjectsV2Request.builder().bucket(bucketName).prefix(prefix).delimiter("/");
if (StringUtils.isNotBlank(startAfter)) {
listObjectsV2RequestBuilder.startAfter(startAfter);
}
if (StringUtils.isNotBlank(continuationToken)) {
listObjectsV2RequestBuilder.continuationToken(continuationToken);
}
return s3AsyncClientProvider
.getS3AsyncClient()
.listObjectsV2(listObjectsV2RequestBuilder.build())
.thenComposeAsync(
listObjectsV2Response -> {
// process response
List<File> files = new ArrayList<>(processListObjectsV2Response(listObjectsV2Response, prefix));
String newContinuationToken =
Boolean.TRUE.equals(listObjectsV2Response.isTruncated())
? listObjectsV2Response.nextContinuationToken()
: null;
return CompletableFuture.completedFuture(Pair.of(newContinuationToken, files));
},
executorService)
.exceptionally(
ex -> {
log.error("Failed to fetch objects by page", ex);
throw clientException(ex, "fetchObjectsByPage", bucketName);
}
);
}
private List<File> processListObjectsV2Response(ListObjectsV2Response response, String prefix) {
// process files
List<File> files =
response.contents().stream()
.map(
s3Object ->
File.builder()
.filename(s3Object.key().replaceFirst(prefix, ""))
.lastModifiedAt(s3Object.lastModified())
.isDirectory(false)
.build())
.collect(Collectors.toList());
// process directories
files.addAll(
response.commonPrefixes().stream()
.map(
commonPrefix ->
File.builder()
.filename(commonPrefix.prefix().replaceFirst(prefix, ""))
.isDirectory(true)
.lastModifiedAt(Instant.EPOCH)
.build())
.collect(Collectors.toList()));
return files;
}
@Override
public CompletableFuture<FileStreamData> streamFileAsync(String s3Uri) {
log.debug("Reading S3 file as InputStream: {}", s3Uri);
GetObjectRequest getObjectRequest = getObjectRequest(s3Uri);
return s3AsyncClientProvider
.getS3AsyncClient()
.getObject(getObjectRequest, AsyncResponseTransformer.toBlockingInputStream())
.thenApply(
responseResponseInputStream ->
FileStreamData.builder()
.inputStream(responseResponseInputStream)
.fileSize(responseResponseInputStream.response().contentLength())
.build())
.exceptionally(
ex -> {
log.error("Failed to stream file", ex);
throw clientException(ex, "streamFileAsync", s3Uri);
}
);
}
@Override
public CompletableFuture<byte[]> readFileAsBytes(String s3Uri) {
log.debug("Reading S3 file: {}", s3Uri);
GetObjectRequest getObjectRequest = getObjectRequest(s3Uri);
return s3AsyncClientProvider
.getS3AsyncClient()
.getObject(getObjectRequest, AsyncResponseTransformer.toBytes())
.thenApplyAsync(BytesWrapper::asByteArray)
.exceptionally(
ex -> {
log.error("Failed to read file as bytes", ex);
throw clientException(ex, "readFileAsBytes", s3Uri);
}
);
}
private GetObjectRequest getObjectRequest(String s3Uri) {
return GetObjectRequest.builder()
.bucket(storageUtils.getBucketNameFromUri(s3Uri))
.key(storageUtils.getPathFromUrl(s3Uri))
.build();
}
@Override
protected RuntimeException clientException(Throwable ex, String operation, String path) {
Throwable wrappedException = ex.getCause();
if (wrappedException instanceof AwsServiceException) {
AwsServiceException awsServiceException = (AwsServiceException) wrappedException;
log.info("Error in S3 operation : {} on path : {} code : {} message : {}", operation, path,
awsServiceException.awsErrorDetails().errorCode(), awsServiceException.awsErrorDetails().errorMessage());
if (AwsErrorCode.isThrottlingErrorCode(awsServiceException.awsErrorDetails().errorCode())) {
return new RateLimitException(String.format("Throttled by S3 for operation : %s on path : %s", operation, path));
}
if (awsServiceException.awsErrorDetails().errorCode().equalsIgnoreCase(ACCESS_DENIED_ERROR_CODE)
|| awsServiceException.awsErrorDetails().errorCode().equalsIgnoreCase(EXPIRED_TOKEN_ERROR_CODE)) {
return new AccessDeniedException(
String.format("AccessDenied for operation : %s on path : %s with message : %s",
operation, path, awsServiceException.awsErrorDetails().errorMessage()));
}
} else if (wrappedException instanceof SdkClientException) {
SdkClientException sdkClientException = (SdkClientException) wrappedException;
log.info("Error in S3 Acquire operation : {} on path : {} message : {}", operation, path, sdkClientException.getMessage());
if (sdkClientException.getMessage() != null &&
sdkClientException.getMessage().contains("Acquire operation took longer than the configured maximum time")) {
return new RateLimitException(String.format("Throttled by S3 (connection pool exhausted) for operation : %s on path : %s", operation, path));
}
} else if (wrappedException instanceof RateLimitException || wrappedException instanceof AccessDeniedException) {
return (RuntimeException) wrappedException;
}
return new ObjectStorageClientException(ex);
}
@Override
public void refreshClient() {
s3AsyncClientProvider.refreshClient();
}
@Override
public void initializeClient() {
s3AsyncClientProvider.getS3AsyncClient();
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/StorageUtils.java
|
package ai.onehouse.storage;
import static ai.onehouse.constants.StorageConstants.OBJECT_STORAGE_URI_PATTERN;
import java.util.regex.Matcher;
public class StorageUtils {
private static final String INVALID_STORAGE_URI_ERROR_MSG = "Invalid Object storage Uri: ";
public String getPathFromUrl(String uri) {
if (!OBJECT_STORAGE_URI_PATTERN.matcher(uri).matches()) {
throw new IllegalArgumentException(INVALID_STORAGE_URI_ERROR_MSG + uri);
}
String prefix = "";
// Remove the scheme and bucket name from the object storage path
int startIndex = uri.indexOf('/', 5); // Skip "s3://" or "gs://"
if (startIndex != -1) {
prefix = uri.substring(startIndex + 1);
}
return prefix;
}
public String constructFileUri(String directoryUri, String filePath) {
return String.format(
"%s/%s",
directoryUri.endsWith("/")
? directoryUri.substring(0, directoryUri.length() - 1)
: directoryUri,
filePath.startsWith("/") ? filePath.substring(1) : filePath);
}
public String getBucketNameFromUri(String uri) {
Matcher matcher = OBJECT_STORAGE_URI_PATTERN.matcher(uri);
if (matcher.matches()) {
return matcher.group(2);
}
throw new IllegalArgumentException(INVALID_STORAGE_URI_ERROR_MSG + uri);
}
}
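// Illustrative usage sketch (added for clarity, not part of the original source):
// shows how the helper splits an object storage URI into bucket and key, and how
// constructFileUri joins a directory URI with a relative path. The example URIs are
// assumptions and must match OBJECT_STORAGE_URI_PATTERN (s3:// or gs:// style).
class StorageUtilsUsageSketch {
  public static void main(String[] args) {
    StorageUtils storageUtils = new StorageUtils();
    // prints "example-bucket"
    System.out.println(storageUtils.getBucketNameFromUri("s3://example-bucket/tables/orders"));
    // prints "tables/orders"
    System.out.println(storageUtils.getPathFromUrl("s3://example-bucket/tables/orders"));
    // prints "s3://example-bucket/tables/orders/.hoodie/hoodie.properties"
    System.out.println(
        storageUtils.constructFileUri("s3://example-bucket/tables/orders/", "/.hoodie/hoodie.properties"));
  }
}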
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/models/File.java
|
package ai.onehouse.storage.models;
import java.time.Instant;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
@Builder
@Value
@Jacksonized
public class File {
@NonNull String filename; // filename does not include the path prefix
@NonNull Instant lastModifiedAt;
boolean isDirectory;
}
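// Illustrative construction sketch (added for clarity, not part of the original source):
// builds a File entry the way the storage clients do, with the path prefix already
// stripped from the filename. The example values are assumptions.
class FileModelUsageSketch {
  static File exampleCommitFile() {
    return File.builder()
        .filename("0001.commit")
        .lastModifiedAt(Instant.now())
        .isDirectory(false)
        .build();
  }
}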
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/models/FileStreamData.java
|
package ai.onehouse.storage.models;
import java.io.InputStream;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
@Builder
@Value
public class FileStreamData {
@NonNull InputStream inputStream;
long fileSize;
}
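// Illustrative construction sketch (added for clarity, not part of the original source):
// wraps an in-memory payload the same way the storage clients wrap object streams.
// The payload content is an assumption.
class FileStreamDataUsageSketch {
  static FileStreamData exampleStream() {
    byte[] payload = "hoodie.table.name=orders".getBytes(java.nio.charset.StandardCharsets.UTF_8);
    return FileStreamData.builder()
        .inputStream(new java.io.ByteArrayInputStream(payload))
        .fileSize(payload.length)
        .build();
  }
}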
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/providers/GcsClientProvider.java
|
package ai.onehouse.storage.providers;
import static ai.onehouse.constants.StorageConstants.GCP_RESOURCE_NAME_FORMAT;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ImpersonatedCredentials;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import ai.onehouse.config.Config;
import ai.onehouse.config.models.common.FileSystemConfiguration;
import ai.onehouse.config.models.common.GCSConfig;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collections;
import javax.annotation.Nonnull;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Getter
public class GcsClientProvider {
private final GCSConfig gcsConfig;
private static Storage gcsClient;
private static final Logger logger = LoggerFactory.getLogger(GcsClientProvider.class);
@Inject
public GcsClientProvider(@Nonnull Config config) {
FileSystemConfiguration fileSystemConfiguration = config.getFileSystemConfiguration();
this.gcsConfig =
fileSystemConfiguration.getGcsConfig() != null
? fileSystemConfiguration.getGcsConfig()
: GCSConfig.builder().build();
}
@VisibleForTesting
Storage createGcsClient() {
logger.debug("Instantiating GCS storage client");
validateGcsConfig(gcsConfig);
// Use Google Application Default Credentials (ADC) if a service account key path is not provided
// https://cloud.google.com/docs/authentication/provide-credentials-adc
if (gcsConfig.getGcpServiceAccountKeyPath().isPresent()) {
StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder();
try (FileInputStream serviceAccountStream = readAsStream()) {
if (gcsConfig.getServiceAccountToImpersonate().isPresent()) {
// Impersonate Service Account
ImpersonatedCredentials impersonatedCredentials = ImpersonatedCredentials.create(
GoogleCredentials.fromStream(serviceAccountStream),
gcsConfig.getServiceAccountToImpersonate().get(),
null,
Collections.singletonList("https://www.googleapis.com/auth/cloud-platform"),
3600
);
storageOptionsBuilder.setCredentials(impersonatedCredentials);
} else {
storageOptionsBuilder.setCredentials(GoogleCredentials.fromStream(serviceAccountStream));
}
if (gcsConfig.getProjectId().isPresent()) {
storageOptionsBuilder.setProjectId(gcsConfig.getProjectId().get());
}
return storageOptionsBuilder.build().getService();
} catch (IOException e) {
throw new RuntimeException("Error reading service account JSON key file", e);
}
}
return StorageOptions.getDefaultInstance().getService();
}
public Storage getGcsClient() {
if (gcsClient == null) {
gcsClient = createGcsClient();
}
return gcsClient;
}
public void refreshClient() {
gcsClient = createGcsClient();
}
private void validateGcsConfig(GCSConfig gcsConfig) {
if (gcsConfig.getProjectId().isPresent()
&& !gcsConfig.getProjectId().get().matches(GCP_RESOURCE_NAME_FORMAT)) {
throw new IllegalArgumentException(
"Invalid GCP project ID: " + gcsConfig.getProjectId().get());
}
if (gcsConfig.getGcpServiceAccountKeyPath().isPresent()
&& StringUtils.isBlank(gcsConfig.getGcpServiceAccountKeyPath().get())) {
throw new IllegalArgumentException(
"Invalid GCP Service Account Key Path: " + gcsConfig.getGcpServiceAccountKeyPath().get());
}
}
@VisibleForTesting
FileInputStream readAsStream() throws FileNotFoundException {
return new FileInputStream(gcsConfig.getGcpServiceAccountKeyPath().get());
}
}
|
0
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage
|
java-sources/ai/onehouse/lakeview/0.21.0/ai/onehouse/storage/providers/S3AsyncClientProvider.java
|
package ai.onehouse.storage.providers;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import ai.onehouse.config.Config;
import ai.onehouse.config.models.common.FileSystemConfiguration;
import ai.onehouse.config.models.common.S3Config;
import java.time.Duration;
import java.util.concurrent.ExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsSessionCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
import software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption;
import software.amazon.awssdk.core.retry.RetryPolicy;
import software.amazon.awssdk.core.retry.backoff.BackoffStrategy;
import software.amazon.awssdk.core.retry.conditions.RetryCondition;
import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.S3AsyncClientBuilder;
import software.amazon.awssdk.services.sts.StsClient;
import software.amazon.awssdk.services.sts.model.AssumeRoleRequest;
import software.amazon.awssdk.services.sts.model.AssumeRoleResponse;
public class S3AsyncClientProvider {
private final S3Config s3Config;
private final MetadataExtractorConfig metadataExtractorConfig;
private final ExecutorService executorService;
private static S3AsyncClient s3AsyncClient;
private static final Logger logger = LoggerFactory.getLogger(S3AsyncClientProvider.class);
public S3AsyncClientProvider(@Nonnull Config config, @Nonnull ExecutorService executorService) {
FileSystemConfiguration fileSystemConfiguration = config.getFileSystemConfiguration();
this.s3Config = fileSystemConfiguration.getS3Config();
this.metadataExtractorConfig = config.getMetadataExtractorConfig();
this.executorService = executorService;
}
protected S3AsyncClient createS3AsyncClient() {
logger.debug("Instantiating S3 storage client");
validateS3Config(s3Config);
S3AsyncClientBuilder s3AsyncClientBuilder = S3AsyncClient.builder();
if (s3Config.getAccessKey().isPresent() && s3Config.getAccessSecret().isPresent()) {
logger.debug("Using provided accessKey and accessSecret for authentication");
AwsBasicCredentials awsCredentials =
AwsBasicCredentials.create(
s3Config.getAccessKey().get(), s3Config.getAccessSecret().get());
s3AsyncClientBuilder.credentialsProvider(StaticCredentialsProvider.create(awsCredentials));
} else if (s3Config.getArnToImpersonate().isPresent()) {
// Assume role of Destination ARN
try (StsClient stsClient = StsClient.builder()
.region(Region.of(s3Config.getRegion()))
.build()) {
AssumeRoleRequest assumeRoleRequest = AssumeRoleRequest.builder()
.roleArn(s3Config.getArnToImpersonate().get())
.roleSessionName(String.format("S3AsyncClientSession-%s", extractAccountIdFromArn(s3Config.getArnToImpersonate().get())))
.build();
AssumeRoleResponse assumeRoleResponse = stsClient.assumeRole(assumeRoleRequest);
AwsSessionCredentials tempCredentials = AwsSessionCredentials.create(
assumeRoleResponse.credentials().accessKeyId(),
assumeRoleResponse.credentials().secretAccessKey(),
assumeRoleResponse.credentials().sessionToken()
);
s3AsyncClientBuilder.credentialsProvider(StaticCredentialsProvider.create(tempCredentials));
}
}
RetryPolicy retryPolicy = RetryPolicy.builder()
.numRetries(metadataExtractorConfig.getObjectStoreNumRetries()) // Increase if needed
.backoffStrategy(BackoffStrategy.defaultThrottlingStrategy()) // Exponential backoff for throttling
.throttlingBackoffStrategy(BackoffStrategy.defaultThrottlingStrategy())
.retryCondition(RetryCondition.defaultRetryCondition())
.build();
return s3AsyncClientBuilder
.overrideConfiguration(builder -> builder.retryPolicy(retryPolicy))
.httpClient(NettyNioAsyncHttpClient.builder()
.maxConcurrency(metadataExtractorConfig.getNettyMaxConcurrency())
.connectionTimeout(Duration.ofSeconds(metadataExtractorConfig.getNettyConnectionTimeoutSeconds()))
.build())
.region(Region.of(s3Config.getRegion()))
.asyncConfiguration(
builder ->
builder.advancedOption(
SdkAdvancedAsyncClientOption.FUTURE_COMPLETION_EXECUTOR, executorService))
.build();
}
private static String extractAccountIdFromArn(String arn) {
Matcher matcher = Pattern.compile("arn:aws:iam::(\\d+):role/").matcher(arn);
return matcher.find() ? matcher.group(1) : "";
}
public S3AsyncClient getS3AsyncClient() {
if (s3AsyncClient == null) {
s3AsyncClient = createS3AsyncClient();
}
return s3AsyncClient;
}
public void refreshClient() {
s3AsyncClient = createS3AsyncClient();
}
private void validateS3Config(S3Config s3Config) {
if (s3Config == null) {
throw new IllegalArgumentException("S3 Config not found");
}
if (StringUtils.isBlank(s3Config.getRegion())) {
throw new IllegalArgumentException("Aws region cannot be empty");
}
}
@VisibleForTesting
static void resetS3AsyncClient() {
s3AsyncClient = null;
}
}
|
0
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview/sync/LakeviewGlueSyncTool.java
|
package ai.onehouse.lakeview.sync;
import org.apache.hadoop.conf.Configuration;
import org.apache.hudi.aws.sync.AwsGlueCatalogSyncTool;
import org.apache.hudi.sync.common.HoodieSyncTool;
import java.util.Properties;
public class LakeviewGlueSyncTool extends HoodieSyncTool implements AutoCloseable {
private final AwsGlueCatalogSyncTool awsGlueCatalogSyncTool;
private final LakeviewSyncTool lakeviewSyncTool;
public LakeviewGlueSyncTool(Properties props, Configuration hadoopConf) {
super(props, hadoopConf);
this.awsGlueCatalogSyncTool = new AwsGlueCatalogSyncTool(props, hadoopConf);
this.lakeviewSyncTool = new LakeviewSyncTool(props, hadoopConf);
}
@Override
public void syncHoodieTable() {
// sync with glue
awsGlueCatalogSyncTool.syncHoodieTable();
// perform syncing with lakeview as well
lakeviewSyncTool.syncHoodieTable();
}
@Override
public void close() {
awsGlueCatalogSyncTool.close();
lakeviewSyncTool.close();
}
}
|
0
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview/sync/LakeviewHiveSyncTool.java
|
package ai.onehouse.lakeview.sync;
import org.apache.hadoop.conf.Configuration;
import org.apache.hudi.hive.HiveSyncTool;
import org.apache.hudi.sync.common.HoodieSyncTool;
import java.util.Properties;
public class LakeviewHiveSyncTool extends HoodieSyncTool implements AutoCloseable {
private final HiveSyncTool hiveSyncTool;
private final LakeviewSyncTool lakeviewSyncTool;
public LakeviewHiveSyncTool(Properties props, Configuration hadoopConf) {
super(props, hadoopConf);
this.hiveSyncTool = new HiveSyncTool(props, hadoopConf);
this.lakeviewSyncTool = new LakeviewSyncTool(props, hadoopConf);
}
public void syncHoodieTable() {
// sync with hive
hiveSyncTool.syncHoodieTable();
// perform syncing with lakeview as well
lakeviewSyncTool.syncHoodieTable();
}
@Override
public void close() {
hiveSyncTool.close();
lakeviewSyncTool.close();
}
}
|
0
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview/sync/LakeviewSyncConfigHolder.java
|
package ai.onehouse.lakeview.sync;
import ai.onehouse.lakeview.sync.utilities.IdentitySplitter;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParametersDelegate;
import org.apache.hudi.common.config.ConfigProperty;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.sync.common.HoodieSyncConfig;
import java.util.List;
public class LakeviewSyncConfigHolder {
// this class holds static config fields
private LakeviewSyncConfigHolder() {
}
public static final ConfigProperty<String> BASE_PATH = ConfigProperty.key("hoodie.base.path")
.noDefaultValue()
.withDocumentation("Base path on lake storage, under which all the table data is stored. Always prefix it explicitly with the storage scheme (e.g hdfs://, s3:// etc). Hudi stores all the main meta-data about commits, savepoints, cleaning audit logs etc in .hoodie directory under this base path directory.");
public static final ConfigProperty<Boolean> LAKEVIEW_SYNC_ENABLED = ConfigProperty
.key("hoodie.datasource.lakeview_sync.enable")
.defaultValue(false)
.withDocumentation("When set to true, register/sync the table to Lakeview.");
public static final ConfigProperty<String> LAKEVIEW_VERSION = ConfigProperty
.key("hoodie.meta.sync.lakeview.version")
.defaultValue("V1")
.withDocumentation("Lakeview version");
public static final ConfigProperty<String> LAKEVIEW_PROJECT_ID = ConfigProperty
.key("hoodie.meta.sync.lakeview.project_id")
.noDefaultValue()
.withDocumentation("Project ID in lakeview");
public static final ConfigProperty<String> LAKEVIEW_API_KEY = ConfigProperty
.key("hoodie.meta.sync.lakeview.api_key")
.noDefaultValue()
.withDocumentation("API key to access lakeview");
public static final ConfigProperty<String> LAKEVIEW_API_SECRET = ConfigProperty
.key("hoodie.meta.sync.lakeview.api_secret")
.noDefaultValue()
.withDocumentation("API secret to access lakeview");
public static final ConfigProperty<String> LAKEVIEW_USERID = ConfigProperty
.key("hoodie.meta.sync.lakeview.user_id")
.noDefaultValue()
.withDocumentation("UserId used for creating API key, secret in lakeview");
public static final ConfigProperty<String> LAKEVIEW_S3_REGION = ConfigProperty
.key("hoodie.meta.sync.lakeview.s3.region")
.noDefaultValue()
.withDocumentation("S3 region associated with the table base path");
public static final ConfigProperty<String> LAKEVIEW_S3_ACCESS_KEY = ConfigProperty
.key("hoodie.meta.sync.lakeview.s3.access_key")
.noDefaultValue()
.withDocumentation("[Optional]: Access key required to access table base paths present in S3");
public static final ConfigProperty<String> LAKEVIEW_S3_ACCESS_SECRET = ConfigProperty
.key("hoodie.meta.sync.lakeview.s3.access_secret")
.noDefaultValue()
.withDocumentation("[Optional]: Access secret required to access table base paths present in S3");
public static final ConfigProperty<String> LAKEVIEW_GCS_PROJECT_ID = ConfigProperty
.key("hoodie.meta.sync.lakeview.gcs.project_id")
.noDefaultValue()
.withDocumentation("GCS Project ID the table base path belongs to");
public static final ConfigProperty<String> LAKEVIEW_GCS_SERVICE_ACCOUNT_KEY_PATH = ConfigProperty
.key("hoodie.meta.sync.lakeview.gcs.gcp_service_account_key_path")
.noDefaultValue()
.withDocumentation("[Optional]: GCS Service account key path to access the table base path present in GCS");
public static final ConfigProperty<String> LAKEVIEW_METADATA_EXTRACTOR_PATH_EXCLUSION_PATTERNS = ConfigProperty
.key("hoodie.meta.sync.lakeview.metadata_extractor.path_exclusion_patterns")
.defaultValue("")
.withDocumentation("List of pattens to be ignored by lakeview metadata extractor");
/**
* Example properties:
* <p>
* hoodie.meta.sync.lakeview.metadata_extractor.lakes.<lake1>.databases.<database1>.base_paths=<basepath11>,<basepath12>
* hoodie.meta.sync.lakeview.metadata_extractor.lakes.<lake1>.databases.<database2>.base_paths=<basepath13>,<basepath14>
* <p>
* NOTE: multiple properties with hoodie.meta.sync.lakeview.metadata_extractor.lakes prefix can be included in the properties
*/
public static final ConfigProperty<String> LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS = ConfigProperty
.key("hoodie.meta.sync.lakeview.metadata_extractor.lakes")
.noDefaultValue()
.withDocumentation("Lake name & database name that should be applied to specified list of table base paths in lakeview metadata extractor");
public static final ConfigProperty<Integer> LAKEVIEW_HTTP_CLIENT_TIMEOUT_SECONDS = ConfigProperty
.key("hoodie.datasource.lakeview_sync.http.client.timeout")
.defaultValue(15)
.withDocumentation("Timeout set to http client used by lakeview sync tool");
public static final ConfigProperty<Integer> LAKEVIEW_HTTP_CLIENT_MAX_RETRIES = ConfigProperty
.key("hoodie.datasource.lakeview_sync.http.client.retries")
.defaultValue(3)
.withDocumentation("Max retries by http client used by lakeview sync tool");
public static final ConfigProperty<Integer> LAKEVIEW_HTTP_CLIENT_RETRY_DELAY_MS = ConfigProperty
.key("hoodie.datasource.lakeview_sync.http.client.retry.delay.ms")
.defaultValue(1000)
.withDocumentation("Delay between retries of http client used by lakeview sync tool");
public static final ConfigProperty<Integer> LAKEVIEW_SYNC_TOOL_TIMEOUT_SECONDS = ConfigProperty
.key("hoodie.datasource.lakeview_sync.timeout.seconds")
.defaultValue(1800) // default timeout of 30 minutes
.withDocumentation("Timeout in seconds for each sync in lakeview. Set to -1 to have no timeout");
public static class LakeviewSyncConfigParams {
@ParametersDelegate()
public final HoodieSyncConfig.HoodieSyncConfigParams hoodieSyncConfigParams = new HoodieSyncConfig.HoodieSyncConfigParams();
@Parameter(names = {"--version"}, description = "Version of lakeview config")
public String version;
@Parameter(names = {"--project-id"}, description = "Lakeview project id", required = true, order = 1)
public String projectId;
@Parameter(names = {"--api-key"}, description = "Lakeview API Key", required = true, order = 2)
public String apiKey;
@Parameter(names = {"--api-secret"}, description = "Lakeview API Secret", required = true, password = true, order = 3)
public String apiSecret;
@Parameter(names = {"--userid"}, description = "Lakeview User ID", required = true, order = 4)
public String userId;
@Parameter(names = {"--s3-region"}, description = "S3 Bucket region")
public String s3Region;
@Parameter(names = {"--s3-access-key"}, description = "Access key to use S3 Bucket")
public String s3AccessKey;
@Parameter(names = {"--s3-access-secret"}, description = "Access secret to use S3 Bucket", password = true)
public String s3AccessSecret;
@Parameter(names = {"--gcp-project-id"}, description = "GCP Project ID")
public String gcpProjectId;
@Parameter(names = {"--gcp-service-account-key-path"}, description = "GCP Project Service account key path")
public String gcpServiceAccountKeyPath;
@Parameter(names = {"--path-exclusion-patterns"}, description = "Path exclusion patterns (comma separated)")
public String pathExclusionPatterns;
@Parameter(names = {"--lake-paths"}, description = "Lake/Database paths (eg: <lake1>.databases.<database1>.base_paths=<basepath11>,<basepath12>)",
required = true, order = 5, splitter = IdentitySplitter.class)
public List<String> lakePaths;
@Parameter(names = {"--http-client-timeout"}, description = "Http client timeout")
public int httpClientTimeout;
@Parameter(names = {"--http-client-max-retries"}, description = "Max retries by the http client")
public int httpClientMaxRetries;
@Parameter(names = {"--http-client-retries-delay-ms"}, description = "Delay between retries by the http client in milliseconds")
public int httpClientDelayBetweenRetriesInMs;
@Parameter(names = {"--timeout"}, description = "Timeout in seconds to run a sync operation in lakeview")
public int timeoutInSeconds;
public boolean isHelp() {
return hoodieSyncConfigParams.isHelp();
}
public TypedProperties toProps() {
final TypedProperties props = hoodieSyncConfigParams.toProps();
props.setPropertyIfNonNull(LAKEVIEW_SYNC_ENABLED.key(), Boolean.TRUE.toString().toLowerCase());
props.setPropertyIfNonNull(LAKEVIEW_VERSION.key(), version);
props.setPropertyIfNonNull(LAKEVIEW_PROJECT_ID.key(), projectId);
props.setPropertyIfNonNull(LAKEVIEW_API_KEY.key(), apiKey);
props.setPropertyIfNonNull(LAKEVIEW_API_SECRET.key(), apiSecret);
props.setPropertyIfNonNull(LAKEVIEW_USERID.key(), userId);
props.setPropertyIfNonNull(BASE_PATH.key(), hoodieSyncConfigParams.basePath);
props.setPropertyIfNonNull(LAKEVIEW_S3_REGION.key(), s3Region);
props.setPropertyIfNonNull(LAKEVIEW_S3_ACCESS_KEY.key(), s3AccessKey);
props.setPropertyIfNonNull(LAKEVIEW_S3_ACCESS_SECRET.key(), s3AccessSecret);
props.setPropertyIfNonNull(LAKEVIEW_GCS_PROJECT_ID.key(), gcpProjectId);
props.setPropertyIfNonNull(LAKEVIEW_GCS_SERVICE_ACCOUNT_KEY_PATH.key(), gcpServiceAccountKeyPath);
props.setPropertyIfNonNull(LAKEVIEW_METADATA_EXTRACTOR_PATH_EXCLUSION_PATTERNS.key(), pathExclusionPatterns);
for (String lakePath : lakePaths) {
String[] fields = lakePath.split("=");
String key = fields[0];
String value = fields[1];
props.setPropertyIfNonNull(LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS.key() + "." + key, value);
}
props.setPropertyIfNonNull(LAKEVIEW_HTTP_CLIENT_TIMEOUT_SECONDS.key(), httpClientTimeout);
props.setPropertyIfNonNull(LAKEVIEW_HTTP_CLIENT_MAX_RETRIES.key(), httpClientMaxRetries);
props.setPropertyIfNonNull(LAKEVIEW_HTTP_CLIENT_RETRY_DELAY_MS.key(), httpClientDelayBetweenRetriesInMs);
props.setPropertyIfNonNull(LAKEVIEW_SYNC_TOOL_TIMEOUT_SECONDS.key(), timeoutInSeconds);
return props;
}
}
}
|
0
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview/sync/LakeviewSyncTool.java
|
package ai.onehouse.lakeview.sync;
import ai.onehouse.api.AsyncHttpClientWithRetry;
import ai.onehouse.api.OnehouseApiClient;
import ai.onehouse.config.Config;
import ai.onehouse.config.ConfigProvider;
import ai.onehouse.config.models.common.FileSystemConfiguration;
import ai.onehouse.config.models.common.GCSConfig;
import ai.onehouse.config.models.common.OnehouseClientConfig;
import ai.onehouse.config.models.common.S3Config;
import ai.onehouse.config.models.configv1.ConfigV1;
import ai.onehouse.config.models.configv1.Database;
import ai.onehouse.config.models.configv1.MetadataExtractorConfig;
import ai.onehouse.config.models.configv1.ParserConfig;
import ai.onehouse.metadata_extractor.ActiveTimelineInstantBatcher;
import ai.onehouse.metadata_extractor.HoodiePropertiesReader;
import ai.onehouse.metadata_extractor.TableDiscoveryAndUploadJob;
import ai.onehouse.metadata_extractor.TableDiscoveryService;
import ai.onehouse.metadata_extractor.TableMetadataUploaderService;
import ai.onehouse.metadata_extractor.TimelineCommitInstantsUploader;
import ai.onehouse.metrics.LakeViewExtractorMetrics;
import ai.onehouse.metrics.Metrics;
import ai.onehouse.storage.AsyncStorageClient;
import ai.onehouse.storage.GCSAsyncStorageClient;
import ai.onehouse.storage.PresignedUrlFileUploader;
import ai.onehouse.storage.S3AsyncStorageClient;
import ai.onehouse.storage.StorageUtils;
import ai.onehouse.storage.providers.GcsClientProvider;
import ai.onehouse.storage.providers.S3AsyncClientProvider;
import com.beust.jcommander.JCommander;
import com.google.common.annotations.VisibleForTesting;
import okhttp3.Dispatcher;
import okhttp3.OkHttpClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hudi.common.config.HoodieConfig;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.sync.common.HoodieSyncTool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static ai.onehouse.lakeview.sync.LakeviewSyncConfigHolder.BASE_PATH;
import static ai.onehouse.lakeview.sync.LakeviewSyncConfigHolder.LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS;
import static ai.onehouse.lakeview.sync.LakeviewSyncConfigHolder.LAKEVIEW_METADATA_EXTRACTOR_PATH_EXCLUSION_PATTERNS;
public class LakeviewSyncTool extends HoodieSyncTool implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(LakeviewSyncTool.class);
private static final Pattern LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS_PATTERN = Pattern.compile("([^.]+)\\.databases\\.([^.]+)\\.base_paths");
private static final int HTTP_CLIENT_DEFAULT_TIMEOUT_SECONDS = 15;
private static final int HTTP_CLIENT_MAX_RETRIES = 3;
private static final long HTTP_CLIENT_RETRY_DELAY_MS = 1000;
private final boolean isLakeviewSyncToolEnabled;
@Nullable
private final Config config;
private final ExecutorService executorService;
@Nullable
private final TableDiscoveryAndUploadJob tableDiscoveryAndUploadJob;
@Nullable
private final AsyncHttpClientWithRetry asyncHttpClientWithRetry;
private final int httpClientTimeoutSeconds;
private final int httpClientMaxRetries;
private final long httpClientRetryDelayMs;
private final long timeoutInSeconds;
public LakeviewSyncTool(Properties props, Configuration hadoopConf) {
super(props, hadoopConf);
HoodieConfig hoodieConfig = new HoodieConfig(props);
this.isLakeviewSyncToolEnabled = hoodieConfig.getBooleanOrDefault(LakeviewSyncConfigHolder.LAKEVIEW_SYNC_ENABLED);
this.executorService = Executors.newFixedThreadPool(2);
if (isLakeviewSyncToolEnabled) {
this.config = getConfig(hoodieConfig);
this.asyncHttpClientWithRetry = getAsyncHttpClientWithRetry(executorService);
this.tableDiscoveryAndUploadJob = getTableDiscoveryAndUploadJob(this.config, this.executorService, this.asyncHttpClientWithRetry);
this.httpClientTimeoutSeconds = hoodieConfig.getIntOrDefault(LakeviewSyncConfigHolder.LAKEVIEW_HTTP_CLIENT_TIMEOUT_SECONDS);
this.httpClientMaxRetries = hoodieConfig.getIntOrDefault(LakeviewSyncConfigHolder.LAKEVIEW_HTTP_CLIENT_MAX_RETRIES);
this.httpClientRetryDelayMs = Option.ofNullable(hoodieConfig.getLong(LakeviewSyncConfigHolder.LAKEVIEW_HTTP_CLIENT_RETRY_DELAY_MS)).orElse(Long.valueOf(LakeviewSyncConfigHolder.LAKEVIEW_HTTP_CLIENT_RETRY_DELAY_MS.defaultValue()));
this.timeoutInSeconds = Option.ofNullable(hoodieConfig.getLong(LakeviewSyncConfigHolder.LAKEVIEW_SYNC_TOOL_TIMEOUT_SECONDS)).orElse(Long.valueOf(LakeviewSyncConfigHolder.LAKEVIEW_SYNC_TOOL_TIMEOUT_SECONDS.defaultValue()));
} else {
this.config = null;
this.tableDiscoveryAndUploadJob = null;
this.asyncHttpClientWithRetry = null;
this.httpClientTimeoutSeconds = HTTP_CLIENT_DEFAULT_TIMEOUT_SECONDS;
this.httpClientMaxRetries = HTTP_CLIENT_MAX_RETRIES;
this.httpClientRetryDelayMs = HTTP_CLIENT_RETRY_DELAY_MS;
this.timeoutInSeconds = -1;
}
}
private Config getConfig(HoodieConfig hoodieConfig) {
List<ParserConfig> parserConfigList = getParserConfig();
AtomicReference<String> lakeNameRef = new AtomicReference<>();
AtomicReference<String> databaseNameRef = new AtomicReference<>();
String tableBasePath = hoodieConfig.getString(BASE_PATH);
String finalTableBasePath;
if (tableBasePath.startsWith("s3a://")) {
finalTableBasePath = tableBasePath.replace("s3a://", "s3://");
} else {
finalTableBasePath = tableBasePath;
}
// identify the lake & database to which the current table base path belongs
parserConfigList
.forEach(parserConfig -> parserConfig.getDatabases()
.forEach(database -> {
for (String basePath : database.getBasePaths()) {
if (finalTableBasePath.startsWith(basePath) && lakeNameRef.get() == null) {
lakeNameRef.set(parserConfig.getLake());
databaseNameRef.set(database.getName());
break;
}
}
}));
if (lakeNameRef.get() != null) {
ParserConfig parserConfig = ParserConfig.builder()
.lake(lakeNameRef.get())
.databases(Collections.singletonList(Database.builder()
.name(databaseNameRef.get())
.basePaths(Collections.singletonList(finalTableBasePath))
.build()))
.build();
parserConfigList = Collections.singletonList(parserConfig);
} else {
throw new IllegalArgumentException("Couldn't find any lake/database associated with the current table in the configuration");
}
MetadataExtractorConfig metadataExtractorConfig = MetadataExtractorConfig.builder()
.parserConfig(parserConfigList)
.pathExclusionPatterns(getPathsToExclude(hoodieConfig))
.jobRunMode(MetadataExtractorConfig.JobRunMode.ONCE)
.build();
OnehouseClientConfig onehouseClientConfig = OnehouseClientConfig.builder()
.projectId(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_PROJECT_ID))
.apiKey(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_API_KEY))
.apiSecret(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_API_SECRET))
.userId(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_USERID))
.build();
FileSystemConfiguration fileSystemConfiguration = getFileSystemConfiguration(hoodieConfig);
return ConfigV1.builder()
.version(hoodieConfig.getStringOrDefault(LakeviewSyncConfigHolder.LAKEVIEW_VERSION))
.metadataExtractorConfig(metadataExtractorConfig)
.onehouseClientConfig(onehouseClientConfig)
.fileSystemConfiguration(fileSystemConfiguration)
.build();
}
private FileSystemConfiguration getFileSystemConfiguration(HoodieConfig hoodieConfig) {
FileSystemConfiguration.FileSystemConfigurationBuilder fileSystemConfigurationBuilder = FileSystemConfiguration.builder();
Option<S3Config> s3Config = getS3Config(hoodieConfig);
if (s3Config.isPresent()) {
fileSystemConfigurationBuilder.s3Config(s3Config.get());
} else {
Option<GCSConfig> gcsConfig = getGCSConfig(hoodieConfig);
if (gcsConfig.isPresent()) {
fileSystemConfigurationBuilder.gcsConfig(gcsConfig.get());
} else {
String errorMessage = "Couldn't find any properties related to file system";
LOG.error(errorMessage);
throw new IllegalArgumentException(errorMessage);
}
}
return fileSystemConfigurationBuilder.build();
}
private Option<S3Config> getS3Config(HoodieConfig hoodieConfig) {
String region = hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_S3_REGION);
if (!StringUtils.isNullOrEmpty(region)) {
return Option.of(S3Config.builder()
.region(region)
.accessKey(java.util.Optional.ofNullable(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_S3_ACCESS_KEY)))
.accessSecret(java.util.Optional.ofNullable(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_S3_ACCESS_SECRET)))
.build());
} else {
return Option.empty();
}
}
private Option<GCSConfig> getGCSConfig(HoodieConfig hoodieConfig) {
String gcsProjectId = hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_GCS_PROJECT_ID);
if (!StringUtils.isNullOrEmpty(gcsProjectId)) {
return Option.of(GCSConfig.builder()
.projectId(java.util.Optional.of(gcsProjectId))
.gcpServiceAccountKeyPath(java.util.Optional.ofNullable(hoodieConfig.getString(LakeviewSyncConfigHolder.LAKEVIEW_GCS_SERVICE_ACCOUNT_KEY_PATH)))
.build());
} else {
return Option.empty();
}
}
private java.util.Optional<List<String>> getPathsToExclude(HoodieConfig hoodieConfig) {
String pathsToExclude = hoodieConfig.getStringOrDefault(LAKEVIEW_METADATA_EXTRACTOR_PATH_EXCLUSION_PATTERNS);
if (StringUtils.isNullOrEmpty(pathsToExclude)) {
return java.util.Optional.empty();
} else {
return java.util.Optional.of(Arrays.stream(pathsToExclude.split(","))
.filter(entry -> !entry.isEmpty())
.collect(Collectors.toList()));
}
}
private List<ParserConfig> getParserConfig() {
Map<String, ParserConfig> lakeNameToParserConfig = new HashMap<>();
props.forEach((key, value) -> {
if (key.toString().startsWith(LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS.key())) {
String currentKey = key.toString();
currentKey = currentKey.substring(LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS.key().length() + 1);
Matcher matcher = LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS_PATTERN.matcher(currentKey);
if (matcher.find()) {
String lakeName = matcher.group(1);
String databaseName = matcher.group(2);
List<String> tableBasePaths = Arrays.asList(value.toString().split(","));
ParserConfig currentParserConfig = lakeNameToParserConfig
.computeIfAbsent(lakeName, lake -> ParserConfig.builder().lake(lake).databases(new ArrayList<>()).build());
Database database = Database.builder().name(databaseName).basePaths(tableBasePaths).build();
currentParserConfig.getDatabases().add(database);
} else {
LOG.warn("Couldn't parse lakes/databases from {}={}", key, value);
}
}
});
return new ArrayList<>(lakeNameToParserConfig.values());
}
private TableDiscoveryAndUploadJob getTableDiscoveryAndUploadJob(@Nonnull Config config,
@Nonnull ExecutorService executorService,
@Nonnull AsyncHttpClientWithRetry asyncHttpClientWithRetry) {
StorageUtils storageUtils = new StorageUtils();
AsyncStorageClient asyncStorageClient = getAsyncStorageClient(config, executorService, storageUtils);
ConfigProvider configProvider = new ConfigProvider(config);
LakeViewExtractorMetrics lakeViewExtractorMetrics = new LakeViewExtractorMetrics(Metrics.getInstance(),
configProvider);
TableDiscoveryService tableDiscoveryService = new TableDiscoveryService(asyncStorageClient, storageUtils,
configProvider, executorService, lakeViewExtractorMetrics);
HoodiePropertiesReader hoodiePropertiesReader = new HoodiePropertiesReader(asyncStorageClient,
lakeViewExtractorMetrics);
OnehouseApiClient onehouseApiClient = new OnehouseApiClient(asyncHttpClientWithRetry, config,
lakeViewExtractorMetrics);
PresignedUrlFileUploader presignedUrlFileUploader = new PresignedUrlFileUploader(asyncStorageClient,
asyncHttpClientWithRetry, lakeViewExtractorMetrics);
TimelineCommitInstantsUploader timelineCommitInstantsUploader = new TimelineCommitInstantsUploader(asyncStorageClient,
presignedUrlFileUploader, onehouseApiClient, storageUtils, executorService, new ActiveTimelineInstantBatcher(config),
lakeViewExtractorMetrics, config);
TableMetadataUploaderService tableMetadataUploaderService = new TableMetadataUploaderService(hoodiePropertiesReader,
onehouseApiClient, timelineCommitInstantsUploader, lakeViewExtractorMetrics, executorService);
return new TableDiscoveryAndUploadJob(tableDiscoveryService, tableMetadataUploaderService, lakeViewExtractorMetrics, asyncStorageClient);
}
private AsyncStorageClient getAsyncStorageClient(@Nonnull Config config, @Nonnull ExecutorService executorService,
StorageUtils storageUtils) {
if (config.getFileSystemConfiguration().getS3Config() != null) {
S3AsyncClientProvider s3AsyncClientProvider = new S3AsyncClientProvider(config, executorService);
return new S3AsyncStorageClient(s3AsyncClientProvider, storageUtils, executorService);
} else {
GcsClientProvider gcsClientProvider = new GcsClientProvider(config);
return new GCSAsyncStorageClient(gcsClientProvider, storageUtils, executorService);
}
}
private AsyncHttpClientWithRetry getAsyncHttpClientWithRetry(@Nonnull ExecutorService executorService) {
Dispatcher dispatcher = new Dispatcher(executorService);
OkHttpClient okHttpClient = new OkHttpClient.Builder()
.readTimeout(httpClientTimeoutSeconds, TimeUnit.SECONDS)
.writeTimeout(httpClientTimeoutSeconds, TimeUnit.SECONDS)
.connectTimeout(httpClientTimeoutSeconds, TimeUnit.SECONDS)
.dispatcher(dispatcher)
.build();
return new AsyncHttpClientWithRetry(
httpClientMaxRetries, httpClientRetryDelayMs, okHttpClient);
}
@VisibleForTesting
public @Nullable Config getConfig() {
return config;
}
@Override
public void syncHoodieTable() {
if (isLakeviewSyncToolEnabled && tableDiscoveryAndUploadJob != null) {
Future<?> future = executorService.submit(() -> tableDiscoveryAndUploadJob.runOnce());
try {
if (timeoutInSeconds > 0) {
future.get(timeoutInSeconds, TimeUnit.SECONDS);
} else {
future.get();
}
} catch (TimeoutException e) {
LOG.error("Lakeview sync operation got timed out", e);
future.cancel(true);
} catch (Exception e) {
LOG.error("Failed to perform sync operation in lakeview", e);
}
}
}
@Override
public void close() {
try {
super.close();
if (executorService != null) {
executorService.shutdown();
}
if (tableDiscoveryAndUploadJob != null) {
tableDiscoveryAndUploadJob.shutdown();
}
if (asyncHttpClientWithRetry != null) {
asyncHttpClientWithRetry.shutdownScheduler();
}
} catch (Exception e) {
LOG.error("Failed to close lakeview sync tool", e);
}
}
public static void main(String[] args) {
final LakeviewSyncConfigHolder.LakeviewSyncConfigParams params = new LakeviewSyncConfigHolder.LakeviewSyncConfigParams();
JCommander cmd = JCommander.newBuilder()
.addObject(params)
.build();
cmd.parse(args);
if (params.isHelp()) {
cmd.usage();
} else {
try (LakeviewSyncTool lakeviewSyncTool = new LakeviewSyncTool(params.toProps(), new Configuration())) {
lakeviewSyncTool.syncHoodieTable();
}
}
}
}
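// Illustrative programmatic invocation sketch (added for clarity, not part of the
// original source): mirrors what main() does via JCommander, but builds the sync
// properties directly. All property values (project id, credentials, region, lake and
// database names, base path) are placeholder assumptions.
class LakeviewSyncToolUsageSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(LakeviewSyncConfigHolder.LAKEVIEW_SYNC_ENABLED.key(), "true");
    props.setProperty(LakeviewSyncConfigHolder.LAKEVIEW_PROJECT_ID.key(), "example-project-id");
    props.setProperty(LakeviewSyncConfigHolder.LAKEVIEW_API_KEY.key(), "example-api-key");
    props.setProperty(LakeviewSyncConfigHolder.LAKEVIEW_API_SECRET.key(), "example-api-secret");
    props.setProperty(LakeviewSyncConfigHolder.LAKEVIEW_USERID.key(), "example-user-id");
    props.setProperty(LakeviewSyncConfigHolder.LAKEVIEW_S3_REGION.key(), "us-west-2");
    props.setProperty(LakeviewSyncConfigHolder.BASE_PATH.key(), "s3://example-bucket/tables/orders");
    props.setProperty(
        LakeviewSyncConfigHolder.LAKEVIEW_METADATA_EXTRACTOR_LAKE_PATHS.key()
            + ".example_lake.databases.example_database.base_paths",
        "s3://example-bucket/tables");
    try (LakeviewSyncTool lakeviewSyncTool = new LakeviewSyncTool(props, new Configuration())) {
      lakeviewSyncTool.syncHoodieTable();
    }
  }
}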
|
0
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview/sync
|
java-sources/ai/onehouse/lakeview-sync-tool/0.21.0/ai/onehouse/lakeview/sync/utilities/IdentitySplitter.java
|
package ai.onehouse.lakeview.sync.utilities;
import com.beust.jcommander.converters.IParameterSplitter;
import java.util.Collections;
import java.util.List;
public class IdentitySplitter implements IParameterSplitter {
public List<String> split(String value) {
return Collections.singletonList(value);
}
}
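// Illustrative behaviour sketch (added for clarity, not part of the original source):
// JCommander's default splitter breaks parameter values on commas, which would mangle
// a --lake-paths value containing comma-separated base paths. IdentitySplitter keeps
// each value intact as a single-element list; the example value is an assumption.
class IdentitySplitterUsageSketch {
  public static void main(String[] args) {
    List<String> parts =
        new IdentitySplitter().split("lake1.databases.db1.base_paths=s3://bucket/a,s3://bucket/b");
    System.out.println(parts.size()); // prints 1
  }
}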
|
0
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client/agent/OpsMotorAgent.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.agent;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* The agent that communicates with the cloud station
*
* @since 1.0.0
*/
public final class OpsMotorAgent {
private final Map<String, OpsMotorCommand> commands = new ConcurrentHashMap<>();
private final OpsMotorAgentConfiguration config;
OpsMotorAgent(OpsMotorAgentConfiguration config) {
this.config = config;
}
public OpsMotorAgentConfiguration getConfig() {
return config;
}
void registerCommand(OpsMotorCommand.Definition definition) {
commands.put(definition.id(), new OpsMotorCommand(definition, this::submitJob));
}
void unregisterCommand(String id) {
commands.remove(id);
}
private String getCommandEndpoint(String commandId) {
return String.format("%s/agents/%s/commands/%s", config.endpoint(), config.agentId(), commandId);
}
public Map<String, OpsMotorCommand> getCommands() {
return Collections.unmodifiableMap(commands);
}
public OpsMotorCommand getCommand(String id) {
OpsMotorCommand opsMotorCommand = commands.get(id);
if (opsMotorCommand == null) {
throw new NoSuchElementException("No command with id: " + id);
}
return opsMotorCommand;
}
public OpsMotorCommand getCommandByName(String name) {
List<OpsMotorCommand> matches = new ArrayList<>();
for (OpsMotorCommand command : commands.values()) {
if (Objects.equals(name.toLowerCase().strip(), command.getName().toLowerCase().strip())) {
matches.add(command);
}
}
if (matches.isEmpty()) {
throw new NoSuchElementException("No command with name: " + name);
}
if (matches.size() > 1) {
throw new IllegalArgumentException(String.format("Too many matches (%s)!", matches.size()));
}
return matches.get(0);
}
private void submitJob(String commandId, InputStream content) throws IOException, IllegalAccessException {
try {
URL url = URI.create(getCommandEndpoint(commandId)).toURL();
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("POST");
connection.setDoOutput(true);
connection.setDoInput(true);
connection.setRequestProperty("Authorization", config.authorization());
connection.setRequestProperty("Content-Type", "application/octet-stream");
connection.setRequestProperty("Accept", "*/*");
if (content == null) {
connection.getOutputStream().write(new byte[0]);
} else {
content.transferTo(connection.getOutputStream());
}
int responseCode = connection.getResponseCode();
if (responseCode == 401) {
throw new IllegalAccessException("Unauthorized: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
} else if (responseCode == 404) {
throw new NoSuchElementException("No such command: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
} else if (responseCode < 200 || responseCode >= 300) {
throw new IOException("Non SUCCESS status: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
}
connection.disconnect();
} catch (IOException | RuntimeException cause) {
throw new IOException("Failed to submit job: " + cause.getMessage(), cause);
}
}
private String getCheckEndpoint() {
return String.format("%s/agents/%s/check", config.endpoint(), config.agentId());
}
public void check() throws IOException, IllegalAccessException {
try {
URL url = URI.create(getCheckEndpoint()).toURL();
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.setDoOutput(true);
connection.setRequestProperty("Authorization", config.authorization());
connection.setRequestProperty("Accept", "*/*");
int responseCode = connection.getResponseCode();
if (responseCode == 401) {
throw new IllegalAccessException("Unauthorized: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
} else if (responseCode < 200 || responseCode >= 300) {
throw new IOException("Non SUCCESS status: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
} else {
String protocolVersion = new String(connection.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
if (!Objects.equals(config.protocolVersion(), protocolVersion)) {
throw new IllegalStateException("Mismatch in protocol version, expected: " + config.protocolVersion() + " but got: " + protocolVersion);
}
}
connection.disconnect();
} catch (IOException | RuntimeException cause) {
throw new IOException("Failed to submit job: " + cause.getMessage(), cause);
}
}
}
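// Minimal usage sketch, assuming an oma.cfg is already discoverable by OpsMotorAgentFactory;
// the command name "backup" and the payload are hypothetical.
class OpsMotorAgentUsageExample {
    public static void main(String[] args) throws IOException, IllegalAccessException {
        OpsMotorAgent agent = OpsMotorAgentConfiguration.create();
        OpsMotorCommand backup = agent.getCommandByName("backup");
        backup.submit("{\"target\": \"nightly\"}");
    }
}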
|
0
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client/agent/OpsMotorAgentConfiguration.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.agent;
import java.io.IOException;
public record OpsMotorAgentConfiguration(String endpoint, String agentId, String authorization,
String protocolVersion) {
public static OpsMotorAgent create() throws IOException, IllegalAccessException {
return OpsMotorAgentFactory.create();
}
public static OpsMotorAgent create(String location) throws IOException, IllegalAccessException {
return OpsMotorAgentFactory.create(location);
}
}
|
0
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client/agent/OpsMotorAgentFactory.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.agent;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Base64;
import java.util.Objects;
import java.util.Properties;
public final class OpsMotorAgentFactory {
public static final String CONFIG_PROPERTY = "OMA_CONFIG";
public static final String AGENT_ID_PROPERTY = "OMA_ID";
public static final String DEFAULT_CONFIG_FILE = "oma.cfg";
public static OpsMotorAgent create() throws IOException, IllegalAccessException {
return create(determineLocation());
}
public static OpsMotorAgent create(String location) throws IOException, IllegalAccessException {
ensureFilePresent(location);
return create(loadProperties(location));
}
private static String determineLocation() {
String location = System.getProperty(CONFIG_PROPERTY, System.getenv(CONFIG_PROPERTY));
if (location != null) {
return location;
}
if (Files.exists(Path.of(DEFAULT_CONFIG_FILE))) {
return DEFAULT_CONFIG_FILE;
}
return File.separatorChar + "etc" + File.separatorChar + "opsmotor" + File.separatorChar + DEFAULT_CONFIG_FILE;
}
private static void ensureFilePresent(String location) throws IOException {
if (!Files.exists(Path.of(location))) {
throw new NoSuchFileException("No OpsMotor configuration file found at: " + location);
}
}
private static Properties loadProperties(String location) throws IOException {
try (FileInputStream in = new FileInputStream(location)) {
Properties properties = new Properties();
properties.load(in);
return properties;
} catch (IOException | RuntimeException cause) {
throw new IOException("Failed to load OpsMotor configuration file at '" + location + "': " + cause.getMessage(), cause);
}
}
private static OpsMotorAgent create(Properties properties) throws IOException, IllegalAccessException {
ensureIsJava(properties);
OpsMotorAgentConfiguration configuration = new OpsMotorAgentConfiguration(
getProperty(properties, "OPSMOTOR_CLOUD_STATION_URL"),
determineAgentId(properties),
createAuthorization(properties),
getProperty(properties, "OPSMOTOR_AGENT_PROTOCOL_VERSION"));
OpsMotorAgent opsMotorAgent = create(configuration);
for (Object key : properties.keySet()) {
String keyName = (String) key;
if (keyName.startsWith("CMD_")) {
String commandId = keyName.split("_")[1];
String name = properties.getProperty(String.format("CMD_%s_COMMAND", commandId));
opsMotorAgent.registerCommand(new OpsMotorCommand.Definition(commandId, name));
}
}
return opsMotorAgent;
}
public static OpsMotorAgent create(OpsMotorAgentConfiguration configuration) throws IOException, IllegalAccessException {
OpsMotorAgent opsMotorAgent = new OpsMotorAgent(configuration);
opsMotorAgent.check();
return opsMotorAgent;
}
private static void ensureIsJava(Properties properties) {
if (!Objects.equals("java", getProperty(properties, "OPSMOTOR_PLATFORM"))) {
throw new IllegalArgumentException("Invalid configuration file, not a Java config file!");
}
}
private static String determineAgentId(Properties properties) {
String property = getProperty(properties, "AGENT_ID_COMMAND");
String agentId;
if (Objects.equals("__ENV__", property)) {
agentId = System.getProperty(AGENT_ID_PROPERTY, System.getenv(AGENT_ID_PROPERTY));
} else {
agentId = property.strip();
}
if (agentId == null || agentId.isBlank()) {
throw new IllegalArgumentException("No agent id found!");
}
return agentId;
}
private static String createAuthorization(Properties properties) {
String username = getProperty(properties, "OPSMOTOR_CLOUD_STATION_SUBSCRIPTION");
String password = getProperty(properties, "OPSMOTOR_SECRET_ACCESS_KEY");
String auth = username + ":" + password;
String encodedAuth = Base64.getEncoder().encodeToString(auth.getBytes(StandardCharsets.UTF_8));
return "Basic " + encodedAuth;
}
private static String getProperty(Properties properties, String propertyName) {
Objects.requireNonNull(propertyName);
String property = Quotes.unquote(properties.getProperty(propertyName));
if (property == null || property.isBlank()) {
throw new IllegalArgumentException("No property '" + propertyName + "' is set!");
}
return property;
}
private OpsMotorAgentFactory() {
}
}
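// Illustrative oma.cfg contents matching the keys read by create(Properties) above;
// every value shown is hypothetical, and AGENT_ID_COMMAND may instead be set to __ENV__
// to resolve the agent id from the OMA_ID system property or environment variable:
//
//   OPSMOTOR_PLATFORM=java
//   OPSMOTOR_CLOUD_STATION_URL=https://station.example.com
//   OPSMOTOR_AGENT_PROTOCOL_VERSION=1
//   AGENT_ID_COMMAND=agent-42
//   OPSMOTOR_CLOUD_STATION_SUBSCRIPTION=example-subscription
//   OPSMOTOR_SECRET_ACCESS_KEY=example-secret-key
//   CMD_1001_COMMAND=backup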
|
0
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client/agent/OpsMotorCommand.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.agent;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
public class OpsMotorCommand {
public record Definition(String id, String name) {
}
interface Submitter {
void submitJob(String id, InputStream content) throws IllegalAccessException, IOException;
}
private final Definition definition;
private final Submitter submitter;
OpsMotorCommand(Definition definition, Submitter submitter) {
this.definition = definition;
this.submitter = submitter;
}
public String getId() {
return definition.id();
}
public String getName() {
return definition.name();
}
public void run() {
throw new UnsupportedOperationException("We can't run commands!");
}
public void submit(String content) throws IllegalAccessException, IOException {
submit(content.getBytes(StandardCharsets.UTF_8));
}
public void submit(byte[] content) throws IllegalAccessException, IOException {
try (InputStream in = new ByteArrayInputStream(content)) {
submit(in);
}
}
public void submit(InputStream content) throws IllegalAccessException, IOException {
submitter.submitJob(getId(), content);
}
}
|
0
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-agent/1.0.1/ai/opsmotor/client/agent/Quotes.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.agent;
/**
* Utility class for unquoting strings
*
* @since 1.0.0
*/
public final class Quotes {
/**
* Unquotes a string that might contain single and double quotes, and escape characters
*
* @param quoted the quoted string
* @return the unquoted string
*/
public static String unquote(String quoted) {
String unquoted;
if (quoted == null) {
unquoted = null;
} else if (quoted.startsWith("'")) {
unquoted = unquoteSingle(quoted);
} else if (quoted.startsWith("\"")) {
unquoted = unquoteDouble(quoted);
} else {
unquoted = unquotedEscaped(quoted);
}
return unquoted;
}
private static String unquoteSingle(String quoted) {
if (!quoted.startsWith("'")) {
throw new IllegalStateException("Not a single quote!");
}
int i;
for (i = 1; i < quoted.length() && quoted.charAt(i) != '\''; i++) ;
String first = quoted.substring(1, i);
String second = i == quoted.length() - 1 ? "" : unquote(quoted.substring(i + 1));
return first + second;
}
private static String unquoteDouble(String property) {
if (property == null) {
return null;
}
StringBuilder unquoted = new StringBuilder();
for (int i = 0; i < property.length(); i++) {
char c = property.charAt(i);
if (c == '\\') {
i++;
c = unescape(property.charAt(i));
} else if (c == '\'') {
return unquoted + unquote(property.substring(i));
} else if (c == '"') {
String first = unquoted.toString();
String second = i == property.length() - 1 ? "" : unquote(property.substring(i + 1));
return first + second;
}
unquoted.append(c);
}
return unquoted.toString();
}
private static String unquotedEscaped(String quoted) {
if (quoted == null) {
return null;
}
StringBuilder unquoted = new StringBuilder();
for (int i = 0; i < quoted.length(); i++) {
char c = quoted.charAt(i);
if (c == '\\') {
i++;
c = unescape(quoted.charAt(i));
} else if (c == '\'' || c == '"') {
return unquoted + unquote(quoted.substring(i));
}
unquoted.append(c);
}
return unquoted.toString();
}
private static char unescape(char c) {
return switch (c) {
case 'n' -> '\n';
case 't' -> '\t';
case 'r' -> '\r';
case 'f' -> '\f';
default -> c;
};
}
private Quotes() {
}
}
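// Minimal behaviour sketch for unquote(); the literals below are illustrative only.
class QuotesUsageExample {
    public static void main(String[] args) {
        System.out.println(Quotes.unquote("'hello world'"));       // hello world
        System.out.println(Quotes.unquote("\"tab\\tseparated\"")); // tab<TAB>separated, \t is unescaped
        System.out.println(Quotes.unquote("plain"));               // plain (returned unchanged)
        System.out.println(Quotes.unquote(null));                  // null
    }
}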
|
0
|
java-sources/ai/opsmotor/client/client-license/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-license/1.0.1/ai/opsmotor/client/license/Authorization.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.license;
public record Authorization(String licenseKey, String deploymentName, String tenantSubdomain, String action) {
}
|
0
|
java-sources/ai/opsmotor/client/client-license/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-license/1.0.1/ai/opsmotor/client/license/LicenseClient.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.license;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JacksonException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.NoSuchElementException;
public final class LicenseClient {
private static final ObjectMapper objectMapper = new ObjectMapper();
static {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
}
private final String licenseServerUrl;
private final String deploymentName;
private final String licenseKey;
LicenseClient(String licenseServerUrl, String deploymentName, String licenseKey) {
this.licenseServerUrl = licenseServerUrl;
this.deploymentName = deploymentName;
this.licenseKey = licenseKey;
}
public void authorize(String tenantSubdomain, String action) throws IllegalAccessException {
try {
URL url = URI.create(licenseServerUrl + "/api/v1/license").toURL();
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("POST");
connection.setDoOutput(true);
connection.setDoInput(true);
connection.setRequestProperty("Content-Type", "application/json");
connection.setRequestProperty("Accept", "*/*");
Authorization authorization = new Authorization(licenseKey, deploymentName, tenantSubdomain, action);
connection.getOutputStream().write(toJsonUtf8Bytes(authorization));
int responseCode = connection.getResponseCode();
if (responseCode == 401) {
throw new IllegalAccessException("Unauthorized: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
} else if (responseCode == 404) {
throw new NoSuchElementException("No such command: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
} else if (responseCode < 200 || responseCode >= 300) {
throw new IOException("Non SUCCESS status: " + new String(connection.getErrorStream().readAllBytes(), StandardCharsets.UTF_8));
}
connection.disconnect();
} catch (IOException | RuntimeException cause) {
throw new RuntimeException("Failed to authorize: " + cause.getMessage(), cause);
}
}
private static String toJson(Object value) {
try {
return objectMapper.writeValueAsString(value);
} catch (JacksonException cause) {
throw new IllegalArgumentException("Failed to serialize json: " + cause, cause);
}
}
private static byte[] toJsonUtf8Bytes(Object value) {
return toJson(value).getBytes(StandardCharsets.UTF_8);
}
public static void main(String[] args) throws IllegalAccessException {
// Given
LicenseClientFactory factory = LicenseClientFactory.getDefault();
LicenseClient client = factory.create("om-eng.opsmotor.net", "fp3f3fj34f3f4333");
// When
client.authorize("acme", "compliance");
}
}
|
0
|
java-sources/ai/opsmotor/client/client-license/1.0.1/ai/opsmotor/client
|
java-sources/ai/opsmotor/client/client-license/1.0.1/ai/opsmotor/client/license/LicenseClientFactory.java
|
/*
* Copyright (c) 2025, OpsMotor Inc. All rights reserved.
*
* This software is the proprietary information of OpsMotor Inc.
* Use is subject to license terms.
*
* This code is provided "as is" without warranty of any kind, either expressed
* or implied, including but not limited to the implied warranties of
* merchantability and fitness for a particular purpose.
*
* For more information, please contact OpsMotor Inc at support@opsmotor.com.
*/
package ai.opsmotor.client.license;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
public final class LicenseClientFactory {
public static final String DEFAULT_LICENSE_SERVER_URL = "https://license.opsmotor.net";
private static final Map<String, LicenseClientFactory> factories = new HashMap<>();
private final String licenseServerUrl;
public static LicenseClientFactory getDefault() {
return get(DEFAULT_LICENSE_SERVER_URL);
}
public static synchronized LicenseClientFactory get(String licenseServerUrl) {
Objects.requireNonNull(licenseServerUrl, "licenseServerUrl must be set!");
if (licenseServerUrl.strip().isBlank()) {
throw new IllegalArgumentException("licenseServerUrl must be set!");
}
return factories.computeIfAbsent(licenseServerUrl, LicenseClientFactory::new);
}
private LicenseClientFactory(String licenseServerUrl) {
this.licenseServerUrl = licenseServerUrl;
}
public LicenseClient create(String deploymentName, String licenseKey) {
Objects.requireNonNull(licenseServerUrl, "License server URL must be set!");
String normalizedDeploymentName = Objects.requireNonNull(deploymentName, "Deployment name must be set!").strip();
String normalizedLicenseKey = Objects.requireNonNull(licenseKey, "License key must be set!").strip();
if (normalizedDeploymentName.isBlank()) {
throw new IllegalArgumentException("Deployment name must be set!");
}
if (normalizedLicenseKey.isBlank()) {
throw new IllegalArgumentException("License key must be set!");
}
return new LicenseClient(licenseServerUrl, normalizedDeploymentName, normalizedLicenseKey);
}
}
|
0
|
java-sources/ai/optfor/spring-openai-api/0.3.25/ai/optfor
|
java-sources/ai/optfor/spring-openai-api/0.3.25/ai/optfor/springopenaiapi/OpenAIApi.java
|
package ai.optfor.springopenaiapi;
import ai.optfor.springopenaiapi.cache.DefaultPromptCache;
import ai.optfor.springopenaiapi.cache.PromptCache;
import ai.optfor.springopenaiapi.enums.EmbedModel;
import ai.optfor.springopenaiapi.enums.LLMModel;
import ai.optfor.springopenaiapi.enums.TTSModel;
import ai.optfor.springopenaiapi.enums.TTSVoice;
import ai.optfor.springopenaiapi.model.*;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import io.micrometer.common.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.ClientHttpRequestInterceptor;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import static ai.optfor.springopenaiapi.enums.Role.*;
import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
import static org.springframework.http.MediaType.*;
public class OpenAIApi {
private static final Logger log = LoggerFactory.getLogger(OpenAIApi.class);
private final ObjectMapper mapper = new ObjectMapper();
private final PromptCache promptCache;
private final ExecutorService executorService;
public OpenAIApi(PromptCache promptCache) {
this.promptCache = (promptCache == null ? new DefaultPromptCache() : promptCache);
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.enable(SerializationFeature.INDENT_OUTPUT);
this.executorService = Executors.newFixedThreadPool(3);
}
public Flux<String> streamingChat(LLMModel model, String system, String user, String assistant, Integer maxTokens, double temperature, Map<Integer, Integer> logit_bias, String openaiKey) {
return streamingChat(model, List.of(SYSTEM.message(system), USER.message(user), ASSISTANT.message(assistant)), maxTokens, temperature, logit_bias, openaiKey);
}
public Flux<String> streamingChat(LLMModel model, List<ChatMessage> messages, Integer maxTokens, double temperature, Map<Integer, Integer> logit_bias, String openaiKey) {
log.info("\nCalling OpenAI API:\n" +
"Model: " + model + " Max tokens:" + maxTokens + " Temperature:" + temperature + "\n" +
messages.stream().map(chatMessage -> chatMessage.role() + ":\n" +
chatMessage.content()).collect(java.util.stream.Collectors.joining("\n")));
ChatCompletionRequest request = new ChatCompletionRequest(model.getApiName(),
messages, temperature, maxTokens, null, true, logit_bias);
String json;
try {
json = mapper.writeValueAsString(request);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
StringBuilder fullResponse = new StringBuilder();
long start = System.currentTimeMillis();
Flux<String> result = WebClient.builder()
.baseUrl("https://api.openai.com/v1/chat/completions")
.defaultHeader("Authorization", "Bearer " + openaiKey)
.build()
.post()
.contentType(APPLICATION_JSON)
.bodyValue(json)
.accept(TEXT_EVENT_STREAM)
.exchangeToFlux((r -> r.bodyToFlux(String.class)))
.takeWhile(response -> !response.equals("[DONE]"))
.handle((jsonResponse, sink) -> {
try {
String delta = mapper.readValue(jsonResponse, ChatCompletionResponse.class).getDelta();
if (delta == null) {
delta = "";
}
fullResponse.append(delta);
sink.next(delta);
} catch (JsonProcessingException e) {
sink.error(new RuntimeException("Error while processing JSON response", e));
}
});
return result.doOnComplete(() -> {
long end = System.currentTimeMillis();
double seconds = ((double) (end - start)) / 1000;
log.info("\nReceived response from OpenAI API: " + seconds + " s.(" + fullResponse + ")");
});
}
public ChatCompletionResponse vision(LLMModel model, List<VisionMessage> messages, Integer maxTokens, double temperature, Map<Integer, Integer> logit_bias, String openaiKey) {
VisionCompletionRequest request = new VisionCompletionRequest(model.getApiName(), messages, temperature, maxTokens, false, logit_bias);
return prepareRestTemplate(openaiKey).postForObject("https://api.openai.com/v1/chat/completions", request, ChatCompletionResponse.class);
}
public Flux<String> visionStreaming(LLMModel model, List<VisionMessage> messages, Integer maxTokens, double temperature, Map<Integer, Integer> logit_bias, String openaiKey) {
log.info("\nCalling OpenAI API:\n" +
"Model: " + model + " Max tokens:" + maxTokens + " Temperature:" + temperature + "\n" +
messages.stream().map(chatMessage -> chatMessage.role() + ":\n" +
chatMessage.content()).collect(java.util.stream.Collectors.joining("\n")));
VisionCompletionRequest request = new VisionCompletionRequest(model.getApiName(), messages, temperature, maxTokens, true, logit_bias);
String json;
try {
json = mapper.writeValueAsString(request);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
StringBuilder fullResponse = new StringBuilder();
long start = System.currentTimeMillis();
Flux<String> result = WebClient.builder()
.baseUrl("https://api.openai.com/v1/chat/completions")
.defaultHeader("Authorization", "Bearer " + openaiKey)
.build()
.post()
.contentType(APPLICATION_JSON)
.bodyValue(json)
.accept(TEXT_EVENT_STREAM)
.exchangeToFlux((r -> r.bodyToFlux(String.class)))
.takeWhile(response -> !response.equals("[DONE]"))
.handle((jsonResponse, sink) -> {
try {
String delta = mapper.readValue(jsonResponse, ChatCompletionResponse.class).getDelta();
if (delta == null) {
delta = "";
}
fullResponse.append(delta);
sink.next(delta);
} catch (JsonProcessingException e) {
sink.error(new RuntimeException("Error while processing JSON response", e));
}
});
return result.doOnComplete(() -> {
long end = System.currentTimeMillis();
double seconds = ((double) (end - start)) / 1000;
log.info("\nReceived response from OpenAI API: " + seconds + " s.(" + fullResponse + ")");
});
}
public byte[] createSpeech(TTSModel model, String input, TTSVoice voice, String openaiKey) {
RestTemplate restTemplate = prepareRestTemplate(openaiKey);
ResponseEntity<byte[]> response = restTemplate.postForEntity("https://api.openai.com/v1/audio/speech",
new STTRequest(model.getApiName(), input, voice.toApiName()), byte[].class
);
if (response.hasBody()) {
return response.getBody();
} else {
throw new RuntimeException("Failed to get audio response from OpenAI API");
}
}
public String transcribeAudio(byte[] audioBytes, String languageKey, String openaiKey) {
// Create an anonymous subclass of ByteArrayResource to override the filename
Resource audioResource = new ByteArrayResource(audioBytes) {
@Override
public String getFilename() {
return "audio.oga";
}
};
MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
body.add("file", audioResource);
body.add("model", "whisper-1");
body.add("language", languageKey);
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MULTIPART_FORM_DATA);
HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body, headers);
String url = "https://api.openai.com/v1/audio/transcriptions";
ResponseEntity<String> response = prepareRestTemplate(openaiKey).postForEntity(url, requestEntity, String.class);
if (response.getStatusCode().is2xxSuccessful()) {
try {
return mapper.readValue(response.getBody(), AudioResponse.class).text();
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
} else {
throw new RuntimeException(response.toString());
}
}
public ChatCompletionResponse chat(LLMModel model, String system, String user, Integer maxTokens, double temperature, boolean jsonMode, String openaiKey) {
return chat(model, List.of(SYSTEM.message(system), USER.message(user)), maxTokens, temperature, jsonMode, null, openaiKey);
}
public ChatCompletionResponse chat(LLMModel model, String system, String user, String assistant, Integer maxTokens, double temperature, boolean jsonMode, String openaiKey) {
return chat(model, List.of(SYSTEM.message(system), USER.message(user), ASSISTANT.message(assistant)), maxTokens, temperature, jsonMode, null, openaiKey);
}
public ChatCompletionResponse chat(LLMModel model, List<ChatMessage> chats, int maxTokens, double temperature, boolean jsonMode, Map<Integer, Integer> logitBias, String openaiKey) {
List<ChatMessage> filteredChats = chats.stream().filter(c -> !StringUtils.isBlank(c.content())).toList();
Future<ChatCompletionResponse> future = executorService.submit(() -> chatInternal(model, filteredChats, maxTokens, temperature, jsonMode, logitBias, openaiKey));
try {
return future.get();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private ChatCompletionResponse chatInternal(LLMModel model, List<ChatMessage> chats, int maxTokens, double temperature, boolean jsonMode,
Map<Integer, Integer> logitBias, String openaiKey) {
log.info("\nCalling OpenAI API:\n" +
"Model: " + model + " Max tokens:" + maxTokens + " Temperature:" + temperature + "\n" +
chats.stream().map(chatMessage -> chatMessage.role() + ":\n" +
chatMessage.content()).collect(java.util.stream.Collectors.joining("\n")));
RestTemplate restTemplate = prepareRestTemplate(openaiKey);
int retryCount = 0;
while (true) {
try {
ChatCompletionRequest request = new ChatCompletionRequest(
model.getApiName(), chats, temperature, maxTokens, jsonMode ? new ResponseFormat("json_object") : null, false, logitBias);
if (Double.compare(temperature, 0) == 0) {
String cached = promptCache.get(createKey(model, chats, maxTokens));
if (cached != null) {
ChatCompletionResponse response = mapper.readValue(cached, ChatCompletionResponse.class);
log.info("\nReturning cached response: {}", mapper.writeValueAsString(response));
return response;
}
}
long start = System.currentTimeMillis();
ChatCompletionResponse response = restTemplate.postForObject("https://api.openai.com/v1/chat/completions",
request, ChatCompletionResponse.class);
long end = System.currentTimeMillis();
double seconds = ((double) (end - start)) / 1000;
log.info("\nReceived response from OpenAI API: " + seconds + " s.(" +
(response.usage().completion_tokens() / seconds) + " TPS) {}", mapper.writeValueAsString(response));
if (Double.compare(temperature, 0) == 0) {
promptCache.put(createKey(model, chats, maxTokens), mapper.writeValueAsString(response));
}
return response;
} catch (Exception e) {
if (++retryCount == 3) throw new RuntimeException(e);
}
}
}
public EmbeddingResponse embedding(EmbedModel model, String content, String openaiKey) {
return embedding(model, List.of(content), openaiKey);
}
public EmbeddingResponse embedding(EmbedModel model, List<String> content, String openaiKey) {
RestTemplate restTemplate = prepareRestTemplate(openaiKey);
EmbeddingRequest request = new EmbeddingRequest(model.getApiName(), content);
try {
log.info("Sending request to OpenAI API: {}", mapper.writeValueAsString(request));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
return restTemplate.postForObject("https://api.openai.com/v1/embeddings", request, EmbeddingResponse.class);
}
private String createKey(LLMModel model, List<ChatMessage> chats, int maxTokens) {
return model.getApiName() + chats + maxTokens;
}
private RestTemplate prepareRestTemplate(String openaiKey) {
RestTemplate restTemplate = new RestTemplate();
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
requestFactory.setConnectTimeout(1000 * 5);
requestFactory.setReadTimeout(1000 * 60 * 10);
restTemplate.setRequestFactory(requestFactory);
ClientHttpRequestInterceptor interceptor = (request, body, execution) -> {
request.getHeaders().add("Authorization", "Bearer " + openaiKey);
return execution.execute(request, body);
};
restTemplate.setInterceptors(List.of(interceptor));
return restTemplate;
}
}
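// Minimal usage sketch; the model constant, prompts, token limit, and key lookup are
// assumptions, and the concrete LLMModel values depend on the enum shipped with this library.
class OpenAIApiUsageExample {
    public static void main(String[] args) {
        OpenAIApi api = new OpenAIApi(null); // null falls back to DefaultPromptCache
        ChatCompletionResponse response = api.chat(
                LLMModel.values()[0],        // substitute a concrete model constant
                "You are a terse assistant.",
                "Summarise the chat completions endpoint in one sentence.",
                256, 0.0, false,
                System.getenv("OPENAI_API_KEY"));
        // With temperature 0.0 the response is also stored in the prompt cache.
        System.out.println(response);
    }
}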
|
0
|
java-sources/ai/optfor/spring-openai-api/0.3.25/ai/optfor/springopenaiapi
|
java-sources/ai/optfor/spring-openai-api/0.3.25/ai/optfor/springopenaiapi/cache/DefaultPromptCache.java
|
package ai.optfor.springopenaiapi.cache;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class DefaultPromptCache implements PromptCache {
private final Map<String, String> cache = new ConcurrentHashMap<>();
@Override
public void put(String key, String response) {
cache.put(key, response);
}
@Override
public String get(String key) {
return cache.get(key);
}
}
|
0
|
java-sources/ai/optfor/spring-openai-api/0.3.25/ai/optfor/springopenaiapi
|
java-sources/ai/optfor/spring-openai-api/0.3.25/ai/optfor/springopenaiapi/cache/PromptCache.java
|
package ai.optfor.springopenaiapi.cache;
public interface PromptCache {
void put(String key, String response);
String get(String key);
}
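// Sketch of an alternative, size-bounded implementation (assumption: callers can tolerate
// evicted entries simply re-hitting the API); the 1000-entry cap is arbitrary.
class BoundedPromptCache implements PromptCache {
    private static final int MAX_ENTRIES = 1000;
    private final java.util.Map<String, String> cache =
            java.util.Collections.synchronizedMap(
                    new java.util.LinkedHashMap<String, String>(16, 0.75f, true) {
                        @Override
                        protected boolean removeEldestEntry(java.util.Map.Entry<String, String> eldest) {
                            return size() > MAX_ENTRIES;
                        }
                    });

    @Override
    public void put(String key, String response) {
        cache.put(key, response);
    }

    @Override
    public String get(String key) {
        return cache.get(key);
    }
}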
|