id (int64, 1–194k) | buggy (string, lengths 23–37.5k) | fixed (string, lengths 6–37.4k) |
---|---|---|
301 | <BUG>package com.cronutils.descriptor;
import com.cronutils.model.field.expression.FieldExpression;</BUG>
import com.cronutils.model.field.expression.On;
<BUG>import org.joda.time.DateTime;
import java.util.ResourceBundle;
import java.util.function.Function;</BUG>
class DescriptionStrategyFactory {
| package com.cronutils.descriptor;
import java.time.DayOfWeek;
import java.time.Month;
import java.time.format.TextStyle;
import java.util.ResourceBundle;
import java.util.function.Function;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
class DescriptionStrategyFactory {
|
302 | import java.util.ResourceBundle;
import java.util.function.Function;</BUG>
class DescriptionStrategyFactory {
private DescriptionStrategyFactory() {}
public static DescriptionStrategy daysOfWeekInstance(final ResourceBundle bundle, final FieldExpression expression) {
<BUG>final Function<Integer, String> nominal = integer -> new DateTime().withDayOfWeek(integer).dayOfWeek().getAsText(bundle.getLocale());
</BUG>
NominalDescriptionStrategy dow = new NominalDescriptionStrategy(bundle, nominal, expression);
dow.addDescription(fieldExpression -> {
if (fieldExpression instanceof On) {
| import java.util.ResourceBundle;
import java.util.function.Function;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
class DescriptionStrategyFactory {
private DescriptionStrategyFactory() {}
public static DescriptionStrategy daysOfWeekInstance(final ResourceBundle bundle, final FieldExpression expression) {
final Function<Integer, String> nominal = integer -> DayOfWeek.of(integer).getDisplayName(TextStyle.FULL, bundle.getLocale());
NominalDescriptionStrategy dow = new NominalDescriptionStrategy(bundle, nominal, expression);
dow.addDescription(fieldExpression -> {
if (fieldExpression instanceof On) {
|
303 | return dom;
}
public static DescriptionStrategy monthsInstance(final ResourceBundle bundle, final FieldExpression expression) {
return new NominalDescriptionStrategy(
bundle,
<BUG>integer -> new DateTime().withMonthOfYear(integer).monthOfYear().getAsText(bundle.getLocale()),
expression</BUG>
);
}
public static DescriptionStrategy plainInstance(ResourceBundle bundle, final FieldExpression expression) {
| return dom;
}
public static DescriptionStrategy monthsInstance(final ResourceBundle bundle, final FieldExpression expression) {
return new NominalDescriptionStrategy(
bundle,
integer -> Month.of(integer).getDisplayName(TextStyle.FULL, bundle.getLocale()),
expression
);
}
public static DescriptionStrategy plainInstance(ResourceBundle bundle, final FieldExpression expression) {
|
304 | package com.cronutils.model.time.generator;
import java.util.Collections;
<BUG>import java.util.List;
import org.apache.commons.lang3.Validate;</BUG>
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.expression.FieldExpression;
public abstract class FieldValueGenerator {
| package com.cronutils.model.time.generator;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang3.Validate;
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.expression.FieldExpression;
public abstract class FieldValueGenerator {
|
305 | <BUG>package com.cronutils.model.time.generator;
import com.cronutils.mapper.WeekDay;</BUG>
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.constraint.FieldConstraintsBuilder;
| package com.cronutils.model.time.generator;
import java.time.LocalDate;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.Validate;
import com.cronutils.mapper.WeekDay;
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.constraint.FieldConstraintsBuilder;
|
306 | import com.cronutils.model.field.expression.Between;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.parser.CronParserField;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
<BUG>import org.apache.commons.lang3.Validate;
import org.joda.time.DateTime;
import java.util.Collections;
import java.util.List;
import java.util.Set;</BUG>
class BetweenDayOfWeekValueGenerator extends FieldValueGenerator {
| import com.cronutils.model.field.expression.Between;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.parser.CronParserField;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
class BetweenDayOfWeekValueGenerator extends FieldValueGenerator {
|
307 | package com.cronutils.mapper;
public class ConstantsMapper {
private ConstantsMapper() {}
public static final WeekDay QUARTZ_WEEK_DAY = new WeekDay(2, false);
<BUG>public static final WeekDay JODATIME_WEEK_DAY = new WeekDay(1, false);
</BUG>
public static final WeekDay CRONTAB_WEEK_DAY = new WeekDay(1, true);
public static int weekDayMapping(WeekDay source, WeekDay target, int weekday){
return source.mapTo(weekday, target);
| package com.cronutils.mapper;
public class ConstantsMapper {
private ConstantsMapper() {}
public static final WeekDay QUARTZ_WEEK_DAY = new WeekDay(2, false);
public static final WeekDay JAVA8 = new WeekDay(1, false);
public static final WeekDay CRONTAB_WEEK_DAY = new WeekDay(1, true);
public static int weekDayMapping(WeekDay source, WeekDay target, int weekday){
return source.mapTo(weekday, target);
|
308 | <BUG>package com.cronutils.model.time.generator;
import com.cronutils.model.field.CronField;</BUG>
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
| package com.cronutils.model.time.generator;
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.util.List;
import org.apache.commons.lang3.Validate;
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
|
309 | import com.cronutils.model.field.CronField;</BUG>
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
import com.google.common.collect.Lists;
<BUG>import org.apache.commons.lang3.Validate;
import org.joda.time.DateTime;
import java.util.List;</BUG>
class OnDayOfMonthValueGenerator extends FieldValueGenerator {
private int year;
| package com.cronutils.model.time.generator;
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.util.List;
import org.apache.commons.lang3.Validate;
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
import com.google.common.collect.Lists;
class OnDayOfMonthValueGenerator extends FieldValueGenerator {
private int year;
|
310 | class OnDayOfMonthValueGenerator extends FieldValueGenerator {
private int year;
private int month;
public OnDayOfMonthValueGenerator(CronField cronField, int year, int month) {
super(cronField);
<BUG>Validate.isTrue(CronFieldName.DAY_OF_MONTH.equals(cronField.getField()), "CronField does not belong to day of month");
this.year = year;</BUG>
this.month = month;
}
| class OnDayOfMonthValueGenerator extends FieldValueGenerator {
private int year;
private int month;
public OnDayOfMonthValueGenerator(CronField cronField, int year, int month) {
super(cronField);
Validate.isTrue(CronFieldName.DAY_OF_MONTH.equals(cronField.getField()), "CronField does not belong to day of" +
" month");
this.year = year;
this.month = month;
}
|
311 | }
return value;
}
@Override
public int generatePreviousValue(int reference) throws NoSuchValueException {
<BUG>On on = ((On)cronField.getExpression());
</BUG>
int value = generateValue(on, year, month);
<BUG>if(value>=reference){
</BUG>
throw new NoSuchValueException();
| }
@Override
public int generateNextValue(int reference) throws NoSuchValueException {
On on = ((On) cronField.getExpression());
int value = generateValue(on, year, month);
if (value <= reference) {
|
312 | int reference = generateNextValue(start);
<BUG>while(reference<end){
</BUG>
values.add(reference);
<BUG>reference=generateNextValue(reference);
</BUG>
}
<BUG>} catch (NoSuchValueException e) {}
return values;</BUG>
}
| int reference = generateNextValue(start);
while (reference < end) {
values.add(reference);
reference = generateNextValue(reference);
}
} catch (NoSuchValueException e) {
}
return values;
}
|
313 | this.maxParallelReplicas = getSetting(info, MAX_PARALLEL_REPLICAS);
this.totalsMode = getSetting(info, TOTALS_MODE);
this.quotaKey = getSetting(info, QUOTA_KEY);
this.priority = getSetting(info, PRIORITY);
this.database = getSetting(info, DATABASE);
<BUG>this.compress = getSetting(info, COMPRESS);
this.extremes = getSetting(info, EXTREMES);
</BUG>
this.maxThreads = getSetting(info, MAX_THREADS);
| this.maxParallelReplicas = getSetting(info, MAX_PARALLEL_REPLICAS);
this.totalsMode = getSetting(info, TOTALS_MODE);
this.quotaKey = getSetting(info, QUOTA_KEY);
this.priority = getSetting(info, PRIORITY);
this.database = getSetting(info, DATABASE);
this.compress = (Boolean)getSetting(info, COMPRESS);
this.extremes = (Boolean)getSetting(info, EXTREMES);
this.maxThreads = getSetting(info, MAX_THREADS);
|
314 | package it.geosolutions.jaiext.warp;
<BUG>import it.geosolutions.jaiext.iterators.RandomIterFactory;</BUG>
import it.geosolutions.jaiext.range.Range;
<BUG>import java.awt.Rectangle;</BUG>
import java.awt.image.ColorModel;
import java.awt.image.DataBuffer;
| package it.geosolutions.jaiext.warp;
import it.geosolutions.jaiext.range.Range;
import java.awt.image.ColorModel;
import java.awt.image.DataBuffer;
|
315 | protected void computeRectByte(final PlanarImage src, final RasterAccessor dst,
<BUG>final ROI roiTile) {
RandomIter iterSource;</BUG>
final int minX, maxX, minY, maxY;
<BUG>if (extended) {
final Rectangle bounds = new Rectangle(src.getMinX() - 1, src.getMinY() - 1,
src.getWidth() + 3, src.getHeight() + 3);
iterSource = RandomIterFactory.create(src.getExtendedData(bounds, extender), bounds,
TILE_CACHED, ARRAY_CALC);</BUG>
minX = src.getMinX();
| protected void computeRectByte(final PlanarImage src, final RasterAccessor dst,
final RandomIter roiIter, boolean roiContainsTile) {
RandomIter iterSource;
final int minX, maxX, minY, maxY;
if (extended) {
iterSource = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender);
minX = src.getMinX();
|
316 | minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
<BUG>iterSource = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
minX = src.getMinX() + 1; // Left padding
maxX = src.getMaxX() - 2; // Right padding
minY = src.getMinY() + 1; // Top padding
maxY = src.getMaxY() - 2; // Bottom padding
</BUG>
}
| minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
iterSource = getRandomIterator(src, null);
minX = src.getMinX() + leftPad; // Left padding
maxX = src.getMaxX() - rightPad; // Right padding
minY = src.getMinY() + topPad; // Top padding
maxY = src.getMaxY() - bottomPad; // Bottom padding
}
|
317 | final int lineStride = dst.getScanlineStride();
final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final byte[][] data = dst.getByteDataArrays();
final float[] warpData = new float[2 * dstWidth];
<BUG>int lineOffset = 0;
if (ctable == null) { // source does not have IndexColorModel
if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
| final int lineStride = dst.getScanlineStride();
final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final byte[][] data = dst.getByteDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
if(hasROI && !roiContainsTile && roiIter == null){
throw new IllegalArgumentException("Error on creating the ROI iterator");
}
if (ctable == null) { // source does not have IndexColorModel
if (caseA || (caseB && roiContainsTile)) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
|
318 | pixelOffset += pixelStride;
} // COLS LOOP
} // ROWS LOOP
}
} else {// source has IndexColorModel
<BUG>if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
| pixelOffset += pixelStride;
} // COLS LOOP
}
} else if (caseB) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
|
319 | minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
<BUG>iterSource = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
minX = src.getMinX() + 1; // Left padding
maxX = src.getMaxX() - 2; // Right padding
minY = src.getMinY() + 1; // Top padding
maxY = src.getMaxY() - 2; // Bottom padding
</BUG>
}
| minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
iterSource = getRandomIterator(src, null);
minX = src.getMinX() + leftPad; // Left padding
maxX = src.getMaxX() - rightPad; // Right padding
minY = src.getMinY() + topPad; // Top padding
maxY = src.getMaxY() - bottomPad; // Bottom padding
}
|
320 | final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final short[][] data = dst.getShortDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
<BUG>if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
| final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final short[][] data = dst.getShortDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
if(hasROI && !roiContainsTile && roiIter == null){
throw new IllegalArgumentException("Error on creating the ROI iterator");
}
if (caseA || (caseB && roiContainsTile)) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
|
321 | minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
<BUG>iterSource = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
minX = src.getMinX() + 1; // Left padding
maxX = src.getMaxX() - 2; // Right padding
minY = src.getMinY() + 1; // Top padding
maxY = src.getMaxY() - 2; // Bottom padding
</BUG>
}
| minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
iterSource = getRandomIterator(src, null);
minX = src.getMinX() + leftPad; // Left padding
maxX = src.getMaxX() - rightPad; // Right padding
minY = src.getMinY() + topPad; // Top padding
maxY = src.getMaxY() - bottomPad; // Bottom padding
}
|
322 | final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final short[][] data = dst.getShortDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
<BUG>if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
| final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final short[][] data = dst.getShortDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
if(hasROI && !roiContainsTile && roiIter == null){
throw new IllegalArgumentException("Error on creating the ROI iterator");
}
if (caseA || (caseB && roiContainsTile)) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
|
323 | minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
<BUG>iterSource = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
minX = src.getMinX() + 1; // Left padding
maxX = src.getMaxX() - 2; // Right padding
minY = src.getMinY() + 1; // Top padding
maxY = src.getMaxY() - 2; // Bottom padding
</BUG>
}
| minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
iterSource = getRandomIterator(src, null);
minX = src.getMinX() + leftPad; // Left padding
maxX = src.getMaxX() - rightPad; // Right padding
minY = src.getMinY() + topPad; // Top padding
maxY = src.getMaxY() - bottomPad; // Bottom padding
}
|
324 | final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final int[][] data = dst.getIntDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
<BUG>if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
| final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final int[][] data = dst.getIntDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
if(hasROI && !roiContainsTile && roiIter == null){
throw new IllegalArgumentException("Error on creating the ROI iterator");
}
if (caseA || (caseB && roiContainsTile)) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
|
325 | minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
<BUG>iterSource = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
minX = src.getMinX() + 1; // Left padding
maxX = src.getMaxX() - 2; // Right padding
minY = src.getMinY() + 1; // Top padding
maxY = src.getMaxY() - 2; // Bottom padding
</BUG>
}
| minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
iterSource = getRandomIterator(src, null);
minX = src.getMinX() + leftPad; // Left padding
maxX = src.getMaxX() - rightPad; // Right padding
minY = src.getMinY() + topPad; // Top padding
maxY = src.getMaxY() - bottomPad; // Bottom padding
}
|
326 | final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final float[][] data = dst.getFloatDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
<BUG>if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
| final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final float[][] data = dst.getFloatDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
if(hasROI && !roiContainsTile && roiIter == null){
throw new IllegalArgumentException("Error on creating the ROI iterator");
}
if (caseA || (caseB && roiContainsTile)) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
|
327 | minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
<BUG>iterSource = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
minX = src.getMinX() + 1; // Left padding
maxX = src.getMaxX() - 2; // Right padding
minY = src.getMinY() + 1; // Top padding
maxY = src.getMaxY() - 2; // Bottom padding
</BUG>
}
| minX = src.getMinX();
maxX = src.getMaxX();
minY = src.getMinY();
maxY = src.getMaxY();
} else {
iterSource = getRandomIterator(src, null);
minX = src.getMinX() + leftPad; // Left padding
maxX = src.getMaxX() - rightPad; // Right padding
minY = src.getMinY() + topPad; // Top padding
maxY = src.getMaxY() - bottomPad; // Bottom padding
}
|
328 | final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final double[][] data = dst.getDoubleDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
<BUG>if (caseA) {
for (int h = 0; h < dstHeight; h++) {</BUG>
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
| final int pixelStride = dst.getPixelStride();
final int[] bandOffsets = dst.getBandOffsets();
final double[][] data = dst.getDoubleDataArrays();
final float[] warpData = new float[2 * dstWidth];
int lineOffset = 0;
if(hasROI && !roiContainsTile && roiIter == null){
throw new IllegalArgumentException("Error on creating the ROI iterator");
}
if (caseA || (caseB && roiContainsTile)) {
for (int h = 0; h < dstHeight; h++) {
int pixelOffset = lineOffset;
lineOffset += lineStride;
warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
|
329 | customTokens.put("%%mlFinalForestsPerHost%%", hubConfig.finalForestsPerHost.toString());
customTokens.put("%%mlTraceAppserverName%%", hubConfig.traceHttpName);
customTokens.put("%%mlTracePort%%", hubConfig.tracePort.toString());
customTokens.put("%%mlTraceDbName%%", hubConfig.traceDbName);
customTokens.put("%%mlTraceForestsPerHost%%", hubConfig.traceForestsPerHost.toString());
<BUG>customTokens.put("%%mlModulesDbName%%", hubConfig.modulesDbName);
}</BUG>
public void init() {
try {
LOGGER.error("PLUGINS DIR: " + pluginsDir.toString());
| customTokens.put("%%mlFinalForestsPerHost%%", hubConfig.finalForestsPerHost.toString());
customTokens.put("%%mlTraceAppserverName%%", hubConfig.traceHttpName);
customTokens.put("%%mlTracePort%%", hubConfig.tracePort.toString());
customTokens.put("%%mlTraceDbName%%", hubConfig.traceDbName);
customTokens.put("%%mlTraceForestsPerHost%%", hubConfig.traceForestsPerHost.toString());
customTokens.put("%%mlModulesDbName%%", hubConfig.modulesDbName);
customTokens.put("%%mlTriggersDbName%%", hubConfig.triggersDbName);
customTokens.put("%%mlSchemasDbName%%", hubConfig.schemasDbName);
}
public void init() {
try {
LOGGER.error("PLUGINS DIR: " + pluginsDir.toString());
|
330 | }
@RootTask
static Task<Exec.Result> exec(String parameter, int number) {
Task<String> task1 = MyTask.create(parameter);
Task<Integer> task2 = Adder.create(number, number + 2);
<BUG>return Task.ofType(Exec.Result.class).named("exec", "/bin/sh")
.in(() -> task1)</BUG>
.in(() -> task2)
.process(Exec.exec((str, i) -> args("/bin/sh", "-c", "\"echo " + i + "\"")));
}
| }
@RootTask
static Task<Exec.Result> exec(String parameter, int number) {
Task<String> task1 = MyTask.create(parameter);
Task<Integer> task2 = Adder.create(number, number + 2);
return Task.named("exec", "/bin/sh").ofType(Exec.Result.class)
.in(() -> task1)
.in(() -> task2)
.process(Exec.exec((str, i) -> args("/bin/sh", "-c", "\"echo " + i + "\"")));
}
|
331 | return args;
}
static class MyTask {
static final int PLUS = 10;
static Task<String> create(String parameter) {
<BUG>return Task.ofType(String.class).named("MyTask", parameter)
.in(() -> Adder.create(parameter.length(), PLUS))</BUG>
.in(() -> Fib.create(parameter.length()))
.process((sum, fib) -> something(parameter, sum, fib));
}
| return args;
}
static class MyTask {
static final int PLUS = 10;
static Task<String> create(String parameter) {
return Task.named("MyTask", parameter).ofType(String.class)
.in(() -> Adder.create(parameter.length(), PLUS))
.in(() -> Fib.create(parameter.length()))
.process((sum, fib) -> something(parameter, sum, fib));
}
|
332 | final String instanceField = "from instance";
final TaskContext context = TaskContext.inmem();
final AwaitingConsumer<String> val = new AwaitingConsumer<>();
@Test
public void shouldJavaUtilSerialize() throws Exception {
<BUG>Task<Long> task1 = Task.ofType(Long.class).named("Foo", "Bar", 39)
.process(() -> 9999L);
Task<String> task2 = Task.ofType(String.class).named("Baz", 40)
.in(() -> task1)</BUG>
.ins(() -> singletonList(task1))
| final String instanceField = "from instance";
final TaskContext context = TaskContext.inmem();
final AwaitingConsumer<String> val = new AwaitingConsumer<>();
@Test
public void shouldJavaUtilSerialize() throws Exception {
Task<Long> task1 = Task.named("Foo", "Bar", 39).ofType(Long.class)
.process(() -> 9999L);
Task<String> task2 = Task.named("Baz", 40).ofType(String.class)
.in(() -> task1)
.ins(() -> singletonList(task1))
|
333 | assertEquals(des.id().name(), "Baz");
assertEquals(val.awaitAndGet(), "[9999] hello 10004");
}
@Test(expected = NotSerializableException.class)
public void shouldNotSerializeWithInstanceFieldReference() throws Exception {
<BUG>Task<String> task = Task.ofType(String.class).named("WithRef")
.process(() -> instanceField + " causes an outer reference");</BUG>
serialize(task);
}
@Test
| assertEquals(des.id().name(), "Baz");
assertEquals(val.awaitAndGet(), "[9999] hello 10004");
}
@Test(expected = NotSerializableException.class)
public void shouldNotSerializeWithInstanceFieldReference() throws Exception {
Task<String> task = Task.named("WithRef").ofType(String.class)
.process(() -> instanceField + " causes an outer reference");
serialize(task);
}
@Test
|
334 | serialize(task);
}
@Test
public void shouldSerializeWithLocalReference() throws Exception {
String local = instanceField;
<BUG>Task<String> task = Task.ofType(String.class).named("WithLocalRef")
.process(() -> local + " won't cause an outer reference");</BUG>
serialize(task);
Task<String> des = deserialize();
context.evaluate(des).consume(val);
| serialize(task);
}
@Test
public void shouldSerializeWithLocalReference() throws Exception {
String local = instanceField;
Task<String> task = Task.named("WithLocalRef").ofType(String.class)
.process(() -> local + " won't cause an outer reference");
serialize(task);
Task<String> des = deserialize();
context.evaluate(des).consume(val);
|
335 | TaskContext taskContext = TaskContext.inmem();
TaskContext.Value<Long> value = taskContext.evaluate(fib92);
value.consume(f92 -> System.out.println("fib(92) = " + f92));
}
static Task<Long> create(long n) {
<BUG>TaskBuilder<Long> fib = Task.ofType(Long.class).named("Fib", n);
</BUG>
if (n < 2) {
return fib
.process(() -> n);
| TaskContext taskContext = TaskContext.inmem();
TaskContext.Value<Long> value = taskContext.evaluate(fib92);
value.consume(f92 -> System.out.println("fib(92) = " + f92));
}
static Task<Long> create(long n) {
TaskBuilder<Long> fib = Task.named("Fib", n).ofType(Long.class);
if (n < 2) {
return fib
.process(() -> n);
|
336 | }
@RootTask
public static Task<String> standardArgs(int first, String second) {
firstInt = first;
secondString = second;
<BUG>return Task.ofType(String.class).named("StandardArgs", first, second)
.process(() -> second + " " + first * 100);</BUG>
}
@Test
public void shouldParseFlags() throws Exception {
| }
@RootTask
public static Task<String> standardArgs(int first, String second) {
firstInt = first;
secondString = second;
return Task.named("StandardArgs", first, second).ofType(String.class)
.process(() -> second + " " + first * 100);
}
@Test
public void shouldParseFlags() throws Exception {
|
337 | assertThat(parsedEnum, is(CustomEnum.BAR));
}
@RootTask
public static Task<String> enums(CustomEnum enm) {
parsedEnum = enm;
<BUG>return Task.ofType(String.class).named("Enums", enm)
.process(enm::toString);</BUG>
}
@Test
public void shouldParseCustomTypes() throws Exception {
| assertThat(parsedEnum, is(CustomEnum.BAR));
}
@RootTask
public static Task<String> enums(CustomEnum enm) {
parsedEnum = enm;
return Task.named("Enums", enm).ofType(String.class)
.process(enm::toString);
}
@Test
public void shouldParseCustomTypes() throws Exception {
|
338 | assertThat(parsedType.content, is("blarg parsed for you!"));
}
@RootTask
public static Task<String> customType(CustomType myType) {
parsedType = myType;
<BUG>return Task.ofType(String.class).named("Types", myType.content)
.process(() -> myType.content);</BUG>
}
public enum CustomEnum {
BAR
| assertThat(parsedType.content, is("blarg parsed for you!"));
}
@RootTask
public static Task<String> customType(CustomType myType) {
parsedType = myType;
return Task.named("Types", myType.content).ofType(String.class)
.process(() -> myType.content);
}
public enum CustomEnum {
BAR
|
339 | package freenet.store;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
<BUG>import java.io.RandomAccessFile;
import java.util.ArrayList;</BUG>
import java.util.Arrays;
import java.util.List;
import org.tanukisoftware.wrapper.WrapperManager;
| package freenet.store;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.tanukisoftware.wrapper.WrapperManager;
|
340 | private Database keysDB;
private SecondaryDatabase accessTimeDB;
private SecondaryDatabase blockNumDB;
private RandomAccessFile storeRAF;
private RandomAccessFile keysRAF;
<BUG>private RandomAccessFile lruRAF;
private final SortedLongSet freeBlocks;</BUG>
private final String name;
private final StoreCallback callback;
private final boolean collisionPossible;
| private Database keysDB;
private SecondaryDatabase accessTimeDB;
private SecondaryDatabase blockNumDB;
private RandomAccessFile storeRAF;
private RandomAccessFile keysRAF;
private RandomAccessFile lruRAF;
private FileChannel storeFC;
private FileChannel keysFC;
private FileChannel lruFC;
private final SortedLongSet freeBlocks;
private final String name;
private final StoreCallback callback;
private final boolean collisionPossible;
|
341 | System.err.println("Keys which will be wiped anyway: "+unwantedIgnoreNums.length);
System.err.println("Keys to move: "+wantedMoveNums.length);
System.err.println("Keys to be moved over: "+unwantedMoveNums.length);
System.err.println("Free slots to be moved over: "+freeEarlySlots.length);
WrapperManager.signalStarting((int)Math.min(Integer.MAX_VALUE, (5*60*1000 + wantedMoveNums.length*1000L + alreadyDropped.size() * 100L))); // 1 per second
<BUG>byte[] buf = new byte[headerBlockSize + dataBlockSize];
long lruValue;</BUG>
byte[] keyBuf = new byte[keyLength];
t = null;
try {
| System.err.println("Keys which will be wiped anyway: "+unwantedIgnoreNums.length);
System.err.println("Keys to move: "+wantedMoveNums.length);
System.err.println("Keys to be moved over: "+unwantedMoveNums.length);
System.err.println("Free slots to be moved over: "+freeEarlySlots.length);
WrapperManager.signalStarting((int)Math.min(Integer.MAX_VALUE, (5*60*1000 + wantedMoveNums.length*1000L + alreadyDropped.size() * 100L))); // 1 per second
ByteBuffer buf = ByteBuffer.allocate(headerBlockSize + dataBlockSize);
long lruValue;
byte[] keyBuf = new byte[keyLength];
t = null;
try {
|
342 | System.err.println(e);
e.printStackTrace();
throw e;
}
entry = unwantedBlock.longValue();
<BUG>storeRAF.seek(entry * (headerBlockSize + dataBlockSize));
storeRAF.write(buf);
if(readLRU) {
lruRAF.seek(entry * 8);
lruRAF.writeLong(lruValue);</BUG>
}
| System.err.println(e);
e.printStackTrace();
throw e;
|
343 | if(readLRU) {
lruRAF.seek(entry * 8);
lruRAF.writeLong(lruValue);</BUG>
}
<BUG>if(readKey) {
keysRAF.seek(entry * keyLength);
keysRAF.write(keyBuf);</BUG>
}
DatabaseEntry routingKeyDBE = new DatabaseEntry();
DatabaseEntry blockDBE = new DatabaseEntry();
| if(readLRU) {
fcWriteLRU(entry, lruValue);
}
if(readKey) {
fcWriteKey(entry, keyBuf);
}
DatabaseEntry routingKeyDBE = new DatabaseEntry();
DatabaseEntry blockDBE = new DatabaseEntry();
|
344 | StorableBlock block = null;
if(logMINOR) Logger.minor(this, "Reading block "+storeBlock.offset+"...");
try {
byte[] header = new byte[headerBlockSize];
byte[] data = new byte[dataBlockSize];
<BUG>try {
synchronized(storeRAF) {
storeRAF.seek(storeBlock.offset*(long)(dataBlockSize+headerBlockSize));
storeRAF.readFully(header);
storeRAF.readFully(data);
}</BUG>
} catch (EOFException e) {
| StorableBlock block = null;
if(logMINOR) Logger.minor(this, "Reading block "+storeBlock.offset+"...");
try {
byte[] header = new byte[headerBlockSize];
byte[] data = new byte[dataBlockSize];
try {
fcReadStore(storeBlock.offset, header, data);
} catch (EOFException e) {
|
345 | storeBlockTupleBinding.objectToEntry(storeBlock, blockDBE);
try {
keysDB.put(t,routingkeyDBE,blockDBE);
if(fullKey == null)
fullKey = block.getFullKey();
<BUG>synchronized(storeRAF) {
if(keysRAF != null) {
keysRAF.seek(storeBlock.offset * keyLength);
keysRAF.write(fullKey);</BUG>
if(logDEBUG)
| storeBlockTupleBinding.objectToEntry(storeBlock, blockDBE);
try {
keysDB.put(t,routingkeyDBE,blockDBE);
if(fullKey == null)
fullKey = block.getFullKey();
if(keysRAF != null) {
fcWriteKey(storeBlock.offset, fullKey);
if(logDEBUG)
|
346 | } catch(KeyVerifyException ex) {
Logger.normal(this, "Does not verify ("+ex+"), setting accessTime to 0 for : "+HexUtil.bytesToHex(routingkey), ex);
synchronized(this) {
misses++;
}
<BUG>synchronized(storeRAF) {</BUG>
byte[] buf = new byte[keyLength];
for(int i=0;i<buf.length;i++) buf[i] = 0; // FIXME unnecessary?
<BUG>if(keysRAF != null) {
keysRAF.seek(storeBlock.offset * keyLength);
keysRAF.write(buf);
}</BUG>
}
| } catch(KeyVerifyException ex) {
Logger.normal(this, "Does not verify ("+ex+"), setting accessTime to 0 for : "+HexUtil.bytesToHex(routingkey), ex);
synchronized(this) {
misses++;
}
byte[] buf = new byte[keyLength];
for(int i=0;i<buf.length;i++) buf[i] = 0; // FIXME unnecessary?
if(keysRAF != null) {
fcWriteKey(storeBlock.offset, buf);
}
|
347 | import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
<BUG>import static org.apache.hadoop.fs.s3a.S3AConstants.*;
public class S3AFileSystem extends FileSystem {</BUG>
private URI uri;
private Path workingDir;
private AmazonS3Client s3;
| import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
import static org.apache.hadoop.fs.s3a.Constants.*;
public class S3AFileSystem extends FileSystem {
private URI uri;
private Path workingDir;
private AmazonS3Client s3;
|
348 | public void initialize(URI name, Configuration conf) throws IOException {
super.initialize(name, conf);
uri = URI.create(name.getScheme() + "://" + name.getAuthority());
workingDir = new Path("/user", System.getProperty("user.name")).makeQualified(this.uri,
this.getWorkingDirectory());
<BUG>String accessKey = conf.get(ACCESS_KEY, null);
String secretKey = conf.get(SECRET_KEY, null);
</BUG>
String userInfo = name.getUserInfo();
| public void initialize(URI name, Configuration conf) throws IOException {
super.initialize(name, conf);
uri = URI.create(name.getScheme() + "://" + name.getAuthority());
workingDir = new Path("/user", System.getProperty("user.name")).makeQualified(this.uri,
this.getWorkingDirectory());
String accessKey = conf.get(NEW_ACCESS_KEY, conf.get(OLD_ACCESS_KEY, null));
String secretKey = conf.get(NEW_SECRET_KEY, conf.get(OLD_SECRET_KEY, null));
String userInfo = name.getUserInfo();
|
349 | } else {
accessKey = userInfo;
}
}
AWSCredentialsProviderChain credentials = new AWSCredentialsProviderChain(
<BUG>new S3ABasicAWSCredentialsProvider(accessKey, secretKey),
new InstanceProfileCredentialsProvider(),
new S3AAnonymousAWSCredentialsProvider()
);</BUG>
bucket = name.getHost();
| } else {
accessKey = userInfo;
}
}
AWSCredentialsProviderChain credentials = new AWSCredentialsProviderChain(
new BasicAWSCredentialsProvider(accessKey, secretKey),
new InstanceProfileCredentialsProvider(),
new AnonymousAWSCredentialsProvider()
);
bucket = name.getHost();
|
350 |
awsConf.setSocketTimeout(conf.getInt(SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT));
</BUG>
s3 = new AmazonS3Client(credentials, awsConf);
<BUG>maxKeys = conf.getInt(MAX_PAGING_KEYS, DEFAULT_MAX_PAGING_KEYS);
partSize = conf.getLong(MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
partSizeThreshold = conf.getInt(MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD);
</BUG>
if (partSize < 5 * 1024 * 1024) {
| new InstanceProfileCredentialsProvider(),
new AnonymousAWSCredentialsProvider()
);
bucket = name.getHost();
ClientConfiguration awsConf = new ClientConfiguration();
awsConf.setMaxConnections(conf.getInt(NEW_MAXIMUM_CONNECTIONS, conf.getInt(OLD_MAXIMUM_CONNECTIONS, DEFAULT_MAXIMUM_CONNECTIONS)));
awsConf.setProtocol(conf.getBoolean(NEW_SECURE_CONNECTIONS, conf.getBoolean(OLD_SECURE_CONNECTIONS, DEFAULT_SECURE_CONNECTIONS)) ? Protocol.HTTPS : Protocol.HTTP);
awsConf.setMaxErrorRetry(conf.getInt(NEW_MAX_ERROR_RETRIES, conf.getInt(OLD_MAX_ERROR_RETRIES, DEFAULT_MAX_ERROR_RETRIES)));
awsConf.setSocketTimeout(conf.getInt(NEW_SOCKET_TIMEOUT, conf.getInt(OLD_SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT)));
s3 = new AmazonS3Client(credentials, awsConf);
maxKeys = conf.getInt(NEW_MAX_PAGING_KEYS, conf.getInt(OLD_MAX_PAGING_KEYS, DEFAULT_MAX_PAGING_KEYS));
partSize = conf.getLong(NEW_MULTIPART_SIZE, conf.getLong(OLD_MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE));
partSizeThreshold = conf.getInt(NEW_MIN_MULTIPART_THRESHOLD, conf.getInt(OLD_MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD));
|
351 | if (partSizeThreshold < 5 * 1024 * 1024) {
<BUG>LOG.error(MIN_MULTIPART_THRESHOLD + " must be at least 5 MB");
</BUG>
partSizeThreshold = 5 * 1024 * 1024;
}
<BUG>String cannedACLName = conf.get(CANNED_ACL, DEFAULT_CANNED_ACL);
</BUG>
if (!cannedACLName.isEmpty()) {
cannedACL = CannedAccessControlList.valueOf(cannedACLName);
} else {
| if (partSizeThreshold < 5 * 1024 * 1024) {
LOG.error(NEW_MIN_MULTIPART_THRESHOLD + " must be at least 5 MB");
partSizeThreshold = 5 * 1024 * 1024;
}
String cannedACLName = conf.get(NEW_CANNED_ACL, conf.get(OLD_CANNED_ACL, DEFAULT_CANNED_ACL));
if (!cannedACLName.isEmpty()) {
cannedACL = CannedAccessControlList.valueOf(cannedACLName);
} else {
|
352 | cannedACL = null;
}
if (!s3.doesBucketExist(bucket)) {
throw new IOException("Bucket " + bucket + " does not exist");
}
<BUG>boolean purgeExistingMultipart = conf.getBoolean(PURGE_EXISTING_MULTIPART, DEFAULT_PURGE_EXISTING_MULTIPART);
long purgeExistingMultipartAge = conf.getLong(PURGE_EXISTING_MULTIPART_AGE, DEFAULT_PURGE_EXISTING_MULTIPART_AGE);
</BUG>
if (purgeExistingMultipart) {
| cannedACL = null;
}
if (!s3.doesBucketExist(bucket)) {
throw new IOException("Bucket " + bucket + " does not exist");
}
boolean purgeExistingMultipart = conf.getBoolean(NEW_PURGE_EXISTING_MULTIPART, conf.getBoolean(OLD_PURGE_EXISTING_MULTIPART, DEFAULT_PURGE_EXISTING_MULTIPART));
long purgeExistingMultipartAge = conf.getLong(NEW_PURGE_EXISTING_MULTIPART_AGE, conf.getLong(OLD_PURGE_EXISTING_MULTIPART_AGE, DEFAULT_PURGE_EXISTING_MULTIPART_AGE));
if (purgeExistingMultipart) {
|
353 | import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
<BUG>import static org.apache.hadoop.fs.s3a.S3AConstants.*;
public class S3AOutputStream extends OutputStream {</BUG>
private OutputStream backupStream;
private File backupFile;
private boolean closed;
| import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import static org.apache.hadoop.fs.s3a.Constants.*;
public class S3AOutputStream extends OutputStream {
private OutputStream backupStream;
private File backupFile;
private boolean closed;
|
354 | this.client = client;
this.progress = progress;
this.fs = fs;
this.cannedACL = cannedACL;
this.statistics = statistics;
<BUG>partSize = conf.getLong(MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
partSizeThreshold = conf.getInt(MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD);
</BUG>
if (conf.get(BUFFER_DIR, null) != null) {
| this.client = client;
this.progress = progress;
this.fs = fs;
this.cannedACL = cannedACL;
this.statistics = statistics;
partSize = conf.getLong(OLD_MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
partSizeThreshold = conf.getInt(OLD_MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD);
if (conf.get(BUFFER_DIR, null) != null) {
|
355 | package jetbrains.buildServer.nuget.feed.server.olingo;
<BUG>import jetbrains.buildServer.util.CollectionsUtil;</BUG>
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import java.util.Enumeration;
<BUG>import java.util.Map;</BUG>
public class ODataServletConfig implements ServletConfig {
| package jetbrains.buildServer.nuget.feed.server.olingo;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import java.util.Enumeration;
|
356 | import javax.servlet.ServletContext;
import java.util.Enumeration;
<BUG>import java.util.Map;</BUG>
public class ODataServletConfig implements ServletConfig {
<BUG>private final Map<String, String> myParameters;
public ODataServletConfig() {
myParameters = CollectionsUtil.asMap("org.apache.olingo.odata2.path.split", "4");
}</BUG>
@Override
public String getServletName() {
| import javax.servlet.ServletContext;
import java.util.Enumeration;
public class ODataServletConfig implements ServletConfig {
@Override
public String getServletName() {
|
357 | public ServletContext getServletContext() {
return null;
}
@Override
public String getInitParameter(String name) {
<BUG>return myParameters.get(name);
}</BUG>
@Override
public Enumeration<String> getInitParameterNames() {
return null;
| public ServletContext getServletContext() {
return null;
}
@Override
public String getInitParameter(String name) {
return null;
}
@Override
public Enumeration<String> getInitParameterNames() {
return null;
|
358 | package jetbrains.buildServer.nuget.feed.server.olingo;
import com.intellij.openapi.diagnostic.Logger;
import jetbrains.buildServer.controllers.BaseController;
<BUG>import jetbrains.buildServer.nuget.feed.server.NuGetServerJavaSettings;</BUG>
import jetbrains.buildServer.nuget.feed.server.cache.ResponseCache;
import jetbrains.buildServer.nuget.feed.server.controllers.NuGetFeedHandler;
<BUG>import jetbrains.buildServer.nuget.feed.server.olingo.data.NuGetDataSource;
import jetbrains.buildServer.nuget.feed.server.olingo.data.NuGetDataSourceFactory;</BUG>
import jetbrains.buildServer.nuget.feed.server.olingo.processor.NuGetServiceFactory;
| package jetbrains.buildServer.nuget.feed.server.olingo;
import com.intellij.openapi.diagnostic.Logger;
import jetbrains.buildServer.controllers.BaseController;
import jetbrains.buildServer.nuget.feed.server.cache.ResponseCache;
import jetbrains.buildServer.nuget.feed.server.controllers.NuGetFeedHandler;
import jetbrains.buildServer.nuget.feed.server.olingo.processor.NuGetServiceFactory;
|
359 | <BUG>private final NuGetServerJavaSettings mySettings;
private final NuGetDataSourceFactory myDataSourceFactory;
</BUG>
private final ResponseCache myCache;
<BUG>public OlingoRequestHandler(@NotNull final NuGetServerJavaSettings settings,
@NotNull final NuGetDataSourceFactory dataSourceFactory,
@NotNull final ResponseCache cache) {
mySettings = settings;
myDataSourceFactory = dataSourceFactory;</BUG>
myCache = cache;
| import jetbrains.buildServer.web.util.WebUtil;
import org.apache.olingo.odata2.core.servlet.ODataServlet;
import org.jetbrains.annotations.NotNull;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public class OlingoRequestHandler implements NuGetFeedHandler {
private static final Logger LOG = Logger.getInstance(OlingoRequestHandler.class.getName());
private final NuGetServiceFactory myServiceFactory;
private final ResponseCache myCache;
public OlingoRequestHandler(@NotNull final NuGetServiceFactory serviceFactory,
@NotNull final ResponseCache cache) {
myServiceFactory = serviceFactory;
myCache = cache;
|
360 | processFeedRequest(request, response);
}
}
private void processFeedRequest(final HttpServletRequest request, final HttpServletResponse response) throws Exception {
LOG.debug("NuGet Feed: " + WebUtil.getRequestDump(request) + "|" + request.getRequestURI());
<BUG>final String requestUrl = String.format("%s://%s:%s", request.getScheme(), request.getServerName(), request.getServerPort());
final NuGetDataSource dataSource = myDataSourceFactory.create(new URI(requestUrl));
final NuGetServiceFactory serviceFactory = new NuGetServiceFactory(dataSource);
request.setAttribute("org.apache.olingo.odata2.service.factory.instance", serviceFactory);
</BUG>
ODataServlet servlet = new ODataServlet();
| processFeedRequest(request, response);
}
}
private void processFeedRequest(final HttpServletRequest request, final HttpServletResponse response) throws Exception {
LOG.debug("NuGet Feed: " + WebUtil.getRequestDump(request) + "|" + request.getRequestURI());
request.setAttribute("org.apache.olingo.odata2.service.factory.instance", myServiceFactory);
ODataServlet servlet = new ODataServlet();
|
361 | import jetbrains.buildServer.nuget.feed.server.NuGetAPIVersion;
import jetbrains.buildServer.nuget.feed.server.NuGetServerSettings;
import jetbrains.buildServer.nuget.feed.server.index.NuGetIndexEntry;
import jetbrains.buildServer.nuget.feed.server.index.PackagesIndex;
import jetbrains.buildServer.nuget.feed.server.index.impl.SemanticVersionsComparators;
<BUG>import jetbrains.buildServer.nuget.feed.server.olingo.model.NuGetMapper;
import jetbrains.buildServer.nuget.feed.server.olingo.model.V2FeedPackage;</BUG>
import jetbrains.buildServer.nuget.feedReader.NuGetPackageAttributes;
import jetbrains.buildServer.nuget.server.version.FrameworkConstraints;
import jetbrains.buildServer.nuget.server.version.SemanticVersion;
| import jetbrains.buildServer.nuget.feed.server.NuGetAPIVersion;
import jetbrains.buildServer.nuget.feed.server.NuGetServerSettings;
import jetbrains.buildServer.nuget.feed.server.index.NuGetIndexEntry;
import jetbrains.buildServer.nuget.feed.server.index.PackagesIndex;
import jetbrains.buildServer.nuget.feed.server.index.impl.SemanticVersionsComparators;
import jetbrains.buildServer.nuget.feedReader.NuGetPackageAttributes;
import jetbrains.buildServer.nuget.server.version.FrameworkConstraints;
import jetbrains.buildServer.nuget.server.version.SemanticVersion;
|
362 | import static jetbrains.buildServer.nuget.feedReader.NuGetPackageAttributes.VERSION;
public class NuGetDataSource {
private static final Logger LOG = Logger.getInstance(NuGetDataSource.class.getName());
private final PackagesIndex myIndex;
private final NuGetServerSettings myServerSettings;
<BUG>private final URI myRequestUri;</BUG>
public NuGetDataSource(@NotNull final PackagesIndex index,
<BUG>@NotNull final NuGetServerSettings serverSettings,
@NotNull final URI requestUri) {</BUG>
myIndex = index;
| import static jetbrains.buildServer.nuget.feedReader.NuGetPackageAttributes.VERSION;
public class NuGetDataSource {
private static final Logger LOG = Logger.getInstance(NuGetDataSource.class.getName());
private final PackagesIndex myIndex;
private final NuGetServerSettings myServerSettings;
public NuGetDataSource(@NotNull final PackagesIndex index,
@NotNull final NuGetServerSettings serverSettings) {
myIndex = index;
|
363 | final String version = (String) keys.get(NuGetPackageAttributes.VERSION);
while (indexEntries.hasNext()) {
final NuGetIndexEntry indexEntry = indexEntries.next();
final Map<String, String> attributes = indexEntry.getAttributes();
if (id != null) {
<BUG>if (!attributes.get(NuGetPackageAttributes.ID).equalsIgnoreCase(id)){
</BUG>
continue;
}
}
| final String version = (String) keys.get(NuGetPackageAttributes.VERSION);
while (indexEntries.hasNext()) {
final NuGetIndexEntry indexEntry = indexEntries.next();
final Map<String, String> attributes = indexEntry.getAttributes();
if (id != null) {
if (!attributes.get(NuGetPackageAttributes.ID).equalsIgnoreCase(id)) {
continue;
}
}
|
364 | throw new ODataNotFoundException(ODataNotFoundException.ENTITY);
}
throw new ODataNotImplementedException();
}
<BUG>public Object readData(final EdmFunctionImport function, final Map<String, Object> parameters, final Map<String, Object> keys)
throws ODataNotImplementedException, ODataNotFoundException, EdmException {
if (function.getName().equals(MetadataConstants.SEARCH_FUNCTION_NAME)) {</BUG>
return search(parameters, keys);
} else if (function.getName().equals(MetadataConstants.FIND_PACKAGES_BY_ID_FUNCTION_NAME)) {
return findPackagesById(parameters, keys);
| throw new ODataNotFoundException(ODataNotFoundException.ENTITY);
}
throw new ODataNotImplementedException();
}
@NotNull
public Object readData(@NotNull final EdmFunctionImport function,
@NotNull final Map<String, Object> parameters,
@NotNull final Map<String, Object> keys) throws ODataHttpException, EdmException {
if (function.getName().equals(MetadataConstants.SEARCH_FUNCTION_NAME)) {
return search(parameters, keys);
} else if (function.getName().equals(MetadataConstants.FIND_PACKAGES_BY_ID_FUNCTION_NAME)) {
return findPackagesById(parameters, keys);
|
365 | final List<String> packageIds = StringUtil.split((String) parameters.get(MetadataConstants.PACKAGE_IDS), "|");
final List<String> versions = StringUtil.split((String) parameters.get(MetadataConstants.VERSIONS), "|");
final List<String> versionConstraints = StringUtil.split((String) parameters.get(MetadataConstants.VERSION_CONSTRAINTS), "|");
final Set<String> targetFrameworks = new HashSet<>(StringUtil.split((String) parameters.get(MetadataConstants.TARGET_FRAMEWORKS), "|"));
final boolean includePrerelease = (Boolean) parameters.get(MetadataConstants.INCLUDE_PRERELEASE);
<BUG>final boolean includeAllVersions = (Boolean) parameters.get(MetadataConstants.INCLUDE_ALL_VERSIONS);
final List<NuGetIndexEntry> result = new ArrayList<>();
</BUG>
for (int i = 0; i < packageIds.size(); i++) {
final String requestedPackageId = packageIds.get(i);
| final List<String> packageIds = StringUtil.split((String) parameters.get(MetadataConstants.PACKAGE_IDS), "|");
final List<String> versions = StringUtil.split((String) parameters.get(MetadataConstants.VERSIONS), "|");
final List<String> versionConstraints = StringUtil.split((String) parameters.get(MetadataConstants.VERSION_CONSTRAINTS), "|");
final Set<String> targetFrameworks = new HashSet<>(StringUtil.split((String) parameters.get(MetadataConstants.TARGET_FRAMEWORKS), "|"));
final boolean includePrerelease = (Boolean) parameters.get(MetadataConstants.INCLUDE_PRERELEASE);
final boolean includeAllVersions = (Boolean) parameters.get(MetadataConstants.INCLUDE_ALL_VERSIONS);
if (packageIds.size() != versions.size() || !versionConstraints.isEmpty() && packageIds.size() != versionConstraints.size()) {
return Collections.emptyList();
}
final List<NuGetIndexEntry> packages = new ArrayList<>();
for (int i = 0; i < packageIds.size(); i++) {
final String requestedPackageId = packageIds.get(i);
|
366 | import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
<BUG>import java.util.concurrent.atomic.AtomicReference;
import org.slf4j.Logger;</BUG>
import org.slf4j.LoggerFactory;
<BUG>import org.springframework.beans.factory.DisposableBean;
import org.springframework.scheduling.TaskScheduler;</BUG>
import org.springframework.scheduling.Trigger;
| import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.Trigger;
|
367 | class LeasingVaultPropertySource extends VaultPropertySource implements DisposableBean {
private final LeaseRenewalScheduler leaseRenewal;
private int minRenewalSeconds = 10;
private int expiryThresholdSeconds = 60;
private volatile Lease lease;
<BUG>public LeasingVaultPropertySource(VaultConfigOperations operations, boolean failFast,
SecretBackendMetadata secretBackendMetadata,
TaskScheduler taskScheduler) {</BUG>
super(operations, failFast, secretBackendMetadata);
Assert.notNull(taskScheduler, "TaskScheduler must not be null");
| class LeasingVaultPropertySource extends VaultPropertySource implements DisposableBean {
private final LeaseRenewalScheduler leaseRenewal;
private int minRenewalSeconds = 10;
private int expiryThresholdSeconds = 60;
private volatile Lease lease;
public LeasingVaultPropertySource(VaultConfigOperations operations, boolean failFast,
SecretBackendMetadata secretBackendMetadata, TaskScheduler taskScheduler) {
super(operations, failFast, secretBackendMetadata);
Assert.notNull(taskScheduler, "TaskScheduler must not be null");
|
368 | Lease currentLease = this.currentLease.get();
this.currentLease.set(lease);
if (currentLease != null) {
cancelSchedule(currentLease);
}
<BUG>ScheduledFuture<?> scheduledFuture = taskScheduler.schedule(new Runnable() {
@Override</BUG>
public void run() {
try {
schedules.remove(lease);
| Lease currentLease = this.currentLease.get();
this.currentLease.set(lease);
if (currentLease != null) {
cancelSchedule(currentLease);
}
ScheduledFuture<?> scheduledFuture = taskScheduler.schedule(
new Runnable() {
@Override
public void run() {
try {
schedules.remove(lease);
|
369 | </BUG>
}
catch (Exception e) {
<BUG>logger.error("Cannot renew lease {}", lease.getLeaseId(), e);
}
}
}, new OneShotTrigger(
getRenewalSeconds(lease, minRenewalSeconds, expiryThresholdSeconds)));</BUG>
schedules.put(lease, scheduledFuture);
| if (LeaseRenewalScheduler.this.currentLease.get() != lease) {
logger.debug("Current lease has changed. Skipping renewal");
return;
}
logger.debug("Renewing lease {}", lease.getLeaseId());
LeaseRenewalScheduler.this.currentLease.compareAndSet(
lease, renewLease.renewLease(lease));
}
catch (Exception e) {
logger.error("Cannot renew lease {}", lease.getLeaseId(),
e);
|
370 | package org.springframework.cloud.vault.config.databases;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
<BUG>import org.springframework.cloud.vault.config.PropertyTransformer;</BUG>
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
<BUG>import org.springframework.util.Assert;
@Configuration</BUG>
@EnableConfigurationProperties({ VaultMySqlProperties.class,
| package org.springframework.cloud.vault.config.databases;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.Assert;
import org.springframework.vault.core.util.PropertyTransformer;
@Configuration
@EnableConfigurationProperties({ VaultMySqlProperties.class,
|
371 | package org.springframework.cloud.vault.config;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
<BUG>import org.springframework.util.Assert;
public class PropertyNameTransformer implements PropertyTransformer {
private final Map<String, String> nameMapping = new HashMap<>();</BUG>
public PropertyNameTransformer() {
| package org.springframework.cloud.vault.config;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.springframework.util.Assert;
import org.springframework.vault.core.util.PropertyTransformer;
public class PropertyNameTransformer extends PropertyTransformerSupport implements
PropertyTransformer {
private final Map<String, String> nameMapping = new HashMap<>();
public PropertyNameTransformer() {
|
372 | package org.springframework.cloud.vault.config.aws;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
<BUG>import org.springframework.cloud.vault.config.PropertyTransformer;</BUG>
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
<BUG>import org.springframework.util.Assert;
@Configuration</BUG>
@EnableConfigurationProperties(VaultAwsProperties.class)
| package org.springframework.cloud.vault.config.aws;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.Assert;
import org.springframework.vault.core.util.PropertyTransformer;
@Configuration
@EnableConfigurationProperties(VaultAwsProperties.class)
|
373 | package org.springframework.cloud.vault.config.rabbitmq;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
<BUG>import org.springframework.cloud.vault.config.PropertyTransformer;</BUG>
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
<BUG>import org.springframework.util.Assert;
@Configuration</BUG>
@EnableConfigurationProperties
| package org.springframework.cloud.vault.config.rabbitmq;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.Assert;
import org.springframework.vault.core.util.PropertyTransformer;
@Configuration
@EnableConfigurationProperties
|
374 | package org.springframework.cloud.vault.config.consul;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
<BUG>import org.springframework.cloud.vault.config.PropertyTransformer;</BUG>
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
<BUG>import org.springframework.util.Assert;
@Configuration</BUG>
@EnableConfigurationProperties(VaultConsulProperties.class)
| package org.springframework.cloud.vault.config.consul;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.vault.config.PropertyNameTransformer;
import org.springframework.cloud.vault.config.SecretBackendMetadata;
import org.springframework.cloud.vault.config.SecretBackendMetadataFactory;
import org.springframework.cloud.vault.config.VaultSecretBackendDescriptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.Assert;
import org.springframework.vault.core.util.PropertyTransformer;
@Configuration
@EnableConfigurationProperties(VaultConsulProperties.class)
|
375 | package org.springframework.cloud.vault.config;
import java.util.HashMap;
import java.util.Map;
<BUG>import org.springframework.util.Assert;
class GenericSecretBackendMetadata implements SecretBackendMetadata {</BUG>
private final String secretBackendPath;
private final String key;
private GenericSecretBackendMetadata(String secretBackendPath, String key) {
| package org.springframework.cloud.vault.config;
import java.util.HashMap;
import java.util.Map;
import org.springframework.util.Assert;
import org.springframework.vault.core.util.PropertyTransformer;
import org.springframework.vault.core.util.PropertyTransformers;
class GenericSecretBackendMetadata implements SecretBackendMetadata {
private final String secretBackendPath;
private final String key;
private GenericSecretBackendMetadata(String secretBackendPath, String key) {
|
376 | public String getName() {
return String.format("%s/%s", secretBackendPath, key);
}
@Override
public PropertyTransformer getPropertyTransformer() {
<BUG>return null;
}</BUG>
@Override
public Map<String, String> getVariables() {
Map<String, String> variables = new HashMap<>();
| public String getName() {
return String.format("%s/%s", secretBackendPath, key);
}
@Override
public PropertyTransformer getPropertyTransformer() {
return PropertyTransformers.noop();
}
@Override
public Map<String, String> getVariables() {
Map<String, String> variables = new HashMap<>();
|
377 | package org.exist.indexing.range;
import org.apache.log4j.Logger;
<BUG>import org.apache.lucene.analysis.Analyzer;
import org.exist.storage.NodePath;</BUG>
import org.exist.util.DatabaseConfigurationException;
import org.exist.xquery.value.Type;
import org.w3c.dom.Element;
| package org.exist.indexing.range;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.exist.dom.QName;
import org.exist.storage.NodePath;
import org.exist.util.DatabaseConfigurationException;
import org.exist.xquery.value.Type;
import org.w3c.dom.Element;
|
378 | }
return caseSensitive;
}
@Override
public boolean match(NodePath other) {
<BUG>if (isQNameIndex)
return other.getLastComponent().equalsSimple(path.getLastComponent());
return path.match(other);</BUG>
}
| }
}
}
}
|
379 | package org.apache.ranger.authorization.kafka.authorizer;
import java.io.File;
<BUG>import java.io.FileOutputStream;
import java.math.BigInteger;</BUG>
import java.net.ServerSocket;
import java.nio.file.Files;
import java.nio.file.Path;
| package org.apache.ranger.authorization.kafka.authorizer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.ServerSocket;
import java.nio.file.Files;
import java.nio.file.Path;
|
380 | KafkaTestUtils.createAndStoreKey(serviceDN, serviceDN, BigInteger.valueOf(30),
"sspass", "myservicekey", "skpass", keystore);
clientKeystorePath =
KafkaTestUtils.createAndStoreKey(clientDN, clientDN, BigInteger.valueOf(31),
"cspass", "myclientkey", "ckpass", keystore);
<BUG>File truststoreFile = File.createTempFile("kafkatruststore", ".jks");
keystore.store(new FileOutputStream(truststoreFile), "security".toCharArray());
truststorePath = truststoreFile.getPath();</BUG>
zkServer = new TestingServer();
| KafkaTestUtils.createAndStoreKey(serviceDN, serviceDN, BigInteger.valueOf(30),
"sspass", "myservicekey", "skpass", keystore);
clientKeystorePath =
KafkaTestUtils.createAndStoreKey(clientDN, clientDN, BigInteger.valueOf(31),
"cspass", "myclientkey", "ckpass", keystore);
File truststoreFile = File.createTempFile("kafkatruststore", ".jks");
try (OutputStream output = new FileOutputStream(truststoreFile)) {
keystore.store(output, "security".toCharArray());
}
truststorePath = truststoreFile.getPath();
zkServer = new TestingServer();
|
381 | if (zkServer != null) {
zkServer.stop();
}
File clientKeystoreFile = new File(clientKeystorePath);
if (clientKeystoreFile.exists()) {
<BUG>clientKeystoreFile.delete();
}</BUG>
File serviceKeystoreFile = new File(serviceKeystorePath);
if (serviceKeystoreFile.exists()) {
<BUG>serviceKeystoreFile.delete();
}</BUG>
File truststoreFile = new File(truststorePath);
| if (zkServer != null) {
zkServer.stop();
}
File clientKeystoreFile = new File(clientKeystorePath);
if (clientKeystoreFile.exists()) {
FileUtils.forceDelete(clientKeystoreFile);
}
File serviceKeystoreFile = new File(serviceKeystorePath);
if (serviceKeystoreFile.exists()) {
FileUtils.forceDelete(serviceKeystoreFile);
}
File truststoreFile = new File(truststorePath);
|
382 | if (serviceKeystoreFile.exists()) {
<BUG>serviceKeystoreFile.delete();
}</BUG>
File truststoreFile = new File(truststorePath);
if (truststoreFile.exists()) {
<BUG>truststoreFile.delete();
}</BUG>
if (tempDir != null) {
FileUtils.deleteDirectory(tempDir.toFile());
}
| if (serviceKeystoreFile.exists()) {
FileUtils.forceDelete(serviceKeystoreFile);
}
File truststoreFile = new File(truststorePath);
if (truststoreFile.exists()) {
FileUtils.forceDelete(truststoreFile);
}
if (tempDir != null) {
FileUtils.deleteDirectory(tempDir.toFile());
|
383 | package org.apache.ranger.authorization.kafka.authorizer;
import java.io.File;
<BUG>import java.io.FileOutputStream;
import java.math.BigInteger;</BUG>
import java.net.ServerSocket;
import java.security.KeyStore;
import java.util.Arrays;
| package org.apache.ranger.authorization.kafka.authorizer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.ServerSocket;
import java.security.KeyStore;
import java.util.Arrays;
|
384 | import java.security.KeyStore;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Future;
import org.I0Itec.zkclient.ZkClient;
<BUG>import org.I0Itec.zkclient.ZkConnection;
import org.apache.curator.test.TestingServer;</BUG>
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerRecord;
| import java.security.KeyStore;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Future;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.ZkConnection;
import org.apache.commons.io.FileUtils;
import org.apache.curator.test.TestingServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
385 | KafkaTestUtils.createAndStoreKey(serviceDN, serviceDN, BigInteger.valueOf(30),
"sspass", "myservicekey", "skpass", keystore);
clientKeystorePath =
KafkaTestUtils.createAndStoreKey(clientDN, clientDN, BigInteger.valueOf(31),
"cspass", "myclientkey", "ckpass", keystore);
<BUG>File truststoreFile = File.createTempFile("kafkatruststore", ".jks");
keystore.store(new FileOutputStream(truststoreFile), "security".toCharArray());
truststorePath = truststoreFile.getPath();</BUG>
zkServer = new TestingServer();
| KafkaTestUtils.createAndStoreKey(serviceDN, serviceDN, BigInteger.valueOf(30),
"sspass", "myservicekey", "skpass", keystore);
clientKeystorePath =
KafkaTestUtils.createAndStoreKey(clientDN, clientDN, BigInteger.valueOf(31),
"cspass", "myclientkey", "ckpass", keystore);
File truststoreFile = File.createTempFile("kafkatruststore", ".jks");
try (OutputStream output = new FileOutputStream(truststoreFile)) {
keystore.store(output, "security".toCharArray());
}
truststorePath = truststoreFile.getPath();
zkServer = new TestingServer();
|
386 | if (zkServer != null) {
zkServer.stop();
}
File clientKeystoreFile = new File(clientKeystorePath);
if (clientKeystoreFile.exists()) {
<BUG>clientKeystoreFile.delete();
}</BUG>
File serviceKeystoreFile = new File(serviceKeystorePath);
if (serviceKeystoreFile.exists()) {
<BUG>serviceKeystoreFile.delete();
}</BUG>
File truststoreFile = new File(truststorePath);
| if (zkServer != null) {
zkServer.stop();
}
File clientKeystoreFile = new File(clientKeystorePath);
if (clientKeystoreFile.exists()) {
FileUtils.forceDelete(clientKeystoreFile);
}
File serviceKeystoreFile = new File(serviceKeystorePath);
if (serviceKeystoreFile.exists()) {
FileUtils.forceDelete(serviceKeystoreFile);
}
File truststoreFile = new File(truststorePath);
|
387 | if (serviceKeystoreFile.exists()) {
<BUG>serviceKeystoreFile.delete();
}</BUG>
File truststoreFile = new File(truststorePath);
if (truststoreFile.exists()) {
<BUG>truststoreFile.delete();
}</BUG>
}
@Test
public void testAuthorizedRead() throws Exception {
| if (serviceKeystoreFile.exists()) {
FileUtils.forceDelete(serviceKeystoreFile);
}
File truststoreFile = new File(truststorePath);
if (truststoreFile.exists()) {
FileUtils.forceDelete(truststoreFile);
}
}
@Test
public void testAuthorizedRead() throws Exception {
|
388 | package org.apache.ranger.authorization.kafka.authorizer;
import java.io.File;
<BUG>import java.io.FileOutputStream;
import java.math.BigInteger;</BUG>
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
| package org.apache.ranger.authorization.kafka.authorizer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
|
389 | ContentSigner contentSigner = new JcaContentSignerBuilder("SHA256WithRSAEncryption").build(keyPair.getPrivate());
X509Certificate certificate = new JcaX509CertificateConverter().getCertificate(certBuilder.build(contentSigner));
KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
keystore.load(null, keystorePassword.toCharArray());
keystore.setKeyEntry(keystoreAlias, keyPair.getPrivate(), keyPassword.toCharArray(), new Certificate[] {certificate});
<BUG>File keystoreFile = File.createTempFile("kafkakeystore", ".jks");
keystore.store(new FileOutputStream(keystoreFile), keystorePassword.toCharArray());
trustStore.setCertificateEntry(keystoreAlias, certificate);</BUG>
return keystoreFile.getPath();
| ContentSigner contentSigner = new JcaContentSignerBuilder("SHA256WithRSAEncryption").build(keyPair.getPrivate());
X509Certificate certificate = new JcaX509CertificateConverter().getCertificate(certBuilder.build(contentSigner));
KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
keystore.load(null, keystorePassword.toCharArray());
keystore.setKeyEntry(keystoreAlias, keyPair.getPrivate(), keyPassword.toCharArray(), new Certificate[] {certificate});
File keystoreFile = File.createTempFile("kafkakeystore", ".jks");
try (OutputStream output = new FileOutputStream(keystoreFile)) {
keystore.store(output, keystorePassword.toCharArray());
|
390 | import java.net.URL;
import cn.alien95.resthttp.image.cache.CacheDispatcher;
import cn.alien95.resthttp.image.cache.DiskCache;
import cn.alien95.resthttp.image.cache.MemoryCache;
import cn.alien95.resthttp.image.callback.ImageCallback;
<BUG>import cn.alien95.resthttp.request.RequestQueue;
import cn.alien95.resthttp.util.DebugUtils;
public class HttpRequestImage {</BUG>
private CacheDispatcher cacheDispatcher;
| import java.net.URL;
import cn.alien95.resthttp.image.cache.CacheDispatcher;
import cn.alien95.resthttp.image.cache.DiskCache;
import cn.alien95.resthttp.image.cache.MemoryCache;
import cn.alien95.resthttp.image.callback.ImageCallback;
import cn.alien95.resthttp.request.ThreadPool;
import cn.alien95.resthttp.util.DebugUtils;
import cn.alien95.resthttp.util.Util;
public class HttpRequestImage {
private CacheDispatcher cacheDispatcher;
|
391 | urlConnection.setConnectTimeout(10 * 1000);
urlConnection.setReadTimeout(10 * 1000);
return urlConnection;
}
public synchronized void loadImageFromNetWithCompress(final String url, final int reqWidth, final int reqHeight, final ImageCallback callBack) {
<BUG>RequestQueue.getInstance().addReadImgCacheAsyn(new Runnable() {
</BUG>
@Override
public void run() {
HttpURLConnection urlConnection = getHttpUrlConnection(url);
| urlConnection.setConnectTimeout(10 * 1000);
urlConnection.setReadTimeout(10 * 1000);
return urlConnection;
}
public synchronized void loadImageFromNetWithCompress(final String url, final int reqWidth, final int reqHeight, final ImageCallback callBack) {
ThreadPool.getInstance().addReadImgCacheAsyn(new Runnable() {
@Override
public void run() {
HttpURLConnection urlConnection = getHttpUrlConnection(url);
|
392 | handler.post(new Runnable() {
@Override
public void run() {
callBack.success(compressBitmap);
if (compressBitmap != null) {
<BUG>MemoryCache.getInstance().putBitmapToCache(url + reqWidth + reqHeight, compressBitmap);
DiskCache.getInstance().putBitmapToCache(url + reqWidth + reqHeight, compressBitmap);
</BUG>
}
| handler.post(new Runnable() {
@Override
public void run() {
callBack.success(compressBitmap);
if (compressBitmap != null) {
MemoryCache.getInstance().put(Util.getCacheKey(url + reqWidth + reqHeight), compressBitmap);
DiskCache.getInstance().put(Util.getCacheKey(url + reqWidth + reqHeight), compressBitmap);
|
393 | package cn.alien95.resthttp.image.cache;
<BUG>import android.graphics.Bitmap;</BUG>
import java.util.concurrent.LinkedBlockingDeque;
<BUG>import cn.alien95.resthttp.image.callback.DiskCallback;</BUG>
import cn.alien95.resthttp.image.callback.ImageCallback;
<BUG>import cn.alien95.resthttp.util.RestHttpLog;
public class CacheDispatcher {</BUG>
private LinkedBlockingDeque<ImgRequest> cacheQueue;
| package cn.alien95.resthttp.image.cache;
import java.util.concurrent.LinkedBlockingDeque;
import cn.alien95.resthttp.image.callback.ImageCallback;
import cn.alien95.resthttp.util.RestHttpLog;
import cn.alien95.resthttp.util.Util;
public class CacheDispatcher {
private LinkedBlockingDeque<ImgRequest> cacheQueue;
|
394 | if (userInfo != null) {
result.setText(result.getText().toString() + "\n POST : "
+ userInfo.toString());
}
if (userInfo1 != null) {
<BUG>result.setText(result.getText().toString() + "\n GET : " + userInfo1.toString());
</BUG>
} else {
RestHttpLog.i("userInfo1为空");
}
| if (userInfo != null) {
result.setText(result.getText().toString() + "\n POST : "
+ userInfo.toString());
}
if (userInfo1 != null) {
result.setText(result.getText().toString() + "\n GET userInfo1: " + userInfo1.toString());
} else {
RestHttpLog.i("userInfo1为空");
|
395 | import java.net.HttpURLConnection;
import java.net.URL;
import cn.alien95.resthttp.image.cache.DiskCache;
import cn.alien95.resthttp.image.cache.MemoryCache;
import cn.alien95.resthttp.image.callback.ImageCallback;
<BUG>import cn.alien95.resthttp.request.RequestQueue;
</BUG>
import cn.alien95.resthttp.util.DebugUtils;
<BUG>import cn.alien95.resthttp.util.RestHttpLog;
public class NetworkDispatcher {</BUG>
private Handler handler;
| import java.net.HttpURLConnection;
import java.net.URL;
import cn.alien95.resthttp.image.cache.DiskCache;
import cn.alien95.resthttp.image.cache.MemoryCache;
import cn.alien95.resthttp.image.callback.ImageCallback;
import cn.alien95.resthttp.request.ThreadPool;
import cn.alien95.resthttp.util.DebugUtils;
import cn.alien95.resthttp.util.RestHttpLog;
import cn.alien95.resthttp.util.Util;
public class NetworkDispatcher {
private Handler handler;
|
396 | public void addNetworkWithCompress(String url, int inSimpleSize, ImageCallback callback) {
<BUG>networkImageWithCompress(url,inSimpleSize,callback);
</BUG>
}
<BUG>public void addNetworkWithCompress(String url, int reqWidth, int reqHeight,ImageCallback callback) {
networkImageWithCompress(url,reqWidth,reqHeight,callback);
</BUG>
}
public void networkImage(final String url, final ImageCallback callback) {
| public void addNetworkWithCompress(String url, int inSimpleSize, ImageCallback callback) {
networkImageWithCompress(url, inSimpleSize, callback);
}
public void addNetworkWithCompress(String url, int reqWidth, int reqHeight, ImageCallback callback) {
networkImageWithCompress(url, reqWidth, reqHeight, callback);
}
public void networkImage(final String url, final ImageCallback callback) {
|
397 | networkImageWithCompress(url,reqWidth,reqHeight,callback);
</BUG>
}
public void networkImage(final String url, final ImageCallback callback) {
RestHttpLog.i("Get picture from network");
<BUG>RequestQueue.getInstance().addReadImgCacheAsyn(new Runnable() {
</BUG>
@Override
public void run() {
HttpURLConnection urlConnection = getHttpUrlConnection(url);
| public void addNetworkWithCompress(String url, int inSimpleSize, ImageCallback callback) {
networkImageWithCompress(url, inSimpleSize, callback);
}
public void addNetworkWithCompress(String url, int reqWidth, int reqHeight, ImageCallback callback) {
networkImageWithCompress(url, reqWidth, reqHeight, callback);
}
public void networkImage(final String url, final ImageCallback callback) {
RestHttpLog.i("Get picture from network");
ThreadPool.getInstance().addReadImgCacheAsyn(new Runnable() {
@Override
public void run() {
HttpURLConnection urlConnection = getHttpUrlConnection(url);
|
398 | handler.post(new Runnable() {
@Override
public void run() {
callback.success(bitmap);
if (bitmap != null) {
<BUG>MemoryCache.getInstance().putBitmapToCache(url, bitmap);
DiskCache.getInstance().putBitmapToCache(url, bitmap);
</BUG>
}
| handler.post(new Runnable() {
@Override
public void run() {
callback.success(bitmap);
if (bitmap != null) {
MemoryCache.getInstance().put(Util.getCacheKey(url), bitmap);
DiskCache.getInstance().put(Util.getCacheKey(url), bitmap);
|
399 | urlConnection.setReadTimeout(10 * 1000);
return urlConnection;
}
public synchronized void networkImageWithCompress(final String url, final int inSampleSize, final ImageCallback callBack) {
RestHttpLog.i("Get compress picture from network");
<BUG>RequestQueue.getInstance().addReadImgCacheAsyn(new Runnable() {
</BUG>
@Override
public void run() {
HttpURLConnection urlConnection = getHttpUrlConnection(url);
| urlConnection.setReadTimeout(10 * 1000);
return urlConnection;
}
public synchronized void networkImageWithCompress(final String url, final int inSampleSize, final ImageCallback callBack) {
RestHttpLog.i("Get compress picture from network");
ThreadPool.getInstance().addReadImgCacheAsyn(new Runnable() {
@Override
public void run() {
HttpURLConnection urlConnection = getHttpUrlConnection(url);
|
400 | handler.post(new Runnable() {
@Override
public void run() {
callBack.success(compressBitmap);
if (compressBitmap != null) {
<BUG>MemoryCache.getInstance().putBitmapToCache(url + inSampleSize, compressBitmap);
DiskCache.getInstance().putBitmapToCache(url + inSampleSize, compressBitmap);
</BUG>
}
| handler.post(new Runnable() {
@Override
public void run() {
callBack.success(compressBitmap);
if (compressBitmap != null) {
MemoryCache.getInstance().put(Util.getCacheKey(url + inSampleSize), compressBitmap);
DiskCache.getInstance().put(Util.getCacheKey(url + inSampleSize), compressBitmap);
|