code | code_dependency |
---|---|
public class class_name {
private boolean isImageType(String mime) {
if (StringUtils.hasText(mime)) {
for (String imageMime : IMAGE_MIMES) {
if (mime.equalsIgnoreCase(imageMime)) {
return true;
}
}
}
return false;
} } | public class class_name {
private boolean isImageType(String mime) {
if (StringUtils.hasText(mime)) {
for (String imageMime : IMAGE_MIMES) {
if (mime.equalsIgnoreCase(imageMime)) {
return true; // depends on control dependency: [if], data = [none]
}
}
}
return false;
} } |
public class class_name {
@Action(name = "Extract text from PDF",
description = EXTRACT_TEXT_FROM_PDF_DESC,
outputs = {
@Output(value = RETURN_CODE, description = RETURN_CODE_DESC),
@Output(value = RETURN_RESULT, description = RETURN_RESULT_DESC),
@Output(value = TEXT_STRING, description = TEXT_STRING_DESC),
@Output(value = TEXT_JSON, description = TEXT_JSON_DESC),
@Output(value = EXCEPTION, description = EXCEPTION_DESC),
},
responses = {
@Response(text = SUCCESS, field = RETURN_CODE, value = ReturnCodes.SUCCESS, matchType = COMPARE_EQUAL, responseType = RESOLVED, description = SUCCESS_DESC),
@Response(text = FAILURE, field = RETURN_CODE, value = ReturnCodes.FAILURE, matchType = COMPARE_EQUAL, responseType = ERROR, isOnFail = true, description = FAILURE_DESC)
})
public Map<String, String> execute(
@Param(value = FILE_PATH, required = true, description = PDF_FILE_PATH_DESC) String filePath,
@Param(value = DATA_PATH, required = true, description = DATA_PATH_DESC) String dataPath,
@Param(value = LANGUAGE, required = true, description = LANGUAGE_DESC) String language,
@Param(value = DPI, description = DPI_DESC) String dpi,
@Param(value = TEXT_BLOCKS, description = TEXT_BLOCKS_DESC) String textBlocks,
@Param(value = DESKEW, description = DESKEW_DESC) String deskew,
@Param(value = FROM_PAGE, description = FROM_PAGE_DESC) String fromPage,
@Param(value = TO_PAGE, description = TO_PAGE_DESC) String toPage,
@Param(value = PAGE_INDEX, description = PAGE_INDEX_DESC) String pageIndex) {
dataPath = defaultIfEmpty(dataPath, EMPTY);
language = defaultIfEmpty(language, ENG);
dpi = defaultIfEmpty(dpi, DPI_SET);
textBlocks = defaultIfEmpty(textBlocks, FALSE);
deskew = defaultIfEmpty(deskew, FALSE);
fromPage = defaultIfEmpty(fromPage, EMPTY);
toPage = defaultIfEmpty(toPage, EMPTY);
pageIndex = defaultIfEmpty(pageIndex, EMPTY);
final List<String> exceptionMessages = verifyExtractTextFromPDF(filePath, dataPath, textBlocks, deskew, fromPage, toPage, pageIndex, dpi);
if (!exceptionMessages.isEmpty()) {
return getFailureResultsMap(StringUtilities.join(exceptionMessages, NEW_LINE));
}
try {
final String resultText = imageConvert(filePath, dataPath, language, dpi, textBlocks, deskew, fromPage, toPage, pageIndex);
final Map<String, String> result = getSuccessResultsMap(resultText);
if (Boolean.parseBoolean(textBlocks)) {
result.put(TEXT_JSON, resultText);
} else {
result.put(TEXT_STRING, resultText);
}
return result;
} catch (IndexOutOfBoundsException e) {
return getFailureResultsMap(EXCEPTION_EXCEEDS_PAGES);
} catch (Exception e) {
return getFailureResultsMap(e);
}
} } | public class class_name {
@Action(name = "Extract text from PDF",
description = EXTRACT_TEXT_FROM_PDF_DESC,
outputs = {
@Output(value = RETURN_CODE, description = RETURN_CODE_DESC),
@Output(value = RETURN_RESULT, description = RETURN_RESULT_DESC),
@Output(value = TEXT_STRING, description = TEXT_STRING_DESC),
@Output(value = TEXT_JSON, description = TEXT_JSON_DESC),
@Output(value = EXCEPTION, description = EXCEPTION_DESC),
},
responses = {
@Response(text = SUCCESS, field = RETURN_CODE, value = ReturnCodes.SUCCESS, matchType = COMPARE_EQUAL, responseType = RESOLVED, description = SUCCESS_DESC),
@Response(text = FAILURE, field = RETURN_CODE, value = ReturnCodes.FAILURE, matchType = COMPARE_EQUAL, responseType = ERROR, isOnFail = true, description = FAILURE_DESC)
})
public Map<String, String> execute(
@Param(value = FILE_PATH, required = true, description = PDF_FILE_PATH_DESC) String filePath,
@Param(value = DATA_PATH, required = true, description = DATA_PATH_DESC) String dataPath,
@Param(value = LANGUAGE, required = true, description = LANGUAGE_DESC) String language,
@Param(value = DPI, description = DPI_DESC) String dpi,
@Param(value = TEXT_BLOCKS, description = TEXT_BLOCKS_DESC) String textBlocks,
@Param(value = DESKEW, description = DESKEW_DESC) String deskew,
@Param(value = FROM_PAGE, description = FROM_PAGE_DESC) String fromPage,
@Param(value = TO_PAGE, description = TO_PAGE_DESC) String toPage,
@Param(value = PAGE_INDEX, description = PAGE_INDEX_DESC) String pageIndex) {
dataPath = defaultIfEmpty(dataPath, EMPTY);
language = defaultIfEmpty(language, ENG);
dpi = defaultIfEmpty(dpi, DPI_SET);
textBlocks = defaultIfEmpty(textBlocks, FALSE);
deskew = defaultIfEmpty(deskew, FALSE);
fromPage = defaultIfEmpty(fromPage, EMPTY);
toPage = defaultIfEmpty(toPage, EMPTY);
pageIndex = defaultIfEmpty(pageIndex, EMPTY);
final List<String> exceptionMessages = verifyExtractTextFromPDF(filePath, dataPath, textBlocks, deskew, fromPage, toPage, pageIndex, dpi);
if (!exceptionMessages.isEmpty()) {
return getFailureResultsMap(StringUtilities.join(exceptionMessages, NEW_LINE)); // depends on control dependency: [if], data = [none]
}
try {
final String resultText = imageConvert(filePath, dataPath, language, dpi, textBlocks, deskew, fromPage, toPage, pageIndex);
final Map<String, String> result = getSuccessResultsMap(resultText);
if (Boolean.parseBoolean(textBlocks)) {
result.put(TEXT_JSON, resultText); // depends on control dependency: [if], data = [none]
} else {
result.put(TEXT_STRING, resultText); // depends on control dependency: [if], data = [none]
}
return result; // depends on control dependency: [try], data = [none]
} catch (IndexOutOfBoundsException e) {
return getFailureResultsMap(EXCEPTION_EXCEEDS_PAGES);
} catch (Exception e) { // depends on control dependency: [catch], data = [none]
return getFailureResultsMap(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public <TInput, TOutput> VortexFuture<TOutput>
enqueueTasklet(final VortexFunction<TInput, TOutput> function, final TInput input,
final Optional<FutureCallback<TOutput>> callback) {
// TODO[REEF-500]: Simple duplicate Vortex Tasklet launch.
final VortexFuture<TOutput> vortexFuture;
final int id = taskletIdCounter.getAndIncrement();
if (callback.isPresent()) {
vortexFuture = new VortexFuture<>(executor, this, id, callback.get());
} else {
vortexFuture = new VortexFuture<>(executor, this, id);
}
final Tasklet tasklet = new Tasklet<>(id, Optional.<Integer>empty(), function, input, vortexFuture);
putDelegate(Collections.singletonList(tasklet), vortexFuture);
this.pendingTasklets.addLast(tasklet);
return vortexFuture;
} } | public class class_name {
@Override
public <TInput, TOutput> VortexFuture<TOutput>
enqueueTasklet(final VortexFunction<TInput, TOutput> function, final TInput input,
final Optional<FutureCallback<TOutput>> callback) {
// TODO[REEF-500]: Simple duplicate Vortex Tasklet launch.
final VortexFuture<TOutput> vortexFuture;
final int id = taskletIdCounter.getAndIncrement();
if (callback.isPresent()) {
vortexFuture = new VortexFuture<>(executor, this, id, callback.get()); // depends on control dependency: [if], data = [none]
} else {
vortexFuture = new VortexFuture<>(executor, this, id); // depends on control dependency: [if], data = [none]
}
final Tasklet tasklet = new Tasklet<>(id, Optional.<Integer>empty(), function, input, vortexFuture);
putDelegate(Collections.singletonList(tasklet), vortexFuture);
this.pendingTasklets.addLast(tasklet);
return vortexFuture;
} } |
public class class_name {
public void writeHtmlString(String strHTML, PrintWriter out)
{
int iIndex;
if (strHTML == null)
return;
while ((iIndex = strHTML.indexOf(HtmlConstants.TITLE_TAG)) != -1)
{ // ** FIX THIS to look for a <xxx/> and look up the token **
strHTML = strHTML.substring(0, iIndex) + ((BasePanel)this.getScreenField()).getTitle() + strHTML.substring(iIndex + HtmlConstants.TITLE_TAG.length());
}
out.println(strHTML);
} } | public class class_name {
public void writeHtmlString(String strHTML, PrintWriter out)
{
int iIndex;
if (strHTML == null)
return;
while ((iIndex = strHTML.indexOf(HtmlConstants.TITLE_TAG)) != -1)
{ // ** FIX THIS to look for a <xxx/> and look up the token **
strHTML = strHTML.substring(0, iIndex) + ((BasePanel)this.getScreenField()).getTitle() + strHTML.substring(iIndex + HtmlConstants.TITLE_TAG.length()); // depends on control dependency: [while], data = [none]
}
out.println(strHTML);
} } |
public class class_name {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/metrics")
@Description("Submits externally collected metric data.")
public Map<String, Object> submitMetrics(@Context HttpServletRequest req, final List<MetricDto> metricDtos) {
PrincipalUser remoteUser = getRemoteUser(req);
SystemAssert.requireArgument(metricDtos != null, "Cannot submit null timeseries metrics list.");
List<Metric> legalMetrics = new ArrayList<>();
List<MetricDto> illegalMetrics = new ArrayList<>();
List<String> errorMessages = new ArrayList<>();
for (MetricDto metricDto : metricDtos) {
try {
Metric metric = new Metric(metricDto.getScope(), metricDto.getMetric());
metric.setTags(metricDto.getTags());
if (metricDto.getMetatags() != null) {
metric.setMetatagsRecord(new MetatagsRecord(metricDto.getMetatags(), metricDto.getMetatagsKey()));
}
copyProperties(metric, metricDto);
legalMetrics.add(metric);
} catch (Exception e) {
illegalMetrics.add(metricDto);
errorMessages.add(e.getMessage());
}
}
_collectionService.submitMetrics(remoteUser, legalMetrics);
Map<String, Object> result = new HashMap<>();
result.put("Success", legalMetrics.size() + " metrics");
result.put("Error", illegalMetrics.size() + " metrics");
result.put("Error Messages", errorMessages);
return result;
} } | public class class_name {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/metrics")
@Description("Submits externally collected metric data.")
public Map<String, Object> submitMetrics(@Context HttpServletRequest req, final List<MetricDto> metricDtos) {
PrincipalUser remoteUser = getRemoteUser(req);
SystemAssert.requireArgument(metricDtos != null, "Cannot submit null timeseries metrics list.");
List<Metric> legalMetrics = new ArrayList<>();
List<MetricDto> illegalMetrics = new ArrayList<>();
List<String> errorMessages = new ArrayList<>();
for (MetricDto metricDto : metricDtos) {
try {
Metric metric = new Metric(metricDto.getScope(), metricDto.getMetric());
metric.setTags(metricDto.getTags()); // depends on control dependency: [try], data = [none]
if (metricDto.getMetatags() != null) {
metric.setMetatagsRecord(new MetatagsRecord(metricDto.getMetatags(), metricDto.getMetatagsKey())); // depends on control dependency: [if], data = [(metricDto.getMetatags()]
}
copyProperties(metric, metricDto); // depends on control dependency: [try], data = [none]
legalMetrics.add(metric); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
illegalMetrics.add(metricDto);
errorMessages.add(e.getMessage());
} // depends on control dependency: [catch], data = [none]
}
_collectionService.submitMetrics(remoteUser, legalMetrics);
Map<String, Object> result = new HashMap<>();
result.put("Success", legalMetrics.size() + " metrics");
result.put("Error", illegalMetrics.size() + " metrics");
result.put("Error Messages", errorMessages);
return result;
} } |
public class class_name {
private int findRow(int rangeStart) {
int index = 0;
// check the vicinity of the last-seen row (start
// searching with an unrolled loop)
index = prevRow * columns;
if (rangeStart >= v[index]) {
if (rangeStart < v[index + 1]) {
// same row as last seen
return index;
} else {
index += columns;
if (rangeStart < v[index + 1]) {
++prevRow;
return index;
} else {
index += columns;
if (rangeStart < v[index + 1]) {
prevRow += 2;
return index;
} else if ((rangeStart - v[index + 1]) < 10) {
// we are close, continue looping
prevRow += 2;
do {
++prevRow;
index += columns;
} while (rangeStart >= v[index + 1]);
return index;
}
}
}
} else if (rangeStart < v[1]) {
// the very first row
prevRow = 0;
return 0;
}
// do a binary search for the start of the range
int start = 0;
int mid = 0;
int limit = rows;
while (start < limit - 1) {
mid = (start + limit) / 2;
index = columns * mid;
if (rangeStart < v[index]) {
limit = mid;
} else if (rangeStart < v[index + 1]) {
prevRow = mid;
return index;
} else {
start = mid;
}
}
// must be found because all ranges together always cover
// all of Unicode
prevRow = start;
index = start * columns;
return index;
} } | public class class_name {
private int findRow(int rangeStart) {
int index = 0;
// check the vicinity of the last-seen row (start
// searching with an unrolled loop)
index = prevRow * columns;
if (rangeStart >= v[index]) {
if (rangeStart < v[index + 1]) {
// same row as last seen
return index; // depends on control dependency: [if], data = [none]
} else {
index += columns; // depends on control dependency: [if], data = [none]
if (rangeStart < v[index + 1]) {
++prevRow; // depends on control dependency: [if], data = [none]
return index; // depends on control dependency: [if], data = [none]
} else {
index += columns; // depends on control dependency: [if], data = [none]
if (rangeStart < v[index + 1]) {
prevRow += 2; // depends on control dependency: [if], data = [none]
return index; // depends on control dependency: [if], data = [none]
} else if ((rangeStart - v[index + 1]) < 10) {
// we are close, continue looping
prevRow += 2; // depends on control dependency: [if], data = [none]
do {
++prevRow;
index += columns;
} while (rangeStart >= v[index + 1]);
return index; // depends on control dependency: [if], data = [none]
}
}
}
} else if (rangeStart < v[1]) {
// the very first row
prevRow = 0; // depends on control dependency: [if], data = [none]
return 0; // depends on control dependency: [if], data = [none]
}
// do a binary search for the start of the range
int start = 0;
int mid = 0;
int limit = rows;
while (start < limit - 1) {
mid = (start + limit) / 2; // depends on control dependency: [while], data = [(start]
index = columns * mid; // depends on control dependency: [while], data = [none]
if (rangeStart < v[index]) {
limit = mid; // depends on control dependency: [if], data = [none]
} else if (rangeStart < v[index + 1]) {
prevRow = mid; // depends on control dependency: [if], data = [none]
return index; // depends on control dependency: [if], data = [none]
} else {
start = mid; // depends on control dependency: [if], data = [none]
}
}
// must be found because all ranges together always cover
// all of Unicode
prevRow = start;
index = start * columns;
return index;
} } |
public class class_name {
public void marshall(Offering offering, ProtocolMarshaller protocolMarshaller) {
if (offering == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(offering.getArn(), ARN_BINDING);
protocolMarshaller.marshall(offering.getCurrencyCode(), CURRENCYCODE_BINDING);
protocolMarshaller.marshall(offering.getDuration(), DURATION_BINDING);
protocolMarshaller.marshall(offering.getDurationUnits(), DURATIONUNITS_BINDING);
protocolMarshaller.marshall(offering.getFixedPrice(), FIXEDPRICE_BINDING);
protocolMarshaller.marshall(offering.getOfferingDescription(), OFFERINGDESCRIPTION_BINDING);
protocolMarshaller.marshall(offering.getOfferingId(), OFFERINGID_BINDING);
protocolMarshaller.marshall(offering.getOfferingType(), OFFERINGTYPE_BINDING);
protocolMarshaller.marshall(offering.getRegion(), REGION_BINDING);
protocolMarshaller.marshall(offering.getResourceSpecification(), RESOURCESPECIFICATION_BINDING);
protocolMarshaller.marshall(offering.getUsagePrice(), USAGEPRICE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(Offering offering, ProtocolMarshaller protocolMarshaller) {
if (offering == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(offering.getArn(), ARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getCurrencyCode(), CURRENCYCODE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getDuration(), DURATION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getDurationUnits(), DURATIONUNITS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getFixedPrice(), FIXEDPRICE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getOfferingDescription(), OFFERINGDESCRIPTION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getOfferingId(), OFFERINGID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getOfferingType(), OFFERINGTYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getRegion(), REGION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getResourceSpecification(), RESOURCESPECIFICATION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(offering.getUsagePrice(), USAGEPRICE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void ensureFailover( HttpServletRequest request )
{
//
// remove() puts the pageflow instance into a request attribute. Make sure not to re-save this
// instance if it's being removed. Also, if the session is null (after having been invalidated
// by the user), don't recreate it.
//
if ( request.getAttribute( REMOVING_PAGEFLOW_ATTR ) != this && request.getSession( false ) != null )
{
StorageHandler sh = Handlers.get( getServletContext() ).getStorageHandler();
HttpServletRequest unwrappedRequest = PageFlowUtils.unwrapMultipart( request );
RequestContext rc = new RequestContext( unwrappedRequest, null );
//
// If this is a long-lived page flow, there are two attributes to deal with, and ensure that
// both failover correctly.
//
if ( isLongLived() )
{
String longLivedAttrName = InternalUtils.getLongLivedFlowAttr( getModulePath() );
longLivedAttrName = ScopedServletUtils.getScopedSessionAttrName( longLivedAttrName, unwrappedRequest );
String currentLongLivedAttrName =
ScopedServletUtils.getScopedSessionAttrName( CURRENT_LONGLIVED_ATTR, unwrappedRequest );
sh.ensureFailover( rc, longLivedAttrName, this );
sh.ensureFailover( rc, currentLongLivedAttrName, getModulePath() );
}
//
// This Page Flow is not long lived, so just the Page Flow itself needs to be added to the session.
//
else
{
String attrName = ScopedServletUtils.getScopedSessionAttrName( CURRENT_JPF_ATTR, unwrappedRequest );
sh.ensureFailover( rc, attrName, this );
}
}
} } | public class class_name {
public void ensureFailover( HttpServletRequest request )
{
//
// remove() puts the pageflow instance into a request attribute. Make sure not to re-save this
// instance if it's being removed. Also, if the session is null (after having been invalidated
// by the user), don't recreate it.
//
if ( request.getAttribute( REMOVING_PAGEFLOW_ATTR ) != this && request.getSession( false ) != null )
{
StorageHandler sh = Handlers.get( getServletContext() ).getStorageHandler();
HttpServletRequest unwrappedRequest = PageFlowUtils.unwrapMultipart( request );
RequestContext rc = new RequestContext( unwrappedRequest, null );
//
// If this is a long-lived page flow, there are two attributes to deal with, and ensure that
// both failover correctly.
//
if ( isLongLived() )
{
String longLivedAttrName = InternalUtils.getLongLivedFlowAttr( getModulePath() );
longLivedAttrName = ScopedServletUtils.getScopedSessionAttrName( longLivedAttrName, unwrappedRequest ); // depends on control dependency: [if], data = [none]
String currentLongLivedAttrName =
ScopedServletUtils.getScopedSessionAttrName( CURRENT_LONGLIVED_ATTR, unwrappedRequest );
sh.ensureFailover( rc, longLivedAttrName, this ); // depends on control dependency: [if], data = [none]
sh.ensureFailover( rc, currentLongLivedAttrName, getModulePath() ); // depends on control dependency: [if], data = [none]
}
//
// This Page Flow is not long lived, so just the Page Flow itself needs to be added to the session.
//
else
{
String attrName = ScopedServletUtils.getScopedSessionAttrName( CURRENT_JPF_ATTR, unwrappedRequest );
sh.ensureFailover( rc, attrName, this ); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public SecretsManagerSecretResourceData withAdditionalStagingLabelsToDownload(String... additionalStagingLabelsToDownload) {
if (this.additionalStagingLabelsToDownload == null) {
setAdditionalStagingLabelsToDownload(new java.util.ArrayList<String>(additionalStagingLabelsToDownload.length));
}
for (String ele : additionalStagingLabelsToDownload) {
this.additionalStagingLabelsToDownload.add(ele);
}
return this;
} } | public class class_name {
public SecretsManagerSecretResourceData withAdditionalStagingLabelsToDownload(String... additionalStagingLabelsToDownload) {
if (this.additionalStagingLabelsToDownload == null) {
setAdditionalStagingLabelsToDownload(new java.util.ArrayList<String>(additionalStagingLabelsToDownload.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : additionalStagingLabelsToDownload) {
this.additionalStagingLabelsToDownload.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public static void addElement(@Nonnull StringBuilder buf, @Nonnull String separator, @Nonnull String element) {
if (buf.length() > 0) {
buf.append(separator);
}
buf.append(element);
} } | public class class_name {
public static void addElement(@Nonnull StringBuilder buf, @Nonnull String separator, @Nonnull String element) {
if (buf.length() > 0) {
buf.append(separator); // depends on control dependency: [if], data = [none]
}
buf.append(element);
} } |
public class class_name {
private static Properties getFilterInitParameters(final FilterConfig config) {
Properties props = new Properties();
Enumeration en = config.getInitParameterNames();
while (en.hasMoreElements()) {
String key = (String)en.nextElement();
String value = config.getInitParameter(key);
props.setProperty(key, value);
}
return props;
} } | public class class_name {
private static Properties getFilterInitParameters(final FilterConfig config) {
Properties props = new Properties();
Enumeration en = config.getInitParameterNames();
while (en.hasMoreElements()) {
String key = (String)en.nextElement();
String value = config.getInitParameter(key);
props.setProperty(key, value); // depends on control dependency: [while], data = [none]
}
return props;
} } |
public class class_name {
public EnvironmentPlatform withLanguages(EnvironmentLanguage... languages) {
if (this.languages == null) {
setLanguages(new java.util.ArrayList<EnvironmentLanguage>(languages.length));
}
for (EnvironmentLanguage ele : languages) {
this.languages.add(ele);
}
return this;
} } | public class class_name {
public EnvironmentPlatform withLanguages(EnvironmentLanguage... languages) {
if (this.languages == null) {
setLanguages(new java.util.ArrayList<EnvironmentLanguage>(languages.length)); // depends on control dependency: [if], data = [none]
}
for (EnvironmentLanguage ele : languages) {
this.languages.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public static INDArray pLeftInvert(INDArray arr, boolean inPlace) {
try {
final INDArray inv = invert(arr.transpose().mmul(arr), inPlace).mmul(arr.transpose());
if (inPlace) arr.assign(inv);
return inv;
} catch (SingularMatrixException e) {
throw new IllegalArgumentException(
"Full column rank condition for left pseudo inverse was not met.");
}
} } | public class class_name {
public static INDArray pLeftInvert(INDArray arr, boolean inPlace) {
try {
final INDArray inv = invert(arr.transpose().mmul(arr), inPlace).mmul(arr.transpose());
if (inPlace) arr.assign(inv);
return inv; // depends on control dependency: [try], data = [none]
} catch (SingularMatrixException e) {
throw new IllegalArgumentException(
"Full column rank condition for left pseudo inverse was not met.");
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private static LongConsumer adapt(Sink<Long> sink) {
if (sink instanceof LongConsumer) {
return (LongConsumer) sink;
} else {
// if (Tripwire.ENABLED)
// Tripwire.trip(AbstractPipeline.class,
// "using LongStream.adapt(Sink<Long> s)");
return sink::accept;
}
} } | public class class_name {
private static LongConsumer adapt(Sink<Long> sink) {
if (sink instanceof LongConsumer) {
return (LongConsumer) sink; // depends on control dependency: [if], data = [none]
} else {
// if (Tripwire.ENABLED)
// Tripwire.trip(AbstractPipeline.class,
// "using LongStream.adapt(Sink<Long> s)");
return sink::accept; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@SuppressWarnings({"unchecked", "rawtypes"})
public <T extends BaseWrapper<T>, U> CollectionWrapper<T> createCollection(final Collection<U> collection, final Class<U> entityClass,
boolean isRevisionCollection) {
if (collection == null) {
return null;
}
// Create the key
final DBWrapperKey key = new DBWrapperKey(collection, entityClass);
// Check to see if a wrapper has already been cached for the key
final DBCollectionWrapper cachedWrapper = wrapperCache.getCollection(key);
if (cachedWrapper != null) {
return cachedWrapper;
}
final DBCollectionWrapper wrapper;
if (entityClass == Locale.class) {
// LOCALE
wrapper = new DBLocaleCollectionWrapper(this, (Collection<Locale>) collection, isRevisionCollection);
} else if (entityClass == TranslationServer.class) {
// TRANSLATION SERVER
throw new UnsupportedOperationException("A return class needs to be specified for TranslationServer entities.");
} else if (entityClass == Topic.class) {
// TOPIC
wrapper = new DBTopicCollectionWrapper(this, (Collection<Topic>) collection, isRevisionCollection);
} else if (entityClass == TopicSourceUrl.class) {
// TOPIC SOURCE URL
wrapper = new DBTopicSourceURLCollectionWrapper(this, (Collection<TopicSourceUrl>) collection, isRevisionCollection);
} else if (entityClass == TranslatedTopicData.class) {
// TRANSLATED TOPIC
wrapper = new DBTranslatedTopicDataCollectionWrapper(this, (Collection<TranslatedTopicData>) collection, isRevisionCollection);
} else if (entityClass == TranslatedTopicString.class) {
// TRANSLATED TOPIC STRING
wrapper = new DBTranslatedTopicStringCollectionWrapper(this, (Collection<TranslatedTopicString>) collection,
isRevisionCollection);
} else if (entityClass == Tag.class) {
// TAG
wrapper = new DBTagCollectionWrapper(this, (Collection<Tag>) collection, isRevisionCollection);
} else if (entityClass == Category.class) {
// CATEGORY
wrapper = new DBCategoryCollectionWrapper(this, (Collection<Category>) collection, isRevisionCollection);
} else if (entityClass == TagToCategory.class) {
throw new UnsupportedOperationException("A return class needs to be specified for TagToCategory entities.");
} else if (entityClass == PropertyTagToPropertyTagCategory.class) {
// PROPERTY TAGS
wrapper = new DBPropertyTagInPropertyCategoryCollectionWrapper(this, (Collection<PropertyTagToPropertyTagCategory>) collection,
isRevisionCollection);
} else if (entityClass == PropertyTag.class) {
wrapper = new DBPropertyTagCollectionWrapper(this, (Collection<PropertyTag>) collection, isRevisionCollection);
} else if (entityClass == TopicToPropertyTag.class) {
wrapper = new DBTopicToPropertyTagCollectionWrapper(this, (Collection<TopicToPropertyTag>) collection, isRevisionCollection);
} else if (entityClass == TagToPropertyTag.class) {
wrapper = new DBTagToPropertyTagCollectionWrapper(this, (Collection<TagToPropertyTag>) collection, isRevisionCollection);
} else if (entityClass == ContentSpecToPropertyTag.class) {
wrapper = new DBContentSpecToPropertyTagCollectionWrapper(this, (Collection<ContentSpecToPropertyTag>) collection,
isRevisionCollection);
} else if (entityClass == BlobConstants.class) {
// BLOB CONSTANT
wrapper = new DBBlobConstantCollectionWrapper(this, (Collection<BlobConstants>) collection, isRevisionCollection);
} else if (entityClass == StringConstants.class) {
// STRING CONSTANT
wrapper = new DBStringConstantCollectionWrapper(this, (Collection<StringConstants>) collection, isRevisionCollection);
} else if (entityClass == File.class) {
// FILE
wrapper = new DBFileCollectionWrapper(this, (Collection<File>) collection, isRevisionCollection);
} else if (entityClass == LanguageFile.class) {
// LANGUAGE FILE
wrapper = new DBLanguageFileCollectionWrapper(this, (Collection<LanguageFile>) collection, isRevisionCollection);
} else if (entityClass == ImageFile.class) {
// IMAGE
wrapper = new DBImageCollectionWrapper(this, (Collection<ImageFile>) collection, isRevisionCollection);
} else if (entityClass == LanguageImage.class) {
// LANGUAGE IMAGE
wrapper = new DBLanguageImageCollectionWrapper(this, (Collection<LanguageImage>) collection, isRevisionCollection);
} else if (entityClass == User.class) {
// USER
wrapper = new DBUserCollectionWrapper(this, (Collection<User>) collection, isRevisionCollection);
} else if (entityClass == ContentSpec.class) {
// CONTENT SPEC
wrapper = new DBContentSpecCollectionWrapper(this, (Collection<ContentSpec>) collection, isRevisionCollection);
} else if (entityClass == CSNode.class) {
// CONTENT SPEC NODE
wrapper = new DBCSNodeCollectionWrapper(this, (Collection<CSNode>) collection, isRevisionCollection);
} else if (entityClass == CSNodeToCSNode.class) {
wrapper = new DBCSRelatedNodeCollectionWrapper(this, (Collection<CSNodeToCSNode>) collection, isRevisionCollection);
} else if (entityClass == CSInfoNode.class) {
// CONTENT SPEC INFO NODE
wrapper = new DBCSInfoNodeCollectionWrapper(this, (Collection<CSInfoNode>) collection, isRevisionCollection);
} else if (entityClass == TranslatedCSNode.class) {
// CONTENT SPEC TRANSLATED NODE
wrapper = new DBTranslatedCSNodeCollectionWrapper(this, (Collection<TranslatedCSNode>) collection, isRevisionCollection);
} else if (entityClass == TranslatedCSNodeString.class) {
// CONTENT SPEC TRANSLATED NODE STRING
wrapper = new DBTranslatedCSNodeStringCollectionWrapper(this, (Collection<TranslatedCSNodeString>) collection,
isRevisionCollection);
} else if (entityClass == CSTranslationDetail.class) {
// CONTENT SPEC TRANSLATION DETAIL
wrapper = new DBCSTranslationDetailCollectionWrapper(this, (Collection<CSTranslationDetail>) collection,
isRevisionCollection);
} else if (entityClass == UndefinedEntity.class) {
// UNDEFINED APPLICATION ENTITY
wrapper = new DBServerUndefinedEntityCollectionWrapper(this, (Collection<UndefinedEntity>) collection,
isRevisionCollection);
} else if (entityClass == UndefinedSetting.class) {
// UNDEFINED APPLICATION SETTING
wrapper = new DBServerUndefinedSettingCollectionWrapper(this, (Collection<UndefinedSetting>) collection,
isRevisionCollection);
} else {
throw new IllegalArgumentException(
"Failed to create a Collection Wrapper instance as there is no wrapper available for the Collection.");
}
// Add the wrapper to the cache
wrapperCache.putCollection(key, wrapper);
return wrapper;
} } | public class class_name {
@SuppressWarnings({"unchecked", "rawtypes"})
public <T extends BaseWrapper<T>, U> CollectionWrapper<T> createCollection(final Collection<U> collection, final Class<U> entityClass,
boolean isRevisionCollection) {
if (collection == null) {
return null; // depends on control dependency: [if], data = [none]
}
// Create the key
final DBWrapperKey key = new DBWrapperKey(collection, entityClass);
// Check to see if a wrapper has already been cached for the key
final DBCollectionWrapper cachedWrapper = wrapperCache.getCollection(key);
if (cachedWrapper != null) {
return cachedWrapper; // depends on control dependency: [if], data = [none]
}
final DBCollectionWrapper wrapper;
if (entityClass == Locale.class) {
// LOCALE
wrapper = new DBLocaleCollectionWrapper(this, (Collection<Locale>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TranslationServer.class) {
// TRANSLATION SERVER
throw new UnsupportedOperationException("A return class needs to be specified for TranslationServer entities.");
} else if (entityClass == Topic.class) {
// TOPIC
wrapper = new DBTopicCollectionWrapper(this, (Collection<Topic>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TopicSourceUrl.class) {
// TOPIC SOURCE URL
wrapper = new DBTopicSourceURLCollectionWrapper(this, (Collection<TopicSourceUrl>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TranslatedTopicData.class) {
// TRANSLATED TOPIC
wrapper = new DBTranslatedTopicDataCollectionWrapper(this, (Collection<TranslatedTopicData>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TranslatedTopicString.class) {
// TRANSLATED TOPIC STRING
wrapper = new DBTranslatedTopicStringCollectionWrapper(this, (Collection<TranslatedTopicString>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == Tag.class) {
// TAG
wrapper = new DBTagCollectionWrapper(this, (Collection<Tag>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == Category.class) {
// CATEGORY
wrapper = new DBCategoryCollectionWrapper(this, (Collection<Category>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TagToCategory.class) {
throw new UnsupportedOperationException("A return class needs to be specified for TagToCategory entities.");
} else if (entityClass == PropertyTagToPropertyTagCategory.class) {
// PROPERTY TAGS
wrapper = new DBPropertyTagInPropertyCategoryCollectionWrapper(this, (Collection<PropertyTagToPropertyTagCategory>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == PropertyTag.class) {
wrapper = new DBPropertyTagCollectionWrapper(this, (Collection<PropertyTag>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TopicToPropertyTag.class) {
wrapper = new DBTopicToPropertyTagCollectionWrapper(this, (Collection<TopicToPropertyTag>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TagToPropertyTag.class) {
wrapper = new DBTagToPropertyTagCollectionWrapper(this, (Collection<TagToPropertyTag>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == ContentSpecToPropertyTag.class) {
wrapper = new DBContentSpecToPropertyTagCollectionWrapper(this, (Collection<ContentSpecToPropertyTag>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == BlobConstants.class) {
// BLOB CONSTANT
wrapper = new DBBlobConstantCollectionWrapper(this, (Collection<BlobConstants>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == StringConstants.class) {
// STRING CONSTANT
wrapper = new DBStringConstantCollectionWrapper(this, (Collection<StringConstants>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == File.class) {
// FILE
wrapper = new DBFileCollectionWrapper(this, (Collection<File>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == LanguageFile.class) {
// LANGUAGE FILE
wrapper = new DBLanguageFileCollectionWrapper(this, (Collection<LanguageFile>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == ImageFile.class) {
// IMAGE
wrapper = new DBImageCollectionWrapper(this, (Collection<ImageFile>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == LanguageImage.class) {
// LANGUAGE IMAGE
wrapper = new DBLanguageImageCollectionWrapper(this, (Collection<LanguageImage>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == User.class) {
// USER
wrapper = new DBUserCollectionWrapper(this, (Collection<User>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == ContentSpec.class) {
// CONTENT SPEC
wrapper = new DBContentSpecCollectionWrapper(this, (Collection<ContentSpec>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == CSNode.class) {
// CONTENT SPEC NODE
wrapper = new DBCSNodeCollectionWrapper(this, (Collection<CSNode>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == CSNodeToCSNode.class) {
wrapper = new DBCSRelatedNodeCollectionWrapper(this, (Collection<CSNodeToCSNode>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == CSInfoNode.class) {
// CONTENT SPEC INFO NODE
wrapper = new DBCSInfoNodeCollectionWrapper(this, (Collection<CSInfoNode>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TranslatedCSNode.class) {
// CONTENT SPEC TRANSLATED NODE
wrapper = new DBTranslatedCSNodeCollectionWrapper(this, (Collection<TranslatedCSNode>) collection, isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == TranslatedCSNodeString.class) {
// CONTENT SPEC TRANSLATED NODE STRING
wrapper = new DBTranslatedCSNodeStringCollectionWrapper(this, (Collection<TranslatedCSNodeString>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == CSTranslationDetail.class) {
// CONTENT SPEC TRANSLATION DETAIL
wrapper = new DBCSTranslationDetailCollectionWrapper(this, (Collection<CSTranslationDetail>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == UndefinedEntity.class) {
// UNDEFINED APPLICATION ENTITY
wrapper = new DBServerUndefinedEntityCollectionWrapper(this, (Collection<UndefinedEntity>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else if (entityClass == UndefinedSetting.class) {
// UNDEFINED APPLICATION SETTING
wrapper = new DBServerUndefinedSettingCollectionWrapper(this, (Collection<UndefinedSetting>) collection,
isRevisionCollection); // depends on control dependency: [if], data = [none]
} else {
throw new IllegalArgumentException(
"Failed to create a Collection Wrapper instance as there is no wrapper available for the Collection.");
}
// Add the wrapper to the cache
wrapperCache.putCollection(key, wrapper);
return wrapper;
} } |
public class class_name {
private static UriPart unionUriParts(UriPart a, UriPart b) {
Preconditions.checkArgument(a != b);
if (a == UriPart.DANGEROUS_SCHEME || b == UriPart.DANGEROUS_SCHEME) {
// Dangerous schemes (like javascript:) are poison -- if either side is dangerous, the whole
// thing is.
return UriPart.DANGEROUS_SCHEME;
} else if (a == UriPart.FRAGMENT
|| b == UriPart.FRAGMENT
|| a == UriPart.UNKNOWN
|| b == UriPart.UNKNOWN) {
// UNKNOWN means one part is in the #fragment and one is not. This is the case if one is
// FRAGMENT and the other is not, or if one of the branches was UNKNOWN to begin with.
return UriPart.UNKNOWN;
} else if ((a == UriPart.MAYBE_VARIABLE_SCHEME || b == UriPart.MAYBE_VARIABLE_SCHEME)
&& a != UriPart.UNKNOWN_PRE_FRAGMENT
&& b != UriPart.UNKNOWN_PRE_FRAGMENT) {
// This is the case you might see on a URL that starts with a print statement, and one
// branch has a slash or ampersand but the other doesn't. Re-entering
// MAYBE_VARIABLE_SCHEME allows us to pretend that the last branch was just part of the
// leading print statement, which leaves us in a relatively-unknown state, but no more
// unknown than had it just been completely opaque.
//
// Good Example 1: {$urlWithQuery}{if $a}&a={$a}{/if}{if $b}&b={$b}{/if}
// In this example, the first "if" statement has two branches:
// - "true": {$urlWithQuery}&a={$a} looks like a QUERY due to hueristics
// - "false": {$urlWithQuery} only, which Soy doesn't know at compile-time to actually
// have a query, and it remains in MAYBE_VARIABLE_SCHEME.
// Instead of yielding UNKNOWN, this yields MAYBE_VARIABLE_SCHEME, which the second
// {if $b} can safely deal with.
//
// Good Example 2: {$base}{if $a}/a{/if}{if $b}/b{/if}
// In this, one branch transitions definitely into an authority or path, but the other
// might not. However, we can remain in MAYBE_VARIABLE_SCHEME safely.
return UriPart.MAYBE_VARIABLE_SCHEME;
} else {
// The part is unknown, but we think it's before the fragment. In this case, it's clearly
// ambiguous at compile-time what to do. Examples:
//
// /foo/{if $cond}?a={/if}
// {$base}{if $cond}?a={$a}{else}/b{/if}
// {if $cond}{$base}{else}/a{if $cond2}?b=1{/if}{/if}
//
// Unlike MAYBE_VARIABLE_SCHEME, we don't need to try to gracefully recover here, because
// the template author can easily disambiguate this.
return UriPart.UNKNOWN_PRE_FRAGMENT;
}
} } | public class class_name {
private static UriPart unionUriParts(UriPart a, UriPart b) {
Preconditions.checkArgument(a != b);
if (a == UriPart.DANGEROUS_SCHEME || b == UriPart.DANGEROUS_SCHEME) {
// Dangerous schemes (like javascript:) are poison -- if either side is dangerous, the whole
// thing is.
return UriPart.DANGEROUS_SCHEME; // depends on control dependency: [if], data = [none]
} else if (a == UriPart.FRAGMENT
|| b == UriPart.FRAGMENT
|| a == UriPart.UNKNOWN
|| b == UriPart.UNKNOWN) {
// UNKNOWN means one part is in the #fragment and one is not. This is the case if one is
// FRAGMENT and the other is not, or if one of the branches was UNKNOWN to begin with.
return UriPart.UNKNOWN; // depends on control dependency: [if], data = [none]
} else if ((a == UriPart.MAYBE_VARIABLE_SCHEME || b == UriPart.MAYBE_VARIABLE_SCHEME)
&& a != UriPart.UNKNOWN_PRE_FRAGMENT
&& b != UriPart.UNKNOWN_PRE_FRAGMENT) {
// This is the case you might see on a URL that starts with a print statement, and one
// branch has a slash or ampersand but the other doesn't. Re-entering
// MAYBE_VARIABLE_SCHEME allows us to pretend that the last branch was just part of the
// leading print statement, which leaves us in a relatively-unknown state, but no more
// unknown than had it just been completely opaque.
//
// Good Example 1: {$urlWithQuery}{if $a}&a={$a}{/if}{if $b}&b={$b}{/if}
// In this example, the first "if" statement has two branches:
// - "true": {$urlWithQuery}&a={$a} looks like a QUERY due to hueristics
// - "false": {$urlWithQuery} only, which Soy doesn't know at compile-time to actually
// have a query, and it remains in MAYBE_VARIABLE_SCHEME.
// Instead of yielding UNKNOWN, this yields MAYBE_VARIABLE_SCHEME, which the second
// {if $b} can safely deal with.
//
// Good Example 2: {$base}{if $a}/a{/if}{if $b}/b{/if}
// In this, one branch transitions definitely into an authority or path, but the other
// might not. However, we can remain in MAYBE_VARIABLE_SCHEME safely.
return UriPart.MAYBE_VARIABLE_SCHEME; // depends on control dependency: [if], data = [UriPart.MAYBE_VARIABLE_SCHEME]
} else {
// The part is unknown, but we think it's before the fragment. In this case, it's clearly
// ambiguous at compile-time what to do. Examples:
//
// /foo/{if $cond}?a={/if}
// {$base}{if $cond}?a={$a}{else}/b{/if}
// {if $cond}{$base}{else}/a{if $cond2}?b=1{/if}{/if}
//
// Unlike MAYBE_VARIABLE_SCHEME, we don't need to try to gracefully recover here, because
// the template author can easily disambiguate this.
return UriPart.UNKNOWN_PRE_FRAGMENT; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public File getTargetFile(final MiscContentItem item) {
final State state = this.state;
if (state == State.NEW || state == State.ROLLBACK_ONLY) {
return getTargetFile(miscTargetRoot, item);
} else {
throw new IllegalStateException(); // internal wrong usage, no i18n
}
} } | public class class_name {
public File getTargetFile(final MiscContentItem item) {
final State state = this.state;
if (state == State.NEW || state == State.ROLLBACK_ONLY) {
return getTargetFile(miscTargetRoot, item); // depends on control dependency: [if], data = [none]
} else {
throw new IllegalStateException(); // internal wrong usage, no i18n
}
} } |
public class class_name {
public static boolean isUri(String potentialUri) {
if (StringUtils.isBlank(potentialUri)) {
return false;
}
try {
URI uri = new URI(potentialUri);
return uri.getScheme() != null && uri.getHost() != null;
} catch (URISyntaxException e) {
return false;
}
} } | public class class_name {
public static boolean isUri(String potentialUri) {
if (StringUtils.isBlank(potentialUri)) {
return false; // depends on control dependency: [if], data = [none]
}
try {
URI uri = new URI(potentialUri);
return uri.getScheme() != null && uri.getHost() != null; // depends on control dependency: [try], data = [none]
} catch (URISyntaxException e) {
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected void suspendForcedlyBegunLazyTransactionIfNeeds() throws SystemException {
if (logger.isDebugEnabled()) {
logger.debug("#lazyTx ...Suspending the outer forcedly-begun lazy transaction: {}", buildLazyTxExp());
}
final Transaction suspended = transactionManager.suspend();
arrangeForcedlyBegunResumer(() -> {
if (isHerarchyLevelFirst()) {
if (logger.isDebugEnabled()) {
logger.debug("#lazyTx ...Resuming the outer forcedly-begun lazy transaction: {}", buildLazyTxExp());
}
doResumeForcedlyBegunLazyTransaction(suspended);
return true;
} else {
return false;
}
});
} } | public class class_name {
protected void suspendForcedlyBegunLazyTransactionIfNeeds() throws SystemException {
if (logger.isDebugEnabled()) {
logger.debug("#lazyTx ...Suspending the outer forcedly-begun lazy transaction: {}", buildLazyTxExp());
}
final Transaction suspended = transactionManager.suspend();
arrangeForcedlyBegunResumer(() -> {
if (isHerarchyLevelFirst()) {
if (logger.isDebugEnabled()) {
logger.debug("#lazyTx ...Resuming the outer forcedly-begun lazy transaction: {}", buildLazyTxExp()); // depends on control dependency: [if], data = [none]
}
doResumeForcedlyBegunLazyTransaction(suspended); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
} else {
return false; // depends on control dependency: [if], data = [none]
}
});
} } |
public class class_name {
public static String getText(TextView view) {
String text = "";
if (view != null) {
text = view.getText().toString();
} else {
Log.e("Caffeine", "Null view given to getText(). \"\" will be returned.");
}
return text;
} } | public class class_name {
public static String getText(TextView view) {
String text = "";
if (view != null) {
text = view.getText().toString(); // depends on control dependency: [if], data = [none]
} else {
Log.e("Caffeine", "Null view given to getText(). \"\" will be returned."); // depends on control dependency: [if], data = [none]
}
return text;
} } |
public class class_name {
public Choice<T> or(final Choice<T> other) {
checkNotNull(other);
if (other == none()) {
return this;
} else {
final Choice<T> thisChoice = this;
return new Choice<T>() {
@Override
protected Iterator<T> iterator() {
return Iterators.concat(thisChoice.iterator(), other.iterator());
}
@Override
public String toString() {
return String.format("%s.or(%s)", thisChoice, other);
}
};
}
} } | public class class_name {
public Choice<T> or(final Choice<T> other) {
checkNotNull(other);
if (other == none()) {
return this; // depends on control dependency: [if], data = [none]
} else {
final Choice<T> thisChoice = this;
return new Choice<T>() {
@Override
protected Iterator<T> iterator() {
return Iterators.concat(thisChoice.iterator(), other.iterator());
}
@Override
public String toString() {
return String.format("%s.or(%s)", thisChoice, other);
}
}; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public CommerceWishList fetchByG_U_D_Last(long groupId, long userId,
boolean defaultWishList,
OrderByComparator<CommerceWishList> orderByComparator) {
int count = countByG_U_D(groupId, userId, defaultWishList);
if (count == 0) {
return null;
}
List<CommerceWishList> list = findByG_U_D(groupId, userId,
defaultWishList, count - 1, count, orderByComparator);
if (!list.isEmpty()) {
return list.get(0);
}
return null;
} } | public class class_name {
@Override
public CommerceWishList fetchByG_U_D_Last(long groupId, long userId,
boolean defaultWishList,
OrderByComparator<CommerceWishList> orderByComparator) {
int count = countByG_U_D(groupId, userId, defaultWishList);
if (count == 0) {
return null; // depends on control dependency: [if], data = [none]
}
List<CommerceWishList> list = findByG_U_D(groupId, userId,
defaultWishList, count - 1, count, orderByComparator);
if (!list.isEmpty()) {
return list.get(0); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public synchronized BeanO createSingletonBeanO()
{
boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (isTraceOn && tc.isEntryEnabled())
Tr.entry(tc, "createSingletonBeanO: " + j2eeName);
// F743-4950 - This check is done (unsynchronized) from createBeanO, but
// it also needs to happen for dependencies.
homeEnabled();
BeanO result = null;
if (ivSingletonBeanO == null)
{
if (ivCreatingSingletonBeanO)
{
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "createSingletonBeanO: IllegalLoopbackException");
throw new IllegalLoopbackException("Cannot call a method on a singleton session bean while " +
"constructing the bean instance : " + j2eeName);
}
// F7434950.CodRev - The EG has decided that the container must only
// attempt to initialize a singleton once.
// d632115 - and the required exception is NoSuchEJBException.
if (ivSingletonBeanOCreateFailed)
{
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "createSingletonBeanO: NoSuchEJBException - prior failure");
throw ExceptionUtil.NoSuchEJBException
("An error occurred during a previous attempt to initialize the " +
"singleton session bean " + j2eeName + ".", null);
}
// F743-4950 - Avoid direct (or indirect) attempts to use this
// singleton before it has finished initializing.
ivCreatingSingletonBeanO = true;
try
{
// If anything below this point fails, then any subsequent attempts
// to create the bean instance must also fail. d632115
ivSingletonBeanOCreateFailed = true; // F7434950.CodRev
// F743-20281 - Resolve dependencies.
List<J2EEName> dependsOn;
try
{
dependsOn = beanMetaData._moduleMetaData.getEJBApplicationMetaData().resolveBeanDependencies(beanMetaData);
} catch (RuntimeWarning rw)
{
if (isTraceOn && tc.isDebugEnabled())
Tr.debug(tc, "dependency resolution error", rw);
throw ExceptionUtil.NoSuchEJBException(rw.getMessage(), rw);
}
// F743-4950 - Initialize dependencies before this singleton.
if (dependsOn != null) // F743-20281
{
for (J2EEName dependency : dependsOn) // F743-20281
{
if (isTraceOn && tc.isDebugEnabled())
Tr.debug(tc, "initializing dependency " + dependency);
try
{
EJSHome dependencyHome = (EJSHome) EJSContainer.homeOfHomes.getHome(dependency);
dependencyHome.createSingletonBeanO();
} catch (Throwable t)
{
if (isTraceOn && tc.isDebugEnabled())
Tr.exit(tc, "createSingletonBeanO: failed to initialize dependency",
t);
// d632115 - required exception is NoSuchEJBException.
throw ExceptionUtil.NoSuchEJBException
("Failed to initialize singleton session bean " + j2eeName +
" because the dependency " + dependency +
" failed to initialize.", t);
}
}
}
// Now that dependencies have been initialized, add the singleton
// to the initialization list. It doesn't matter if initialization
// fails since addInitializedSingleton is idempotent, and we really
// only care that the initialization is recorded for this home at
// some point after it is recorded for all its dependencies.
beanMetaData._moduleMetaData.getEJBApplicationMetaData().addSingletonInitialization(this);
long createStartTime = -1;
Object oldClassLoader = ThreadContextAccessor.UNCHANGED; // d627931
try
{
// For Singleton, create time should include creating the
// instance and calling any lifecycle callbacks. d626533.1
if (pmiBean != null)
{
createStartTime = pmiBean.initialTime(EJBPMICollaborator.CREATE_RT);
}
// To support injection, etc. we must put the bmd and classloader
// on the thread. // d627931
ivCMDAccessor.beginContext(beanMetaData);
oldClassLoader = EJBThreadData.svThreadContextAccessor.pushContextClassLoaderForUnprivileged(beanMetaData.ivContextClassLoader); // F85059
ivSingletonBeanO = (SingletonBeanO) beanOFactory.create(container, this, false);
ivSingletonBeanOCreateFailed = false; // F7434950.CodRev
} catch (Throwable t)
{
FFDCFilter.processException(t, CLASS_NAME + ".createBeanO", "1047", this);
if (t instanceof InvocationTargetException)
{
t = t.getCause();
}
// F743-1751CodRev - Always wrap the exception in EJBException to
// satisfy the contract of preInvokeForLifecycleInterceptors.
// d632115 - and the required exception is NoSuchEJBException.
String msgTxt = "An error occurred during initialization of singleton session bean " +
j2eeName + ", resulting in the discarding of the singleton instance.";
throw ExceptionUtil.NoSuchEJBException(msgTxt, t);
} finally // d627931
{
EJBThreadData.svThreadContextAccessor.popContextClassLoaderForUnprivileged(oldClassLoader);
ivCMDAccessor.endContext();
// Even if the create fails, go ahead and add the time, so
// the number of times counted matches the create count.
if (createStartTime > -1)
{
pmiBean.finalTime(EJBPMICollaborator.CREATE_RT, createStartTime);
}
}
} finally
{
ivCreatingSingletonBeanO = false;
}
}
// Return the cached Singleton instance.
result = ivSingletonBeanO;
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "createSingletonBeanO");
return result;
} } | public class class_name {
public synchronized BeanO createSingletonBeanO()
{
boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (isTraceOn && tc.isEntryEnabled())
Tr.entry(tc, "createSingletonBeanO: " + j2eeName);
// F743-4950 - This check is done (unsynchronized) from createBeanO, but
// it also needs to happen for dependencies.
homeEnabled();
BeanO result = null;
if (ivSingletonBeanO == null)
{
if (ivCreatingSingletonBeanO)
{
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "createSingletonBeanO: IllegalLoopbackException");
throw new IllegalLoopbackException("Cannot call a method on a singleton session bean while " +
"constructing the bean instance : " + j2eeName);
}
// F7434950.CodRev - The EG has decided that the container must only
// attempt to initialize a singleton once.
// d632115 - and the required exception is NoSuchEJBException.
if (ivSingletonBeanOCreateFailed)
{
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "createSingletonBeanO: NoSuchEJBException - prior failure");
throw ExceptionUtil.NoSuchEJBException
("An error occurred during a previous attempt to initialize the " +
"singleton session bean " + j2eeName + ".", null);
}
// F743-4950 - Avoid direct (or indirect) attempts to use this
// singleton before it has finished initializing.
ivCreatingSingletonBeanO = true; // depends on control dependency: [if], data = [none]
try
{
// If anything below this point fails, then any subsequent attempts
// to create the bean instance must also fail. d632115
ivSingletonBeanOCreateFailed = true; // F7434950.CodRev // depends on control dependency: [try], data = [none]
// F743-20281 - Resolve dependencies.
List<J2EEName> dependsOn;
try
{
dependsOn = beanMetaData._moduleMetaData.getEJBApplicationMetaData().resolveBeanDependencies(beanMetaData); // depends on control dependency: [try], data = [none]
} catch (RuntimeWarning rw)
{
if (isTraceOn && tc.isDebugEnabled())
Tr.debug(tc, "dependency resolution error", rw);
throw ExceptionUtil.NoSuchEJBException(rw.getMessage(), rw);
} // depends on control dependency: [catch], data = [none]
// F743-4950 - Initialize dependencies before this singleton.
if (dependsOn != null) // F743-20281
{
for (J2EEName dependency : dependsOn) // F743-20281
{
if (isTraceOn && tc.isDebugEnabled())
Tr.debug(tc, "initializing dependency " + dependency);
try
{
EJSHome dependencyHome = (EJSHome) EJSContainer.homeOfHomes.getHome(dependency);
dependencyHome.createSingletonBeanO(); // depends on control dependency: [try], data = [none]
} catch (Throwable t)
{
if (isTraceOn && tc.isDebugEnabled())
Tr.exit(tc, "createSingletonBeanO: failed to initialize dependency",
t);
// d632115 - required exception is NoSuchEJBException.
throw ExceptionUtil.NoSuchEJBException
("Failed to initialize singleton session bean " + j2eeName +
" because the dependency " + dependency +
" failed to initialize.", t);
} // depends on control dependency: [catch], data = [none]
}
}
// Now that dependencies have been initialized, add the singleton
// to the initialization list. It doesn't matter if initialization
// fails since addInitializedSingleton is idempotent, and we really
// only care that the initialization is recorded for this home at
// some point after it is recorded for all its dependencies.
beanMetaData._moduleMetaData.getEJBApplicationMetaData().addSingletonInitialization(this); // depends on control dependency: [try], data = [none]
long createStartTime = -1;
Object oldClassLoader = ThreadContextAccessor.UNCHANGED; // d627931
try
{
// For Singleton, create time should include creating the
// instance and calling any lifecycle callbacks. d626533.1
if (pmiBean != null)
{
createStartTime = pmiBean.initialTime(EJBPMICollaborator.CREATE_RT); // depends on control dependency: [if], data = [none]
}
// To support injection, etc. we must put the bmd and classloader
// on the thread. // d627931
ivCMDAccessor.beginContext(beanMetaData); // depends on control dependency: [try], data = [none]
oldClassLoader = EJBThreadData.svThreadContextAccessor.pushContextClassLoaderForUnprivileged(beanMetaData.ivContextClassLoader); // F85059 // depends on control dependency: [try], data = [none]
ivSingletonBeanO = (SingletonBeanO) beanOFactory.create(container, this, false); // depends on control dependency: [try], data = [none]
ivSingletonBeanOCreateFailed = false; // F7434950.CodRev // depends on control dependency: [try], data = [none]
} catch (Throwable t)
{
FFDCFilter.processException(t, CLASS_NAME + ".createBeanO", "1047", this);
if (t instanceof InvocationTargetException)
{
t = t.getCause(); // depends on control dependency: [if], data = [none]
}
// F743-1751CodRev - Always wrap the exception in EJBException to
// satisfy the contract of preInvokeForLifecycleInterceptors.
// d632115 - and the required exception is NoSuchEJBException.
String msgTxt = "An error occurred during initialization of singleton session bean " +
j2eeName + ", resulting in the discarding of the singleton instance.";
throw ExceptionUtil.NoSuchEJBException(msgTxt, t);
} finally // d627931 // depends on control dependency: [catch], data = [none]
{
EJBThreadData.svThreadContextAccessor.popContextClassLoaderForUnprivileged(oldClassLoader);
ivCMDAccessor.endContext();
// Even if the create fails, go ahead and add the time, so
// the number of times counted matches the create count.
if (createStartTime > -1)
{
pmiBean.finalTime(EJBPMICollaborator.CREATE_RT, createStartTime); // depends on control dependency: [if], data = [none]
}
}
} finally
{
ivCreatingSingletonBeanO = false;
}
}
// Return the cached Singleton instance.
result = ivSingletonBeanO;
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "createSingletonBeanO");
return result;
} } |
public class class_name {
@Transactional
@Override
public void importData(ExternalStylesheetDescriptor data) {
final String stylesheetName = data.getName();
final String uri = data.getUri();
IStylesheetDescriptor stylesheetDescriptor =
this.stylesheetDescriptorDao.getStylesheetDescriptorByName(stylesheetName);
if (stylesheetDescriptor == null) {
stylesheetDescriptor =
this.stylesheetDescriptorDao.createStylesheetDescriptor(stylesheetName, uri);
} else {
stylesheetDescriptor.setStylesheetResource(uri);
}
stylesheetDescriptor.setUrlNodeSyntaxHelperName(data.getUrlSyntaxHelper());
stylesheetDescriptor.setDescription(data.getDescription());
final List<ExternalOutputPropertyDescriptor> extOutputProperties =
data.getOutputProperties();
final List<IOutputPropertyDescriptor> outputPropertyDescriptors =
new ArrayList<IOutputPropertyDescriptor>(extOutputProperties.size());
for (final ExternalOutputPropertyDescriptor extOutputProperty : extOutputProperties) {
final String name = extOutputProperty.getName();
final Scope scope = Scope.valueOf(extOutputProperty.getScope().name());
final OutputPropertyDescriptorImpl outputPropertyDescriptor =
new OutputPropertyDescriptorImpl(name, scope);
outputPropertyDescriptor.setDefaultValue(extOutputProperty.getDefaultValue());
outputPropertyDescriptor.setDescription(extOutputProperty.getDescription());
outputPropertyDescriptors.add(outputPropertyDescriptor);
}
stylesheetDescriptor.setOutputPropertyDescriptors(outputPropertyDescriptors);
final List<ExternalStylesheetParameterDescriptor> extStylesheetParameters =
data.getStylesheetParameters();
final List<IStylesheetParameterDescriptor> stylesheetParameterDescriptors =
new ArrayList<IStylesheetParameterDescriptor>(extOutputProperties.size());
for (final ExternalStylesheetParameterDescriptor extStylesheetParameter :
extStylesheetParameters) {
final String name = extStylesheetParameter.getName();
final Scope scope = Scope.valueOf(extStylesheetParameter.getScope().name());
final StylesheetParameterDescriptorImpl stylesheetParameterDescriptor =
new StylesheetParameterDescriptorImpl(name, scope);
stylesheetParameterDescriptor.setDefaultValue(extStylesheetParameter.getDefaultValue());
stylesheetParameterDescriptor.setDescription(extStylesheetParameter.getDescription());
stylesheetParameterDescriptors.add(stylesheetParameterDescriptor);
}
stylesheetDescriptor.setStylesheetParameterDescriptors(stylesheetParameterDescriptors);
final List<ExternalLayoutAttributeDescriptor> extLayoutAttributes =
data.getLayoutAttributes();
final List<ILayoutAttributeDescriptor> layoutAttributeDescriptors =
new ArrayList<ILayoutAttributeDescriptor>(extOutputProperties.size());
for (final ExternalLayoutAttributeDescriptor extLayoutAttribute : extLayoutAttributes) {
final String name = extLayoutAttribute.getName();
final Scope scope = Scope.valueOf(extLayoutAttribute.getScope().name());
final LayoutAttributeDescriptorImpl layoutAttributeDescriptor =
new LayoutAttributeDescriptorImpl(name, scope);
layoutAttributeDescriptor.setDefaultValue(extLayoutAttribute.getDefaultValue());
layoutAttributeDescriptor.setDescription(extLayoutAttribute.getDescription());
layoutAttributeDescriptor.setTargetElementNames(
new LinkedHashSet<String>(extLayoutAttribute.getTargetElements()));
layoutAttributeDescriptors.add(layoutAttributeDescriptor);
}
stylesheetDescriptor.setLayoutAttributeDescriptors(layoutAttributeDescriptors);
this.stylesheetDescriptorDao.updateStylesheetDescriptor(stylesheetDescriptor);
} } | public class class_name {
@Transactional
@Override
public void importData(ExternalStylesheetDescriptor data) {
final String stylesheetName = data.getName();
final String uri = data.getUri();
IStylesheetDescriptor stylesheetDescriptor =
this.stylesheetDescriptorDao.getStylesheetDescriptorByName(stylesheetName);
if (stylesheetDescriptor == null) {
stylesheetDescriptor =
this.stylesheetDescriptorDao.createStylesheetDescriptor(stylesheetName, uri); // depends on control dependency: [if], data = [none]
} else {
stylesheetDescriptor.setStylesheetResource(uri); // depends on control dependency: [if], data = [none]
}
stylesheetDescriptor.setUrlNodeSyntaxHelperName(data.getUrlSyntaxHelper());
stylesheetDescriptor.setDescription(data.getDescription());
final List<ExternalOutputPropertyDescriptor> extOutputProperties =
data.getOutputProperties();
final List<IOutputPropertyDescriptor> outputPropertyDescriptors =
new ArrayList<IOutputPropertyDescriptor>(extOutputProperties.size());
for (final ExternalOutputPropertyDescriptor extOutputProperty : extOutputProperties) {
final String name = extOutputProperty.getName();
final Scope scope = Scope.valueOf(extOutputProperty.getScope().name());
final OutputPropertyDescriptorImpl outputPropertyDescriptor =
new OutputPropertyDescriptorImpl(name, scope);
outputPropertyDescriptor.setDefaultValue(extOutputProperty.getDefaultValue()); // depends on control dependency: [for], data = [extOutputProperty]
outputPropertyDescriptor.setDescription(extOutputProperty.getDescription()); // depends on control dependency: [for], data = [extOutputProperty]
outputPropertyDescriptors.add(outputPropertyDescriptor); // depends on control dependency: [for], data = [none]
}
stylesheetDescriptor.setOutputPropertyDescriptors(outputPropertyDescriptors);
final List<ExternalStylesheetParameterDescriptor> extStylesheetParameters =
data.getStylesheetParameters();
final List<IStylesheetParameterDescriptor> stylesheetParameterDescriptors =
new ArrayList<IStylesheetParameterDescriptor>(extOutputProperties.size());
for (final ExternalStylesheetParameterDescriptor extStylesheetParameter :
extStylesheetParameters) {
final String name = extStylesheetParameter.getName();
final Scope scope = Scope.valueOf(extStylesheetParameter.getScope().name());
final StylesheetParameterDescriptorImpl stylesheetParameterDescriptor =
new StylesheetParameterDescriptorImpl(name, scope);
stylesheetParameterDescriptor.setDefaultValue(extStylesheetParameter.getDefaultValue()); // depends on control dependency: [for], data = [extStylesheetParameter]
stylesheetParameterDescriptor.setDescription(extStylesheetParameter.getDescription()); // depends on control dependency: [for], data = [extStylesheetParameter]
stylesheetParameterDescriptors.add(stylesheetParameterDescriptor); // depends on control dependency: [for], data = [none]
}
stylesheetDescriptor.setStylesheetParameterDescriptors(stylesheetParameterDescriptors);
final List<ExternalLayoutAttributeDescriptor> extLayoutAttributes =
data.getLayoutAttributes();
final List<ILayoutAttributeDescriptor> layoutAttributeDescriptors =
new ArrayList<ILayoutAttributeDescriptor>(extOutputProperties.size());
for (final ExternalLayoutAttributeDescriptor extLayoutAttribute : extLayoutAttributes) {
final String name = extLayoutAttribute.getName();
final Scope scope = Scope.valueOf(extLayoutAttribute.getScope().name());
final LayoutAttributeDescriptorImpl layoutAttributeDescriptor =
new LayoutAttributeDescriptorImpl(name, scope);
layoutAttributeDescriptor.setDefaultValue(extLayoutAttribute.getDefaultValue()); // depends on control dependency: [for], data = [extLayoutAttribute]
layoutAttributeDescriptor.setDescription(extLayoutAttribute.getDescription()); // depends on control dependency: [for], data = [extLayoutAttribute]
layoutAttributeDescriptor.setTargetElementNames(
new LinkedHashSet<String>(extLayoutAttribute.getTargetElements())); // depends on control dependency: [for], data = [none]
layoutAttributeDescriptors.add(layoutAttributeDescriptor); // depends on control dependency: [for], data = [none]
}
stylesheetDescriptor.setLayoutAttributeDescriptors(layoutAttributeDescriptors);
this.stylesheetDescriptorDao.updateStylesheetDescriptor(stylesheetDescriptor);
} } |
public class class_name {
protected String getStringArg(int index, String defaultValue) {
String result = defaultValue;
String[] arg = getArgs();
if (arg != null) {
if (arg.length > index) {
result = arg[index];
}
}
return result;
} } | public class class_name {
protected String getStringArg(int index, String defaultValue) {
String result = defaultValue;
String[] arg = getArgs();
if (arg != null) {
if (arg.length > index) {
result = arg[index]; // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
public StackSetOperationPreferences withRegionOrder(String... regionOrder) {
if (this.regionOrder == null) {
setRegionOrder(new com.amazonaws.internal.SdkInternalList<String>(regionOrder.length));
}
for (String ele : regionOrder) {
this.regionOrder.add(ele);
}
return this;
} } | public class class_name {
public StackSetOperationPreferences withRegionOrder(String... regionOrder) {
if (this.regionOrder == null) {
setRegionOrder(new com.amazonaws.internal.SdkInternalList<String>(regionOrder.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : regionOrder) {
this.regionOrder.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public static String joinWithOriginalWhiteSpace(List<CoreLabel> tokens) {
if (tokens.size() == 0) {
return "";
}
CoreLabel lastToken = tokens.get(0);
StringBuffer buffer = new StringBuffer(lastToken.word());
for (int i = 1; i < tokens.size(); i++) {
CoreLabel currentToken = tokens.get(i);
int numSpaces = currentToken.beginPosition() - lastToken.endPosition();
if (numSpaces < 0) {
numSpaces = 0;
}
buffer.append(repeat(' ', numSpaces) + currentToken.word());
lastToken = currentToken;
}
return buffer.toString();
} } | public class class_name {
public static String joinWithOriginalWhiteSpace(List<CoreLabel> tokens) {
if (tokens.size() == 0) {
return "";
// depends on control dependency: [if], data = [none]
}
CoreLabel lastToken = tokens.get(0);
StringBuffer buffer = new StringBuffer(lastToken.word());
for (int i = 1; i < tokens.size(); i++) {
CoreLabel currentToken = tokens.get(i);
int numSpaces = currentToken.beginPosition() - lastToken.endPosition();
if (numSpaces < 0) {
numSpaces = 0;
// depends on control dependency: [if], data = [none]
}
buffer.append(repeat(' ', numSpaces) + currentToken.word());
// depends on control dependency: [for], data = [none]
lastToken = currentToken;
// depends on control dependency: [for], data = [none]
}
return buffer.toString();
} } |
public class class_name {
public static void decodeUtf8To(String query, int offset, int length, MultiMap<String> map) {
Utf8StringBuilder buffer = new Utf8StringBuilder();
synchronized (map) {
String key = null;
String value = null;
int end = offset + length;
for (int i = offset; i < end; i++) {
char c = query.charAt(i);
switch (c) {
case '&':
value = buffer.toReplacedString();
buffer.reset();
if (key != null) {
map.add(key, value);
} else if (value != null && value.length() > 0) {
map.add(value, "");
}
key = null;
value = null;
break;
case '=':
if (key != null) {
buffer.append(c);
break;
}
key = buffer.toReplacedString();
buffer.reset();
break;
case '+':
buffer.append((byte) ' ');
break;
case '%':
if (i + 2 < end) {
char hi = query.charAt(++i);
char lo = query.charAt(++i);
buffer.append(decodeHexByte(hi, lo));
} else {
throw new Utf8Appendable.NotUtf8Exception("Incomplete % encoding");
}
break;
default:
buffer.append(c);
break;
}
}
if (key != null) {
value = buffer.toReplacedString();
buffer.reset();
map.add(key, value);
} else if (buffer.length() > 0) {
map.add(buffer.toReplacedString(), "");
}
}
} } | public class class_name {
public static void decodeUtf8To(String query, int offset, int length, MultiMap<String> map) {
Utf8StringBuilder buffer = new Utf8StringBuilder();
synchronized (map) {
String key = null;
String value = null;
int end = offset + length;
for (int i = offset; i < end; i++) {
char c = query.charAt(i);
switch (c) {
case '&':
value = buffer.toReplacedString();
buffer.reset();
if (key != null) {
map.add(key, value); // depends on control dependency: [if], data = [(key]
} else if (value != null && value.length() > 0) {
map.add(value, ""); // depends on control dependency: [if], data = [(value]
}
key = null;
value = null;
break;
case '=':
if (key != null) {
buffer.append(c); // depends on control dependency: [if], data = [none]
break;
}
key = buffer.toReplacedString();
buffer.reset();
break;
case '+':
buffer.append((byte) ' ');
break;
case '%':
if (i + 2 < end) {
char hi = query.charAt(++i);
char lo = query.charAt(++i);
buffer.append(decodeHexByte(hi, lo)); // depends on control dependency: [if], data = [none]
} else {
throw new Utf8Appendable.NotUtf8Exception("Incomplete % encoding");
}
break;
default:
buffer.append(c);
break;
}
}
if (key != null) {
value = buffer.toReplacedString(); // depends on control dependency: [if], data = [none]
buffer.reset(); // depends on control dependency: [if], data = [none]
map.add(key, value); // depends on control dependency: [if], data = [(key]
} else if (buffer.length() > 0) {
map.add(buffer.toReplacedString(), ""); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void setItems(java.util.Collection<RequestValidator> items) {
if (items == null) {
this.items = null;
return;
}
this.items = new java.util.ArrayList<RequestValidator>(items);
} } | public class class_name {
public void setItems(java.util.Collection<RequestValidator> items) {
if (items == null) {
this.items = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.items = new java.util.ArrayList<RequestValidator>(items);
} } |
public class class_name {
private String requestShareUrl(DropboxAPI<AndroidAuthSession> dropboxApi) {
String shareUrl = null;
if (dropboxApi != null) {
try {
shareUrl = dropboxApi.share("/" + mContext.getString(R.string.wings_dropbox__photo_folder)).url;
} catch (DropboxException e) {
// Do nothing.
}
}
return shareUrl;
} } | public class class_name {
private String requestShareUrl(DropboxAPI<AndroidAuthSession> dropboxApi) {
String shareUrl = null;
if (dropboxApi != null) {
try {
shareUrl = dropboxApi.share("/" + mContext.getString(R.string.wings_dropbox__photo_folder)).url; // depends on control dependency: [try], data = [none]
} catch (DropboxException e) {
// Do nothing.
} // depends on control dependency: [catch], data = [none]
}
return shareUrl;
} } |
public class class_name {
BusGroup getBus()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
SibTr.entry(tc, "getBus");
SibTr.exit(tc, "getBus", iBusGroup);
}
return iBusGroup;
} } | public class class_name {
BusGroup getBus()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
SibTr.entry(tc, "getBus"); // depends on control dependency: [if], data = [none]
SibTr.exit(tc, "getBus", iBusGroup); // depends on control dependency: [if], data = [none]
}
return iBusGroup;
} } |
public class class_name {
private String fillClazz(String configClazz, String clazz) {
if (!clazz.equals(configClazz)) {
return configClazz;
} else {
return null;
}
} } | public class class_name {
private String fillClazz(String configClazz, String clazz) {
if (!clazz.equals(configClazz)) {
return configClazz; // depends on control dependency: [if], data = [none]
} else {
return null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static FieldExtension getField(DelegateExecution execution,
String fieldName) {
if (isExecutingExecutionListener(execution)) {
return getListenerField(execution,
fieldName);
} else {
return getFlowElementField(execution,
fieldName);
}
} } | public class class_name {
public static FieldExtension getField(DelegateExecution execution,
String fieldName) {
if (isExecutingExecutionListener(execution)) {
return getListenerField(execution,
fieldName); // depends on control dependency: [if], data = [none]
} else {
return getFlowElementField(execution,
fieldName); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static Object getFieldValue(Object bean, String fieldNameOrIndex) {
if (null == bean || null == fieldNameOrIndex) {
return null;
}
if (bean instanceof Map) {
return ((Map<?, ?>) bean).get(fieldNameOrIndex);
} else if (bean instanceof Collection) {
return CollUtil.get((Collection<?>) bean, Integer.parseInt(fieldNameOrIndex));
} else if (ArrayUtil.isArray(bean)) {
return ArrayUtil.get(bean, Integer.parseInt(fieldNameOrIndex));
        } else {// plain Bean object
return ReflectUtil.getFieldValue(bean, fieldNameOrIndex);
}
} } | public class class_name {
public static Object getFieldValue(Object bean, String fieldNameOrIndex) {
if (null == bean || null == fieldNameOrIndex) {
return null;
// depends on control dependency: [if], data = [none]
}
if (bean instanceof Map) {
return ((Map<?, ?>) bean).get(fieldNameOrIndex);
// depends on control dependency: [if], data = [none]
} else if (bean instanceof Collection) {
return CollUtil.get((Collection<?>) bean, Integer.parseInt(fieldNameOrIndex));
// depends on control dependency: [if], data = [none]
} else if (ArrayUtil.isArray(bean)) {
return ArrayUtil.get(bean, Integer.parseInt(fieldNameOrIndex));
// depends on control dependency: [if], data = [none]
        } else {// plain Bean object
return ReflectUtil.getFieldValue(bean, fieldNameOrIndex);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static <T> void finish(T query, long correlationId, EventBus bus, String... types) {
for (String type : types) {
RemoveQuery<T> next = finish(query, correlationId, type);
bus.post(next);
}
} } | public class class_name {
public static <T> void finish(T query, long correlationId, EventBus bus, String... types) {
for (String type : types) {
RemoveQuery<T> next = finish(query, correlationId, type);
bus.post(next); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
public String resolveExpression() {
String expr = getType().getMetricExpression();
if (expr == null || expr.isEmpty()) {
expr = "$metric";
}
if (expr.contains("$metric")) {
StringBuilder metricString = new StringBuilder();
if (getMetricFamily() != null) {
metricString.append(getMetricFamily());
}
if (getMetricLabels() != null && !getMetricLabels().isEmpty()) {
String comma = "";
metricString.append("{");
for (Map.Entry<String, String> label : getMetricLabels().entrySet()) {
metricString.append(comma)
.append(label.getKey())
.append("=\"")
.append(label.getValue())
.append("\"");
comma = ",";
}
metricString.append("}");
}
expr = expr.replace("$metric", metricString.toString());
}
return expr;
} } | public class class_name {
public String resolveExpression() {
String expr = getType().getMetricExpression();
if (expr == null || expr.isEmpty()) {
expr = "$metric"; // depends on control dependency: [if], data = [none]
}
if (expr.contains("$metric")) {
StringBuilder metricString = new StringBuilder();
if (getMetricFamily() != null) {
metricString.append(getMetricFamily()); // depends on control dependency: [if], data = [(getMetricFamily()]
}
if (getMetricLabels() != null && !getMetricLabels().isEmpty()) {
String comma = "";
metricString.append("{");
for (Map.Entry<String, String> label : getMetricLabels().entrySet()) {
metricString.append(comma)
.append(label.getKey())
.append("=\"")
.append(label.getValue())
.append("\""); // depends on control dependency: [for], data = [none]
comma = ","; // depends on control dependency: [for], data = [none]
}
metricString.append("}"); // depends on control dependency: [if], data = [none]
}
expr = expr.replace("$metric", metricString.toString()); // depends on control dependency: [if], data = [none]
}
return expr;
} } |
public class class_name {
public static void main(String[] args) {
//============================================================================================//
//=============================== Submitting a Request =======================================//
//============================================================================================//
/*
* The ProfileCredentialsProvider will return your [default]
* credential profile by reading from the credentials file located at
* (~/.aws/credentials).
*/
AWSCredentials credentials = null;
try {
credentials = new ProfileCredentialsProvider().getCredentials();
} catch (Exception e) {
throw new AmazonClientException(
"Cannot load the credentials from the credential profiles file. " +
"Please make sure that your credentials file is at the correct " +
"location (~/.aws/credentials), and is in valid format.",
e);
}
// Create the AmazonEC2Client object so we can call various APIs.
AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion("us-west-2")
.build();
// Initializes a Spot Instance Request
RequestSpotInstancesRequest requestRequest = new RequestSpotInstancesRequest();
// Request 1 x t1.micro instance with a bid price of $0.03.
requestRequest.setSpotPrice("0.03");
requestRequest.setInstanceCount(Integer.valueOf(1));
// Setup the specifications of the launch. This includes the instance type (e.g. t1.micro)
// and the latest Amazon Linux AMI id available. Note, you should always use the latest
// Amazon Linux AMI id or another of your choosing.
LaunchSpecification launchSpecification = new LaunchSpecification();
launchSpecification.setImageId("ami-8c1fece5");
launchSpecification.setInstanceType("t1.micro");
// Add the security group to the request.
ArrayList<String> securityGroups = new ArrayList<String>();
securityGroups.add("GettingStartedGroup");
launchSpecification.setSecurityGroups(securityGroups);
// Add the launch specifications to the request.
requestRequest.setLaunchSpecification(launchSpecification);
//============================================================================================//
//=========================== Getting the Request ID from the Request ========================//
//============================================================================================//
// Call the RequestSpotInstance API.
RequestSpotInstancesResult requestResult = ec2.requestSpotInstances(requestRequest);
List<SpotInstanceRequest> requestResponses = requestResult.getSpotInstanceRequests();
// Setup an arraylist to collect all of the request ids we want to watch hit the running
// state.
ArrayList<String> spotInstanceRequestIds = new ArrayList<String>();
        // Add all of the request ids to the list, so we can determine when they hit the
// active state.
for (SpotInstanceRequest requestResponse : requestResponses) {
System.out.println("Created Spot Request: "+requestResponse.getSpotInstanceRequestId());
spotInstanceRequestIds.add(requestResponse.getSpotInstanceRequestId());
}
//============================================================================================//
//====================================== Tag the Spot Requests ===============================//
//============================================================================================//
// Create the list of tags we want to create
ArrayList<Tag> requestTags = new ArrayList<Tag>();
requestTags.add(new Tag("keyname1","value1"));
// Create a tag request for requests.
CreateTagsRequest createTagsRequest_requests = new CreateTagsRequest();
createTagsRequest_requests.setResources(spotInstanceRequestIds);
createTagsRequest_requests.setTags(requestTags);
// Try to tag the Spot request submitted.
try {
ec2.createTags(createTagsRequest_requests);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error terminating instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
//============================================================================================//
//=========================== Determining the State of the Spot Request ======================//
//============================================================================================//
// Create a variable that will track whether there are any requests still in the open state.
boolean anyOpen;
// Initialize variables.
ArrayList<String> instanceIds = new ArrayList<String>();
do {
            // Create the describeRequest with all of the request ids to monitor (e.g. that we started).
DescribeSpotInstanceRequestsRequest describeRequest = new DescribeSpotInstanceRequestsRequest();
describeRequest.setSpotInstanceRequestIds(spotInstanceRequestIds);
// Initialize the anyOpen variable to false, which assumes there are no requests open unless
// we find one that is still open.
anyOpen=false;
try {
// Retrieve all of the requests we want to monitor.
DescribeSpotInstanceRequestsResult describeResult = ec2.describeSpotInstanceRequests(describeRequest);
List<SpotInstanceRequest> describeResponses = describeResult.getSpotInstanceRequests();
// Look through each request and determine if they are all in the active state.
for (SpotInstanceRequest describeResponse : describeResponses) {
// If the state is open, it hasn't changed since we attempted to request it.
// There is the potential for it to transition almost immediately to closed or
// cancelled so we compare against open instead of active.
if (describeResponse.getState().equals("open")) {
anyOpen = true;
break;
}
// Add the instance id to the list we will eventually terminate.
instanceIds.add(describeResponse.getInstanceId());
}
} catch (AmazonServiceException e) {
// If we have an exception, ensure we don't break out of the loop.
// This prevents the scenario where there was blip on the wire.
anyOpen = true;
}
try {
// Sleep for 60 seconds.
Thread.sleep(60*1000);
} catch (Exception e) {
// Do nothing because it woke up early.
}
} while (anyOpen);
//============================================================================================//
//====================================== Tag the Spot Instances ===============================//
//============================================================================================//
// Create the list of tags we want to create
ArrayList<Tag> instanceTags = new ArrayList<Tag>();
instanceTags.add(new Tag("keyname1","value1"));
// Create a tag request for instances.
CreateTagsRequest createTagsRequest_instances = new CreateTagsRequest();
createTagsRequest_instances.setResources(instanceIds);
createTagsRequest_instances.setTags(instanceTags);
// Try to tag the Spot instance started.
try {
ec2.createTags(createTagsRequest_instances);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error terminating instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
//============================================================================================//
//====================================== Canceling the Request ==============================//
//============================================================================================//
try {
// Cancel requests.
CancelSpotInstanceRequestsRequest cancelRequest = new CancelSpotInstanceRequestsRequest(spotInstanceRequestIds);
ec2.cancelSpotInstanceRequests(cancelRequest);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error cancelling instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
//============================================================================================//
//=================================== Terminating any Instances ==============================//
//============================================================================================//
try {
// Terminate instances.
TerminateInstancesRequest terminateRequest = new TerminateInstancesRequest(instanceIds);
ec2.terminateInstances(terminateRequest);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error terminating instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
} } | public class class_name {
public static void main(String[] args) {
//============================================================================================//
//=============================== Submitting a Request =======================================//
//============================================================================================//
/*
* The ProfileCredentialsProvider will return your [default]
* credential profile by reading from the credentials file located at
* (~/.aws/credentials).
*/
AWSCredentials credentials = null;
try {
credentials = new ProfileCredentialsProvider().getCredentials();
// depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new AmazonClientException(
"Cannot load the credentials from the credential profiles file. " +
"Please make sure that your credentials file is at the correct " +
"location (~/.aws/credentials), and is in valid format.",
e);
}
// depends on control dependency: [catch], data = [none]
// Create the AmazonEC2Client object so we can call various APIs.
AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion("us-west-2")
.build();
// Initializes a Spot Instance Request
RequestSpotInstancesRequest requestRequest = new RequestSpotInstancesRequest();
// Request 1 x t1.micro instance with a bid price of $0.03.
requestRequest.setSpotPrice("0.03");
requestRequest.setInstanceCount(Integer.valueOf(1));
// Setup the specifications of the launch. This includes the instance type (e.g. t1.micro)
// and the latest Amazon Linux AMI id available. Note, you should always use the latest
// Amazon Linux AMI id or another of your choosing.
LaunchSpecification launchSpecification = new LaunchSpecification();
launchSpecification.setImageId("ami-8c1fece5");
launchSpecification.setInstanceType("t1.micro");
// Add the security group to the request.
ArrayList<String> securityGroups = new ArrayList<String>();
securityGroups.add("GettingStartedGroup");
launchSpecification.setSecurityGroups(securityGroups);
// Add the launch specifications to the request.
requestRequest.setLaunchSpecification(launchSpecification);
//============================================================================================//
//=========================== Getting the Request ID from the Request ========================//
//============================================================================================//
// Call the RequestSpotInstance API.
RequestSpotInstancesResult requestResult = ec2.requestSpotInstances(requestRequest);
List<SpotInstanceRequest> requestResponses = requestResult.getSpotInstanceRequests();
// Setup an arraylist to collect all of the request ids we want to watch hit the running
// state.
ArrayList<String> spotInstanceRequestIds = new ArrayList<String>();
        // Add all of the request ids to the list, so we can determine when they hit the
// active state.
for (SpotInstanceRequest requestResponse : requestResponses) {
System.out.println("Created Spot Request: "+requestResponse.getSpotInstanceRequestId());
// depends on control dependency: [for], data = [requestResponse]
spotInstanceRequestIds.add(requestResponse.getSpotInstanceRequestId());
// depends on control dependency: [for], data = [requestResponse]
}
//============================================================================================//
//====================================== Tag the Spot Requests ===============================//
//============================================================================================//
// Create the list of tags we want to create
ArrayList<Tag> requestTags = new ArrayList<Tag>();
requestTags.add(new Tag("keyname1","value1"));
// Create a tag request for requests.
CreateTagsRequest createTagsRequest_requests = new CreateTagsRequest();
createTagsRequest_requests.setResources(spotInstanceRequestIds);
createTagsRequest_requests.setTags(requestTags);
// Try to tag the Spot request submitted.
try {
ec2.createTags(createTagsRequest_requests);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error terminating instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
//============================================================================================//
//=========================== Determining the State of the Spot Request ======================//
//============================================================================================//
// Create a variable that will track whether there are any requests still in the open state.
boolean anyOpen;
// Initialize variables.
ArrayList<String> instanceIds = new ArrayList<String>();
do {
            // Create the describeRequest with all of the request ids to monitor (e.g. that we started).
DescribeSpotInstanceRequestsRequest describeRequest = new DescribeSpotInstanceRequestsRequest();
describeRequest.setSpotInstanceRequestIds(spotInstanceRequestIds);
// Initialize the anyOpen variable to false, which assumes there are no requests open unless
// we find one that is still open.
anyOpen=false;
try {
// Retrieve all of the requests we want to monitor.
DescribeSpotInstanceRequestsResult describeResult = ec2.describeSpotInstanceRequests(describeRequest);
List<SpotInstanceRequest> describeResponses = describeResult.getSpotInstanceRequests();
// Look through each request and determine if they are all in the active state.
for (SpotInstanceRequest describeResponse : describeResponses) {
// If the state is open, it hasn't changed since we attempted to request it.
// There is the potential for it to transition almost immediately to closed or
// cancelled so we compare against open instead of active.
if (describeResponse.getState().equals("open")) {
anyOpen = true;
break;
}
// Add the instance id to the list we will eventually terminate.
instanceIds.add(describeResponse.getInstanceId());
}
} catch (AmazonServiceException e) {
// If we have an exception, ensure we don't break out of the loop.
// This prevents the scenario where there was blip on the wire.
anyOpen = true;
}
try {
// Sleep for 60 seconds.
Thread.sleep(60*1000);
} catch (Exception e) {
// Do nothing because it woke up early.
}
} while (anyOpen);
//============================================================================================//
//====================================== Tag the Spot Instances ===============================//
//============================================================================================//
// Create the list of tags we want to create
ArrayList<Tag> instanceTags = new ArrayList<Tag>();
instanceTags.add(new Tag("keyname1","value1"));
// Create a tag request for instances.
CreateTagsRequest createTagsRequest_instances = new CreateTagsRequest();
createTagsRequest_instances.setResources(instanceIds);
createTagsRequest_instances.setTags(instanceTags);
// Try to tag the Spot instance started.
try {
ec2.createTags(createTagsRequest_instances);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error terminating instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
//============================================================================================//
//====================================== Canceling the Request ==============================//
//============================================================================================//
try {
// Cancel requests.
CancelSpotInstanceRequestsRequest cancelRequest = new CancelSpotInstanceRequestsRequest(spotInstanceRequestIds);
ec2.cancelSpotInstanceRequests(cancelRequest);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error cancelling instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
//============================================================================================//
//=================================== Terminating any Instances ==============================//
//============================================================================================//
try {
// Terminate instances.
TerminateInstancesRequest terminateRequest = new TerminateInstancesRequest(instanceIds);
ec2.terminateInstances(terminateRequest);
} catch (AmazonServiceException e) {
// Write out any exceptions that may have occurred.
System.out.println("Error terminating instances");
System.out.println("Caught Exception: " + e.getMessage());
System.out.println("Reponse Status Code: " + e.getStatusCode());
System.out.println("Error Code: " + e.getErrorCode());
System.out.println("Request ID: " + e.getRequestId());
}
} } |
public class class_name {
void insertMacros(TokenList tokens ) {
TokenList.Token t = tokens.getFirst();
while( t != null ) {
if( t.getType() == Type.WORD ) {
Macro v = lookupMacro(t.word);
if (v != null) {
TokenList.Token before = t.previous;
List<TokenList.Token> inputs = new ArrayList<TokenList.Token>();
t = parseMacroInput(inputs,t.next);
TokenList sniplet = v.execute(inputs);
tokens.extractSubList(before.next,t);
tokens.insertAfter(before,sniplet);
t = sniplet.last;
}
}
t = t.next;
}
} } | public class class_name {
void insertMacros(TokenList tokens ) {
TokenList.Token t = tokens.getFirst();
while( t != null ) {
if( t.getType() == Type.WORD ) {
Macro v = lookupMacro(t.word);
if (v != null) {
TokenList.Token before = t.previous;
List<TokenList.Token> inputs = new ArrayList<TokenList.Token>();
t = parseMacroInput(inputs,t.next); // depends on control dependency: [if], data = [none]
TokenList sniplet = v.execute(inputs);
tokens.extractSubList(before.next,t); // depends on control dependency: [if], data = [none]
tokens.insertAfter(before,sniplet); // depends on control dependency: [if], data = [none]
t = sniplet.last; // depends on control dependency: [if], data = [none]
}
}
t = t.next; // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
public RedisURI getViewedBy(Map<RedisURI, Partitions> map, Partitions partitions) {
for (Map.Entry<RedisURI, Partitions> entry : map.entrySet()) {
if (entry.getValue() == partitions) {
return entry.getKey();
}
}
return null;
} } | public class class_name {
public RedisURI getViewedBy(Map<RedisURI, Partitions> map, Partitions partitions) {
for (Map.Entry<RedisURI, Partitions> entry : map.entrySet()) {
if (entry.getValue() == partitions) {
return entry.getKey(); // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
@Override
public void prepare(FeatureProvider provider)
{
super.prepare(provider);
if (provider instanceof ProducerListener)
{
addListener((ProducerListener) provider);
}
if (provider instanceof ProducerChecker)
{
checker = (ProducerChecker) provider;
}
} } | public class class_name {
@Override
public void prepare(FeatureProvider provider)
{
super.prepare(provider);
if (provider instanceof ProducerListener)
{
addListener((ProducerListener) provider);
// depends on control dependency: [if], data = [none]
}
if (provider instanceof ProducerChecker)
{
checker = (ProducerChecker) provider;
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private synchronized static void delete(File file) {
// Check if file is directory
if (file.isDirectory()) {
// Get all files in the folder
File[] files = file.listFiles();
// Delete each file in the folder
for (int i = 0; i < files.length; ++i) {
delete(files[i]);
}
// Delete the folder
file.delete();
} else {
// Delete the file if it is not a folder
file.delete();
}
} } | public class class_name {
private synchronized static void delete(File file) {
// Check if file is directory
if (file.isDirectory()) {
// Get all files in the folder
File[] files = file.listFiles();
// Delete each file in the folder
for (int i = 0; i < files.length; ++i) {
delete(files[i]); // depends on control dependency: [for], data = [i]
}
// Delete the folder
file.delete(); // depends on control dependency: [if], data = [none]
} else {
// Delete the file if it is not a folder
file.delete(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String relative(String startPath, String targetPath) {
// If the start and target path's are the same then link to the current directory
if (startPath.equals(targetPath)) {
return CURRENT_DIR;
}
String[] start = toSegments(canonical(startPath));
String[] target = toSegments(canonical(targetPath));
// If start path has no trailing separator (a "file" path), then drop file segment
if (!startPath.endsWith(SEPARATOR)) start = Arrays.copyOfRange(start, 0, start.length - 1);
// If target path has no trailing separator, then drop file segment, but keep a reference to add
// it later
String targetFile = "";
if (!targetPath.endsWith(SEPARATOR)) {
targetFile = target[target.length - 1];
target = Arrays.copyOfRange(target, 0, target.length - 1);
}
// Work out how much of the filepath is shared by start and path.
String[] common = commonPrefix(start, target);
String[] parents = toParentDirs(start.length - common.length);
int relativeStartIdx = common.length;
String[] relativeDirs = Arrays.copyOfRange(target, relativeStartIdx, target.length);
String[] relativePath = Arrays.copyOf(parents, parents.length + relativeDirs.length);
System.arraycopy(relativeDirs, 0, relativePath, parents.length, relativeDirs.length);
// If this is not a sibling reference append a trailing / to path
String trailingSep = "";
if (relativePath.length > 0) trailingSep = SEPARATOR;
return Arrays.stream(relativePath).collect(Collectors.joining(SEPARATOR))
+ trailingSep
+ targetFile;
} } | public class class_name {
public static String relative(String startPath, String targetPath) {
// If the start and target path's are the same then link to the current directory
if (startPath.equals(targetPath)) {
return CURRENT_DIR; // depends on control dependency: [if], data = [none]
}
String[] start = toSegments(canonical(startPath));
String[] target = toSegments(canonical(targetPath));
// If start path has no trailing separator (a "file" path), then drop file segment
if (!startPath.endsWith(SEPARATOR)) start = Arrays.copyOfRange(start, 0, start.length - 1);
// If target path has no trailing separator, then drop file segment, but keep a reference to add
// it later
String targetFile = "";
if (!targetPath.endsWith(SEPARATOR)) {
targetFile = target[target.length - 1]; // depends on control dependency: [if], data = [none]
target = Arrays.copyOfRange(target, 0, target.length - 1); // depends on control dependency: [if], data = [none]
}
// Work out how much of the filepath is shared by start and path.
String[] common = commonPrefix(start, target);
String[] parents = toParentDirs(start.length - common.length);
int relativeStartIdx = common.length;
String[] relativeDirs = Arrays.copyOfRange(target, relativeStartIdx, target.length);
String[] relativePath = Arrays.copyOf(parents, parents.length + relativeDirs.length);
System.arraycopy(relativeDirs, 0, relativePath, parents.length, relativeDirs.length);
// If this is not a sibling reference append a trailing / to path
String trailingSep = "";
if (relativePath.length > 0) trailingSep = SEPARATOR;
return Arrays.stream(relativePath).collect(Collectors.joining(SEPARATOR))
+ trailingSep
+ targetFile;
} } |
public class class_name {
private Map<String, Map> volumes() {
final ImmutableMap.Builder<String, Map> volumes = ImmutableMap.builder();
for (final Map.Entry<String, String> entry : job.getVolumes().entrySet()) {
final String path = entry.getKey();
final String source = entry.getValue();
if (Strings.isNullOrEmpty(source)) {
volumes.put(path, new HashMap());
}
}
return volumes.build();
} } | public class class_name {
private Map<String, Map> volumes() {
final ImmutableMap.Builder<String, Map> volumes = ImmutableMap.builder();
for (final Map.Entry<String, String> entry : job.getVolumes().entrySet()) {
final String path = entry.getKey();
final String source = entry.getValue();
if (Strings.isNullOrEmpty(source)) {
volumes.put(path, new HashMap()); // depends on control dependency: [if], data = [none]
}
}
return volumes.build();
} } |
public class class_name {
private static BigInteger checkAndReturn(String which,
BigInteger expected,
String asbigintcs)
{
BigInteger other =
BigIntStringChecksum.fromString(asbigintcs).asBigInteger();
if (expected.equals(other))
{
return expected;
}
else
{
throw new SecretShareException(which + " failure");
}
} } | public class class_name {
private static BigInteger checkAndReturn(String which,
BigInteger expected,
String asbigintcs)
{
BigInteger other =
BigIntStringChecksum.fromString(asbigintcs).asBigInteger();
if (expected.equals(other))
{
return expected; // depends on control dependency: [if], data = [none]
}
else
{
throw new SecretShareException(which + " failure");
}
} } |
public class class_name {
public void associate(HttpSession context) {
try {
context.setAttribute(COMPONENTS, new ConcurrentHashMap<Descriptor<?>, Object>());
} catch (Exception e) {
logger.debug("HTTP session is disabled or invalid in the current environment", e);
}
} } | public class class_name {
public void associate(HttpSession context) {
try {
context.setAttribute(COMPONENTS, new ConcurrentHashMap<Descriptor<?>, Object>()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.debug("HTTP session is disabled or invalid in the current environment", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
protected List<ConfigIssue> init() {
List<ConfigIssue> issues = super.init();
errorRecordHandler = new DefaultErrorRecordHandler(getContext()); // NOSONAR
double rateLimit = conf.rateLimit > 0 ? (1000.0 / conf.rateLimit) : Double.MAX_VALUE;
rateLimiter = RateLimiter.create(rateLimit);
httpClientCommon.init(issues, getContext());
conf.dataFormatConfig.init(
getContext(),
conf.dataFormat,
Groups.HTTP.name(),
HttpClientCommon.DATA_FORMAT_CONFIG_PREFIX,
issues
);
bodyVars = getContext().createELVars();
bodyEval = getContext().createELEval(REQUEST_BODY_CONFIG_NAME);
if (issues.isEmpty()) {
parserFactory = conf.dataFormatConfig.getParserFactory();
}
return issues;
} } | public class class_name {
@Override
protected List<ConfigIssue> init() {
List<ConfigIssue> issues = super.init();
errorRecordHandler = new DefaultErrorRecordHandler(getContext()); // NOSONAR
double rateLimit = conf.rateLimit > 0 ? (1000.0 / conf.rateLimit) : Double.MAX_VALUE;
rateLimiter = RateLimiter.create(rateLimit);
httpClientCommon.init(issues, getContext());
conf.dataFormatConfig.init(
getContext(),
conf.dataFormat,
Groups.HTTP.name(),
HttpClientCommon.DATA_FORMAT_CONFIG_PREFIX,
issues
);
bodyVars = getContext().createELVars();
bodyEval = getContext().createELEval(REQUEST_BODY_CONFIG_NAME);
if (issues.isEmpty()) {
parserFactory = conf.dataFormatConfig.getParserFactory(); // depends on control dependency: [if], data = [none]
}
return issues;
} } |
public class class_name {
public ByteBuffer getElementAt(int index) {
if (index >= elementsInBucket) {
return null;
} else {
int chunk = index * gp.getElementSize() / memory[0].length;
int offset = index * gp.getElementSize() % memory[0].length;
return ByteBuffer.wrap(memory[chunk], offset, gp.getElementSize()).asReadOnlyBuffer();
}
} } | public class class_name {
public ByteBuffer getElementAt(int index) {
if (index >= elementsInBucket) {
return null; // depends on control dependency: [if], data = [none]
} else {
int chunk = index * gp.getElementSize() / memory[0].length;
int offset = index * gp.getElementSize() % memory[0].length;
return ByteBuffer.wrap(memory[chunk], offset, gp.getElementSize()).asReadOnlyBuffer(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public String getScheme() {
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse();
}
// 321485
String scheme = this._request.getScheme();
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"getScheme", "this->"+this+": "+" scheme --> " + scheme);
}
return scheme;
} } | public class class_name {
public String getScheme() {
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse(); // depends on control dependency: [if], data = [none]
}
// 321485
String scheme = this._request.getScheme();
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"getScheme", "this->"+this+": "+" scheme --> " + scheme); // depends on control dependency: [if], data = [none]
}
return scheme;
} } |
public class class_name {
public void put(URI uri, byte[] bimg, BufferedImage img)
{
synchronized (bytemap)
{
while (bytesize > 1000 * 1000 * 50)
{
URI olduri = bytemapAccessQueue.removeFirst();
byte[] oldbimg = bytemap.remove(olduri);
bytesize -= oldbimg.length;
log("removed 1 img from byte cache");
}
bytemap.put(uri, bimg);
bytesize += bimg.length;
bytemapAccessQueue.addLast(uri);
}
addToImageCache(uri, img);
} } | public class class_name {
public void put(URI uri, byte[] bimg, BufferedImage img)
{
synchronized (bytemap)
{
while (bytesize > 1000 * 1000 * 50)
{
URI olduri = bytemapAccessQueue.removeFirst();
byte[] oldbimg = bytemap.remove(olduri);
bytesize -= oldbimg.length; // depends on control dependency: [while], data = [none]
log("removed 1 img from byte cache"); // depends on control dependency: [while], data = [none]
}
bytemap.put(uri, bimg);
bytesize += bimg.length;
bytemapAccessQueue.addLast(uri);
}
addToImageCache(uri, img);
} } |
public class class_name {
static public String makeStandardFilename(String appName, String storeName) {
// the directory
String userHome = null;
try {
userHome = System.getProperty("user.home");
} catch (Exception e) {
System.out.println( "XMLStore.makeStandardFilename: error System.getProperty(user.home) "+e);
}
if (null == userHome) userHome = ".";
String dirFilename = userHome+"/"+appName;
File f = new File(dirFilename);
if (!f.exists()) {
boolean ok = f.mkdirs(); // now ready for file creation in writeXML
if (!ok)
System.out.println("Error creating directories: " + f.getAbsolutePath());
}
return dirFilename +"/"+ storeName;
} } | public class class_name {
static public String makeStandardFilename(String appName, String storeName) {
// the directory
String userHome = null;
try {
userHome = System.getProperty("user.home"); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
System.out.println( "XMLStore.makeStandardFilename: error System.getProperty(user.home) "+e);
} // depends on control dependency: [catch], data = [none]
if (null == userHome) userHome = ".";
String dirFilename = userHome+"/"+appName;
File f = new File(dirFilename);
if (!f.exists()) {
boolean ok = f.mkdirs(); // now ready for file creation in writeXML
if (!ok)
System.out.println("Error creating directories: " + f.getAbsolutePath());
}
return dirFilename +"/"+ storeName;
} } |
public class class_name {
public static Map getProperties( JSONObject jsonObject ) {
Map properties = new HashMap();
for( Iterator keys = jsonObject.keys(); keys.hasNext(); ){
String key = (String) keys.next();
/*
* String parsedKey = key; if( !JSONUtils.isJavaIdentifier( parsedKey ) ){
* parsedKey = JSONUtils.convertToJavaIdentifier( key ); }
*/
properties.put( key, getTypeClass( jsonObject.get( key ) ) );
}
return properties;
} } | public class class_name {
public static Map getProperties( JSONObject jsonObject ) {
Map properties = new HashMap();
for( Iterator keys = jsonObject.keys(); keys.hasNext(); ){
String key = (String) keys.next();
/*
* String parsedKey = key; if( !JSONUtils.isJavaIdentifier( parsedKey ) ){
* parsedKey = JSONUtils.convertToJavaIdentifier( key ); }
*/
properties.put( key, getTypeClass( jsonObject.get( key ) ) ); // depends on control dependency: [for], data = [none]
}
return properties;
} } |
public class class_name {
void writeCode(Code code) {
databuf.appendChar(code.max_stack);
databuf.appendChar(code.max_locals);
databuf.appendInt(code.cp);
databuf.appendBytes(code.code, 0, code.cp);
databuf.appendChar(code.catchInfo.length());
for (List<char[]> l = code.catchInfo.toList();
l.nonEmpty();
l = l.tail) {
for (int i = 0; i < l.head.length; i++)
databuf.appendChar(l.head[i]);
}
int acountIdx = beginAttrs();
int acount = 0;
if (code.lineInfo.nonEmpty()) {
int alenIdx = writeAttr(names.LineNumberTable);
databuf.appendChar(code.lineInfo.length());
for (List<char[]> l = code.lineInfo.reverse();
l.nonEmpty();
l = l.tail)
for (int i = 0; i < l.head.length; i++)
databuf.appendChar(l.head[i]);
endAttr(alenIdx);
acount++;
}
if (genCrt && (code.crt != null)) {
CRTable crt = code.crt;
int alenIdx = writeAttr(names.CharacterRangeTable);
int crtIdx = beginAttrs();
int crtEntries = crt.writeCRT(databuf, code.lineMap, log);
endAttrs(crtIdx, crtEntries);
endAttr(alenIdx);
acount++;
}
// counter for number of generic local variables
if (code.varDebugInfo && code.varBufferSize > 0) {
int nGenericVars = 0;
int alenIdx = writeAttr(names.LocalVariableTable);
databuf.appendChar(code.getLVTSize());
for (int i=0; i<code.varBufferSize; i++) {
Code.LocalVar var = code.varBuffer[i];
for (Code.LocalVar.Range r: var.aliveRanges) {
// write variable info
Assert.check(r.start_pc >= 0
&& r.start_pc <= code.cp);
databuf.appendChar(r.start_pc);
Assert.check(r.length > 0
&& (r.start_pc + r.length) <= code.cp);
databuf.appendChar(r.length);
VarSymbol sym = var.sym;
databuf.appendChar(pool.put(sym.name));
Type vartype = sym.erasure(types);
databuf.appendChar(pool.put(typeSig(vartype)));
databuf.appendChar(var.reg);
if (needsLocalVariableTypeEntry(var.sym.type)) {
nGenericVars++;
}
}
}
endAttr(alenIdx);
acount++;
if (nGenericVars > 0) {
alenIdx = writeAttr(names.LocalVariableTypeTable);
databuf.appendChar(nGenericVars);
int count = 0;
for (int i=0; i<code.varBufferSize; i++) {
Code.LocalVar var = code.varBuffer[i];
VarSymbol sym = var.sym;
if (!needsLocalVariableTypeEntry(sym.type))
continue;
for (Code.LocalVar.Range r : var.aliveRanges) {
// write variable info
databuf.appendChar(r.start_pc);
databuf.appendChar(r.length);
databuf.appendChar(pool.put(sym.name));
databuf.appendChar(pool.put(typeSig(sym.type)));
databuf.appendChar(var.reg);
count++;
}
}
Assert.check(count == nGenericVars);
endAttr(alenIdx);
acount++;
}
}
if (code.stackMapBufferSize > 0) {
if (debugstackmap) System.out.println("Stack map for " + code.meth);
int alenIdx = writeAttr(code.stackMap.getAttributeName(names));
writeStackMap(code);
endAttr(alenIdx);
acount++;
}
acount += writeTypeAnnotations(code.meth.getRawTypeAttributes(), true);
endAttrs(acountIdx, acount);
} } | public class class_name {
void writeCode(Code code) {
databuf.appendChar(code.max_stack);
databuf.appendChar(code.max_locals);
databuf.appendInt(code.cp);
databuf.appendBytes(code.code, 0, code.cp);
databuf.appendChar(code.catchInfo.length());
for (List<char[]> l = code.catchInfo.toList();
l.nonEmpty();
l = l.tail) {
for (int i = 0; i < l.head.length; i++)
databuf.appendChar(l.head[i]);
}
int acountIdx = beginAttrs();
int acount = 0;
if (code.lineInfo.nonEmpty()) {
int alenIdx = writeAttr(names.LineNumberTable);
databuf.appendChar(code.lineInfo.length()); // depends on control dependency: [if], data = [none]
for (List<char[]> l = code.lineInfo.reverse();
l.nonEmpty();
l = l.tail)
for (int i = 0; i < l.head.length; i++)
databuf.appendChar(l.head[i]);
endAttr(alenIdx); // depends on control dependency: [if], data = [none]
acount++; // depends on control dependency: [if], data = [none]
}
if (genCrt && (code.crt != null)) {
CRTable crt = code.crt;
int alenIdx = writeAttr(names.CharacterRangeTable);
int crtIdx = beginAttrs();
int crtEntries = crt.writeCRT(databuf, code.lineMap, log);
endAttrs(crtIdx, crtEntries); // depends on control dependency: [if], data = [none]
endAttr(alenIdx); // depends on control dependency: [if], data = [none]
acount++; // depends on control dependency: [if], data = [none]
}
// counter for number of generic local variables
if (code.varDebugInfo && code.varBufferSize > 0) {
int nGenericVars = 0;
int alenIdx = writeAttr(names.LocalVariableTable);
databuf.appendChar(code.getLVTSize()); // depends on control dependency: [if], data = [none]
for (int i=0; i<code.varBufferSize; i++) {
Code.LocalVar var = code.varBuffer[i];
for (Code.LocalVar.Range r: var.aliveRanges) {
// write variable info
Assert.check(r.start_pc >= 0
&& r.start_pc <= code.cp); // depends on control dependency: [for], data = [r]
databuf.appendChar(r.start_pc); // depends on control dependency: [for], data = [r]
Assert.check(r.length > 0
&& (r.start_pc + r.length) <= code.cp); // depends on control dependency: [for], data = [r]
databuf.appendChar(r.length); // depends on control dependency: [for], data = [r]
VarSymbol sym = var.sym;
databuf.appendChar(pool.put(sym.name)); // depends on control dependency: [for], data = [r]
Type vartype = sym.erasure(types);
databuf.appendChar(pool.put(typeSig(vartype))); // depends on control dependency: [for], data = [r]
databuf.appendChar(var.reg); // depends on control dependency: [for], data = [r]
if (needsLocalVariableTypeEntry(var.sym.type)) {
nGenericVars++; // depends on control dependency: [if], data = [none]
}
}
}
endAttr(alenIdx); // depends on control dependency: [if], data = [none]
acount++; // depends on control dependency: [if], data = [none]
if (nGenericVars > 0) {
alenIdx = writeAttr(names.LocalVariableTypeTable); // depends on control dependency: [if], data = [none]
databuf.appendChar(nGenericVars); // depends on control dependency: [if], data = [(nGenericVars]
int count = 0;
for (int i=0; i<code.varBufferSize; i++) {
Code.LocalVar var = code.varBuffer[i];
VarSymbol sym = var.sym;
if (!needsLocalVariableTypeEntry(sym.type))
continue;
for (Code.LocalVar.Range r : var.aliveRanges) {
// write variable info
databuf.appendChar(r.start_pc); // depends on control dependency: [for], data = [r]
databuf.appendChar(r.length); // depends on control dependency: [for], data = [r]
databuf.appendChar(pool.put(sym.name)); // depends on control dependency: [for], data = [r]
databuf.appendChar(pool.put(typeSig(sym.type))); // depends on control dependency: [for], data = [r]
databuf.appendChar(var.reg); // depends on control dependency: [for], data = [r]
count++; // depends on control dependency: [for], data = [none]
}
}
Assert.check(count == nGenericVars); // depends on control dependency: [if], data = [none]
endAttr(alenIdx); // depends on control dependency: [if], data = [none]
acount++; // depends on control dependency: [if], data = [none]
}
}
if (code.stackMapBufferSize > 0) {
if (debugstackmap) System.out.println("Stack map for " + code.meth);
int alenIdx = writeAttr(code.stackMap.getAttributeName(names));
writeStackMap(code); // depends on control dependency: [if], data = [none]
endAttr(alenIdx); // depends on control dependency: [if], data = [none]
acount++; // depends on control dependency: [if], data = [none]
}
acount += writeTypeAnnotations(code.meth.getRawTypeAttributes(), true);
endAttrs(acountIdx, acount);
} } |
public class class_name {
public void remove( Element<T> element ) {
if( element.next == null ) {
last = element.previous;
} else {
element.next.previous = element.previous;
}
if( element.previous == null ) {
first = element.next;
} else {
element.previous.next = element.next;
}
size--;
element.clear();
available.push(element);
} } | public class class_name {
public void remove( Element<T> element ) {
if( element.next == null ) {
last = element.previous; // depends on control dependency: [if], data = [none]
} else {
element.next.previous = element.previous; // depends on control dependency: [if], data = [none]
}
if( element.previous == null ) {
first = element.next; // depends on control dependency: [if], data = [none]
} else {
element.previous.next = element.next; // depends on control dependency: [if], data = [none]
}
size--;
element.clear();
available.push(element);
} } |
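
remove() above is the standard unlink step for a doubly linked list that tracks first/last pointers: re-point the neighbour on each side, falling back to the list ends when a neighbour is missing. Below is a tiny standalone sketch of the same pointer surgery; the Node type and list fields are hypothetical, not the original Element<T>.

// Minimal doubly linked list showing the same unlink pattern as remove() above.
public class IntDoublyLinkedList {
    static final class Node {
        int value;
        Node prev, next;
        Node(int value) { this.value = value; }
    }

    private Node first, last;
    private int size;

    public Node addLast(int value) {
        Node n = new Node(value);
        if (last == null) {
            first = n;
        } else {
            last.next = n;
            n.prev = last;
        }
        last = n;
        size++;
        return n;
    }

    public void remove(Node n) {
        // Re-point the neighbour on each side, using first/last at the ends.
        if (n.next == null) {
            last = n.prev;
        } else {
            n.next.prev = n.prev;
        }
        if (n.prev == null) {
            first = n.next;
        } else {
            n.prev.next = n.next;
        }
        n.prev = n.next = null; // detach so the node can be pooled or collected
        size--;
    }

    public int size() { return size; }
}
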
public class class_name {
public void marshall(ApiKey apiKey, ProtocolMarshaller protocolMarshaller) {
if (apiKey == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(apiKey.getId(), ID_BINDING);
protocolMarshaller.marshall(apiKey.getValue(), VALUE_BINDING);
protocolMarshaller.marshall(apiKey.getName(), NAME_BINDING);
protocolMarshaller.marshall(apiKey.getCustomerId(), CUSTOMERID_BINDING);
protocolMarshaller.marshall(apiKey.getDescription(), DESCRIPTION_BINDING);
protocolMarshaller.marshall(apiKey.getEnabled(), ENABLED_BINDING);
protocolMarshaller.marshall(apiKey.getCreatedDate(), CREATEDDATE_BINDING);
protocolMarshaller.marshall(apiKey.getLastUpdatedDate(), LASTUPDATEDDATE_BINDING);
protocolMarshaller.marshall(apiKey.getStageKeys(), STAGEKEYS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(ApiKey apiKey, ProtocolMarshaller protocolMarshaller) {
if (apiKey == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(apiKey.getId(), ID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getValue(), VALUE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getCustomerId(), CUSTOMERID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getEnabled(), ENABLED_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getCreatedDate(), CREATEDDATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getLastUpdatedDate(), LASTUPDATEDDATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(apiKey.getStageKeys(), STAGEKEYS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public Set<String> generateIdsToUse(long nextFreeTransactionalId) {
Set<String> transactionalIds = new HashSet<>();
for (int i = 0; i < poolSize; i++) {
long transactionalId = nextFreeTransactionalId + subtaskIndex * poolSize + i;
transactionalIds.add(generateTransactionalId(transactionalId));
}
return transactionalIds;
} } | public class class_name {
public Set<String> generateIdsToUse(long nextFreeTransactionalId) {
Set<String> transactionalIds = new HashSet<>();
for (int i = 0; i < poolSize; i++) {
long transactionalId = nextFreeTransactionalId + subtaskIndex * poolSize + i;
transactionalIds.add(generateTransactionalId(transactionalId)); // depends on control dependency: [for], data = [none]
}
return transactionalIds;
} } |
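
generateIdsToUse above hands each subtask a disjoint block of poolSize IDs starting at nextFreeTransactionalId + subtaskIndex * poolSize. The sketch below just replays that arithmetic so the non-overlap is visible; the prefix formatting stands in for the unshown generateTransactionalId and is an assumption.

import java.util.LinkedHashSet;
import java.util.Set;

// Sketch of disjoint per-subtask ID blocks, as computed in generateIdsToUse above.
public class TransactionalIdBlocks {
    public static Set<String> idsFor(String prefix, long nextFreeId, int subtaskIndex, int poolSize) {
        Set<String> ids = new LinkedHashSet<>();
        long start = nextFreeId + (long) subtaskIndex * poolSize;
        for (int i = 0; i < poolSize; i++) {
            ids.add(prefix + "-" + (start + i)); // assumed formatting of the transactional ID
        }
        return ids;
    }

    public static void main(String[] args) {
        // Subtasks 0 and 1 with poolSize 3 get {100,101,102} and {103,104,105}: no overlap.
        System.out.println(idsFor("txn", 100, 0, 3));
        System.out.println(idsFor("txn", 100, 1, 3));
    }
}
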
public class class_name {
public static String message(final Throwable throwable) {
String message = throwable.getMessage();
if (StringUtil.isBlank(message)) {
message = throwable.toString();
}
return message;
} } | public class class_name {
public static String message(final Throwable throwable) {
String message = throwable.getMessage();
if (StringUtil.isBlank(message)) {
message = throwable.toString(); // depends on control dependency: [if], data = [none]
}
return message;
} } |
public class class_name {
protected void validate(List<TableInfo> tableInfos)
{
try
{
HTableDescriptor hTableDescriptor = admin.getTableDescriptor(databaseName.getBytes());
HColumnDescriptor[] columnFamilies = hTableDescriptor.getColumnFamilies();
for (TableInfo tableInfo : tableInfos)
{
if (tableInfo != null)
{
boolean isColumnFound = false;
for (HColumnDescriptor columnDescriptor : columnFamilies)
{
if (columnDescriptor.getNameAsString().equalsIgnoreCase(tableInfo.getTableName()))
{
isColumnFound = true;
break;
}
}
if (!isColumnFound)
{
throw new SchemaGenerationException("column " + tableInfo.getTableName()
+ " does not exist in table " + databaseName + "", "Hbase", databaseName,
tableInfo.getTableName());
}
}
}
}
catch (TableNotFoundException tnfex)
{
throw new SchemaGenerationException("table " + databaseName + " does not exist ", tnfex, "Hbase");
}
catch (IOException ioe)
{
logger.error("Either check for network connection or table isn't in enabled state, Caused by:", ioe);
throw new SchemaGenerationException(ioe, "Hbase");
}
} } | public class class_name {
protected void validate(List<TableInfo> tableInfos)
{
try
{
HTableDescriptor hTableDescriptor = admin.getTableDescriptor(databaseName.getBytes());
HColumnDescriptor[] columnFamilies = hTableDescriptor.getColumnFamilies();
for (TableInfo tableInfo : tableInfos)
{
if (tableInfo != null)
{
boolean isColumnFound = false;
for (HColumnDescriptor columnDescriptor : columnFamilies)
{
if (columnDescriptor.getNameAsString().equalsIgnoreCase(tableInfo.getTableName()))
{
isColumnFound = true; // depends on control dependency: [if], data = [none]
break;
}
}
if (!isColumnFound)
{
throw new SchemaGenerationException("column " + tableInfo.getTableName()
+ " does not exist in table " + databaseName + "", "Hbase", databaseName,
tableInfo.getTableName());
}
}
}
}
catch (TableNotFoundException tnfex)
{
throw new SchemaGenerationException("table " + databaseName + " does not exist ", tnfex, "Hbase");
}
catch (IOException ioe)
{
logger.error("Either check for network connection or table isn't in enabled state, Caused by:", ioe);
throw new SchemaGenerationException(ioe, "Hbase");
}
} } |
public class class_name {
public static String randomIdString() {
MersenneTwister twister = new MersenneTwister();
final int length = 27;
char[] chars = new char[length];
for (int i = 0; i < length; i++) {
chars[i] = (char) ('a' + twister.nextInt(26));
}
// Add a visual separator, which makes these easier to distinguish at a glance
chars[4] = '_';
return new String(chars);
} } | public class class_name {
public static String randomIdString() {
MersenneTwister twister = new MersenneTwister();
final int length = 27;
char[] chars = new char[length];
for (int i = 0; i < length; i++) {
chars[i] = (char) ('a' + twister.nextInt(26)); // depends on control dependency: [for], data = [i]
}
// Add a visual separator, which makes these easier to distinguish at a glance
chars[4] = '_';
return new String(chars);
} } |
public class class_name {
public static void encode(StringBuilder sb, long data)
{
for (int i = 58; i > 0; i -= 6) {
sb.append(encode(data >> i));
}
sb.append(encode(data << 2));
} } | public class class_name {
public static void encode(StringBuilder sb, long data)
{
for (int i = 58; i > 0; i -= 6) {
sb.append(encode(data >> i)); // depends on control dependency: [for], data = [i]
}
sb.append(encode(data << 2));
} } |
public class class_name {
protected void reConnect(VirtualConnection inVC, IOException ioe) {
if (getLink().isReconnectAllowed()) {
// start the reconnect
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Attempting reconnect: " + getLink().getVirtualConnection());
}
// 359362 - null out the JIT read buffers
getTSC().getReadInterface().setBuffer(null);
getLink().reConnectAsync(ioe);
} else {
callErrorCallback(inVC, ioe);
}
} } | public class class_name {
protected void reConnect(VirtualConnection inVC, IOException ioe) {
if (getLink().isReconnectAllowed()) {
// start the reconnect
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Attempting reconnect: " + getLink().getVirtualConnection()); // depends on control dependency: [if], data = [none]
}
// 359362 - null out the JIT read buffers
getTSC().getReadInterface().setBuffer(null); // depends on control dependency: [if], data = [none]
getLink().reConnectAsync(ioe); // depends on control dependency: [if], data = [none]
} else {
callErrorCallback(inVC, ioe); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static boolean isPrime(int number) {
        // For performance, check if the number is divisible by several of
        // the smaller prime numbers.  Note that a prime is divisible by
        // itself, so check for the smaller primes prior to performing
        // the division check.
if (number == 2 || number == 3 || number == 5 || number == 7 ||
number == 11 || number == 13 || number == 17 || number == 19)
return true;
if (number % 2 == 0 || number % 3 == 0 || number % 5 == 0 || number % 7 == 0 ||
number % 11 == 0 || number % 13 == 0 || number % 17 == 0 || number % 19 == 0)
return false;
// Now perform the Euler's criterion check for the bases 2, 3, and 5. This is
// valid only for odd numbers, so the even must be filtered out above.
long numberMinusOne = number - 1;
long s = 0;
long d = numberMinusOne;
while (isEven(d)) {
d = d / 2;
++s;
}
long d1 = d;
for (int a = 2; a <= 5; ++a) {
if (a == 4)
continue; // Skip the base 4.
long prod = 1;
long a2j = a;
d = d1;
while (d > 0) {
if (isOdd(d))
prod = (prod * a2j) % number;
d = d / 2;
a2j = (a2j * a2j) % number;
}
// prod = a^d mod number has been calculated.
if (prod == 1 || prod == (numberMinusOne))
continue;
for (long i = 1; i <= s; ++i) {
prod = (prod * prod) % number;
if (prod == (numberMinusOne))
break;
}
if (prod != (numberMinusOne)) {
return false;
}
}
// There are a few non-prime numbers below 25x10^9 that pass Euler's
        // criterion, so check for them here.  Note that several of them are larger
// than an integer, but they are commented out here for completeness.
if (number == 25326001 || number == 161304001 ||
number == 960946321 || number == 1157839381)
// 3215031751, 3697278427, 5764643587, 6770862367, 14386156093,
// 15579919981, 18459366157, 19887974881, 21276028621
return false;
return true;
} } | public class class_name {
public static boolean isPrime(int number) {
        // For performance, check if the number is divisible by several of
        // the smaller prime numbers.  Note that a prime is divisible by
        // itself, so check for the smaller primes prior to performing
        // the division check.
if (number == 2 || number == 3 || number == 5 || number == 7 ||
number == 11 || number == 13 || number == 17 || number == 19)
return true;
if (number % 2 == 0 || number % 3 == 0 || number % 5 == 0 || number % 7 == 0 ||
number % 11 == 0 || number % 13 == 0 || number % 17 == 0 || number % 19 == 0)
return false;
// Now perform the Euler's criterion check for the bases 2, 3, and 5. This is
// valid only for odd numbers, so the even must be filtered out above.
long numberMinusOne = number - 1;
long s = 0;
long d = numberMinusOne;
while (isEven(d)) {
d = d / 2; // depends on control dependency: [while], data = [none]
++s; // depends on control dependency: [while], data = [none]
}
long d1 = d;
for (int a = 2; a <= 5; ++a) {
if (a == 4)
continue; // Skip the base 4.
long prod = 1;
long a2j = a;
d = d1; // depends on control dependency: [for], data = [none]
while (d > 0) {
if (isOdd(d))
prod = (prod * a2j) % number;
d = d / 2; // depends on control dependency: [while], data = [none]
a2j = (a2j * a2j) % number; // depends on control dependency: [while], data = [none]
}
// prod = a^d mod number has been calculated.
if (prod == 1 || prod == (numberMinusOne))
continue;
for (long i = 1; i <= s; ++i) {
prod = (prod * prod) % number; // depends on control dependency: [for], data = [none]
if (prod == (numberMinusOne))
break;
}
if (prod != (numberMinusOne)) {
return false; // depends on control dependency: [if], data = [none]
}
}
// There are a few non-prime numbers below 25x10^9 that pass Euler's
        // criterion, so check for them here.  Note that several of them are larger
// than an integer, but they are commented out here for completeness.
if (number == 25326001 || number == 161304001 ||
number == 960946321 || number == 1157839381)
// 3215031751, 3697278427, 5764643587, 6770862367, 14386156093,
// 15579919981, 18459366157, 19887974881, 21276028621
return false;
return true;
} } |
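
The inner while loop of isPrime is square-and-multiply modular exponentiation: it computes a^d mod n by walking the bits of d, squaring the base each step and multiplying it in whenever the current bit is set. The sketch below isolates that step and cross-checks it against BigInteger.modPow; the class and method names are illustrative.

import java.math.BigInteger;

// Square-and-multiply modular exponentiation, the same step used inside isPrime above.
// Safe for moduli that fit in an int (as in isPrime), so the products below stay within a long.
public class PowMod {
    public static long powMod(long base, long exponent, long modulus) {
        long result = 1 % modulus;
        long square = base % modulus;
        long e = exponent;
        while (e > 0) {
            if ((e & 1) == 1) {
                result = (result * square) % modulus; // multiply in the current power of two
            }
            square = (square * square) % modulus;
            e >>= 1;
        }
        return result;
    }

    public static void main(String[] args) {
        long a = 2, d = 1_000_003, n = 1_000_000_007L;
        long expected = BigInteger.valueOf(a)
                .modPow(BigInteger.valueOf(d), BigInteger.valueOf(n))
                .longValueExact();
        System.out.println(powMod(a, d, n) == expected); // prints true
    }
}
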
public class class_name {
synchronized @CheckForNull RunExecution peek() {
Executor e = Executor.currentExecutor();
if (e != null) {
Stack<RunExecution> s = stack.get(e);
if (s != null && !s.isEmpty()) {
return s.peek();
}
}
return null;
} } | public class class_name {
synchronized @CheckForNull RunExecution peek() {
Executor e = Executor.currentExecutor();
if (e != null) {
Stack<RunExecution> s = stack.get(e);
if (s != null && !s.isEmpty()) {
return s.peek(); // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
public String getInitField(FieldData recFieldData, boolean bConstantsNotVariables, boolean bThinConstants)
{
boolean bConstantValue = false;
boolean bValidThinConstant = true;
String strInitField = recFieldData.getField(FieldData.INITIAL_VALUE).getString();
if (strInitField != null) if (strInitField.length() > 0)
{
if (strInitField.charAt(0) == '\"')
bConstantValue = true; // This is a constant value (a string)
if (Character.isDigit(strInitField.charAt(0)))
bConstantValue = true; // This is a constant value (a number)
if ((strInitField.equalsIgnoreCase("true")) || (strInitField.equalsIgnoreCase("false")))
                bConstantValue = true;    // This is a constant value (a boolean)
if (strInitField.startsWith("new "))
bConstantValue = true; // This is a constant value (an object)
if (strInitField.indexOf('.') != -1)
if (strInitField.lastIndexOf('(') <= 1) // Not found or a class cast.
if (!Utility.isNumeric(strInitField))
{
bConstantValue = true; // This is a constant value (a member constant - not a function call)
bValidThinConstant = false;
}
}
if (bConstantsNotVariables != bConstantValue)
strInitField = DBConstants.BLANK; // Not asking for this type of value
if (bThinConstants)
if (!bValidThinConstant)
strInitField = DBConstants.BLANK; // Not asking for this type of value
return strInitField;
} } | public class class_name {
public String getInitField(FieldData recFieldData, boolean bConstantsNotVariables, boolean bThinConstants)
{
boolean bConstantValue = false;
boolean bValidThinConstant = true;
String strInitField = recFieldData.getField(FieldData.INITIAL_VALUE).getString();
if (strInitField != null) if (strInitField.length() > 0)
{
if (strInitField.charAt(0) == '\"')
bConstantValue = true; // This is a constant value (a string)
if (Character.isDigit(strInitField.charAt(0)))
bConstantValue = true; // This is a constant value (a number)
if ((strInitField.equalsIgnoreCase("true")) || (strInitField.equalsIgnoreCase("false")))
                bConstantValue = true;    // This is a constant value (a boolean)
if (strInitField.startsWith("new "))
bConstantValue = true; // This is a constant value (an object)
if (strInitField.indexOf('.') != -1)
if (strInitField.lastIndexOf('(') <= 1) // Not found or a class cast.
if (!Utility.isNumeric(strInitField))
{
bConstantValue = true; // This is a constant value (a member constant - not a function call) // depends on control dependency: [if], data = [none]
bValidThinConstant = false; // depends on control dependency: [if], data = [none]
}
}
if (bConstantsNotVariables != bConstantValue)
strInitField = DBConstants.BLANK; // Not asking for this type of value
if (bThinConstants)
if (!bValidThinConstant)
strInitField = DBConstants.BLANK; // Not asking for this type of value
return strInitField;
} } |
public class class_name {
private final int readPIPrimary()
throws XMLStreamException
{
// Ok, first we need the name:
String target = parseFullName();
mCurrName = target;
if (target.length() == 0) {
throwParseError(ErrorConsts.ERR_WF_PI_MISSING_TARGET);
}
// As per XML specs, #17, case-insensitive 'xml' is illegal:
if (target.equalsIgnoreCase("xml")) {
// 07-Oct-2005, TSa: Still legal in multi-doc mode...
if (!mConfig.inputParsingModeDocuments()) {
throwParseError(ErrorConsts.ERR_WF_PI_XML_TARGET, target, null);
}
// Ok, let's just verify we get space then
char c = getNextCharFromCurrent(SUFFIX_IN_XML_DECL);
if (!isSpaceChar(c)) {
throwUnexpectedChar(c, "excepted a space in xml declaration after 'xml'");
}
return handleMultiDocStart(START_DOCUMENT);
}
// And then either white space before data, or end marker:
char c = (mInputPtr < mInputEnd) ?
mInputBuffer[mInputPtr++] : getNextCharFromCurrent(SUFFIX_IN_PROC_INSTR);
if (isSpaceChar(c)) { // Ok, space to skip
mTokenState = TOKEN_STARTED;
// Need to skip the WS...
skipWS(c);
} else { // Nope; apparently finishes right away...
mTokenState = TOKEN_FULL_COALESCED;
mTextBuffer.resetWithEmpty();
// or does it?
if (c != '?' || getNextCharFromCurrent(SUFFIX_IN_PROC_INSTR) != '>') {
throwUnexpectedChar(c, ErrorConsts.ERR_WF_PI_XML_MISSING_SPACE);
}
}
return PROCESSING_INSTRUCTION;
} } | public class class_name {
private final int readPIPrimary()
throws XMLStreamException
{
// Ok, first we need the name:
String target = parseFullName();
mCurrName = target;
if (target.length() == 0) {
throwParseError(ErrorConsts.ERR_WF_PI_MISSING_TARGET);
}
// As per XML specs, #17, case-insensitive 'xml' is illegal:
if (target.equalsIgnoreCase("xml")) {
// 07-Oct-2005, TSa: Still legal in multi-doc mode...
if (!mConfig.inputParsingModeDocuments()) {
throwParseError(ErrorConsts.ERR_WF_PI_XML_TARGET, target, null); // depends on control dependency: [if], data = [none]
}
// Ok, let's just verify we get space then
char c = getNextCharFromCurrent(SUFFIX_IN_XML_DECL);
if (!isSpaceChar(c)) {
throwUnexpectedChar(c, "excepted a space in xml declaration after 'xml'"); // depends on control dependency: [if], data = [none]
}
return handleMultiDocStart(START_DOCUMENT);
}
// And then either white space before data, or end marker:
char c = (mInputPtr < mInputEnd) ?
mInputBuffer[mInputPtr++] : getNextCharFromCurrent(SUFFIX_IN_PROC_INSTR);
if (isSpaceChar(c)) { // Ok, space to skip
mTokenState = TOKEN_STARTED;
// Need to skip the WS...
skipWS(c);
} else { // Nope; apparently finishes right away...
mTokenState = TOKEN_FULL_COALESCED;
mTextBuffer.resetWithEmpty();
// or does it?
if (c != '?' || getNextCharFromCurrent(SUFFIX_IN_PROC_INSTR) != '>') {
throwUnexpectedChar(c, ErrorConsts.ERR_WF_PI_XML_MISSING_SPACE); // depends on control dependency: [if], data = [(c]
}
}
return PROCESSING_INSTRUCTION;
} } |
public class class_name {
public EMap<String, String> getXMLNSPrefixMap() {
if (xMLNSPrefixMap == null) {
xMLNSPrefixMap = new EcoreEMap<String,String>(EcorePackage.Literals.ESTRING_TO_STRING_MAP_ENTRY, EStringToStringMapEntryImpl.class, this, DroolsPackage.DOCUMENT_ROOT__XMLNS_PREFIX_MAP);
}
return xMLNSPrefixMap;
} } | public class class_name {
public EMap<String, String> getXMLNSPrefixMap() {
if (xMLNSPrefixMap == null) {
xMLNSPrefixMap = new EcoreEMap<String,String>(EcorePackage.Literals.ESTRING_TO_STRING_MAP_ENTRY, EStringToStringMapEntryImpl.class, this, DroolsPackage.DOCUMENT_ROOT__XMLNS_PREFIX_MAP); // depends on control dependency: [if], data = [none]
}
return xMLNSPrefixMap;
} } |
public class class_name {
protected String readText(Postcard postcard, String path, boolean html, boolean filesystem, OptionalThing<Locale> receiverLocale,
OptionalThing<Object> dynamicData) {
if (dynamicData.isPresent()) {
final OptionalThing<String> assisted = assistDynamicText(postcard, path, html, filesystem, receiverLocale, dynamicData.get());
if (assisted.isPresent()) {
return assisted.get();
}
}
final String cacheKey = generateCacheKey(path, filesystem, receiverLocale);
final String cached = textCacheMap.get(cacheKey);
if (cached != null) {
return cached;
}
synchronized (this) {
final String retried = textCacheMap.get(cacheKey);
if (retried != null) {
return retried;
}
final String read = doReadText(postcard, path, filesystem, receiverLocale);
if (read == null) { // just in case
String msg = "Not found the text from the path: " + path + ", filesystem=" + filesystem;
throw new SMailIllegalStateException(msg);
}
textCacheMap.put(cacheKey, read);
return textCacheMap.get(cacheKey);
}
} } | public class class_name {
protected String readText(Postcard postcard, String path, boolean html, boolean filesystem, OptionalThing<Locale> receiverLocale,
OptionalThing<Object> dynamicData) {
if (dynamicData.isPresent()) {
final OptionalThing<String> assisted = assistDynamicText(postcard, path, html, filesystem, receiverLocale, dynamicData.get());
if (assisted.isPresent()) {
return assisted.get(); // depends on control dependency: [if], data = [none]
}
}
final String cacheKey = generateCacheKey(path, filesystem, receiverLocale);
final String cached = textCacheMap.get(cacheKey);
if (cached != null) {
return cached; // depends on control dependency: [if], data = [none]
}
synchronized (this) {
final String retried = textCacheMap.get(cacheKey);
if (retried != null) {
return retried; // depends on control dependency: [if], data = [none]
}
final String read = doReadText(postcard, path, filesystem, receiverLocale);
if (read == null) { // just in case
String msg = "Not found the text from the path: " + path + ", filesystem=" + filesystem;
throw new SMailIllegalStateException(msg);
}
textCacheMap.put(cacheKey, read);
return textCacheMap.get(cacheKey);
}
} } |
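
readText above uses a check, then re-check-under-lock shape so the expensive template read happens at most once per cache key even with concurrent callers. Below is a stripped-down sketch of that memoization pattern, with a placeholder load() standing in for the real template read.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Check-then-recheck-under-lock caching, the same shape as readText above.
public class TextCache {
    private final Map<String, String> cache = new ConcurrentHashMap<>();

    public String read(String key) {
        String cached = cache.get(key);
        if (cached != null) {
            return cached;                    // fast path: no locking once cached
        }
        synchronized (this) {
            String retried = cache.get(key);  // another thread may have filled it meanwhile
            if (retried != null) {
                return retried;
            }
            String loaded = load(key);
            cache.put(key, loaded);
            return loaded;
        }
    }

    private String load(String key) {
        // Placeholder for the expensive read (template file or classpath resource in the original).
        return "content-of-" + key;
    }
}
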
public class class_name {
@Nullable
public static String javaScriptEscape (@Nullable final String sInput)
{
if (StringHelper.hasNoText (sInput))
return sInput;
final char [] aInput = sInput.toCharArray ();
if (!StringHelper.containsAny (aInput, CHARS_TO_MASK))
return sInput;
final char [] ret = new char [aInput.length * 2];
int nIndex = 0;
char cPrevChar = '\u0000';
for (final char cCurrent : aInput)
{
switch (cCurrent)
{
case '"':
case '\'':
case '\\':
case '/':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = cCurrent;
break;
case '\t':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 't';
break;
case '\n':
if (cPrevChar != '\r')
{
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 'n';
}
break;
case '\r':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 'n';
break;
case '\f':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 'f';
break;
default:
ret[nIndex++] = cCurrent;
break;
}
cPrevChar = cCurrent;
}
return new String (ret, 0, nIndex);
} } | public class class_name {
@Nullable
public static String javaScriptEscape (@Nullable final String sInput)
{
if (StringHelper.hasNoText (sInput))
return sInput;
final char [] aInput = sInput.toCharArray ();
if (!StringHelper.containsAny (aInput, CHARS_TO_MASK))
return sInput;
final char [] ret = new char [aInput.length * 2];
int nIndex = 0;
char cPrevChar = '\u0000';
for (final char cCurrent : aInput)
{
switch (cCurrent)
{
case '"':
case '\'':
case '\\':
case '/':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = cCurrent;
break;
case '\t':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 't';
break;
case '\n':
if (cPrevChar != '\r')
{
ret[nIndex++] = MASK_CHAR; // depends on control dependency: [if], data = [none]
ret[nIndex++] = 'n'; // depends on control dependency: [if], data = [none]
}
break;
case '\r':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 'n';
break;
case '\f':
ret[nIndex++] = MASK_CHAR;
ret[nIndex++] = 'f';
break;
default:
ret[nIndex++] = cCurrent;
break;
}
cPrevChar = cCurrent; // depends on control dependency: [for], data = [cCurrent]
}
return new String (ret, 0, nIndex);
} } |
public class class_name {
protected <IPW extends IndentingPrintWriter> IPW writeChildrenTo(IPW output) {
Collection<? extends UMLNode> children = getChildren();
if (!children.isEmpty()) {
IndentingPrintWriter indented = output.indent();
children.forEach(child -> child.writeTo(indented));
}
return output;
} } | public class class_name {
protected <IPW extends IndentingPrintWriter> IPW writeChildrenTo(IPW output) {
Collection<? extends UMLNode> children = getChildren();
if (!children.isEmpty()) {
IndentingPrintWriter indented = output.indent();
children.forEach(child -> child.writeTo(indented)); // depends on control dependency: [if], data = [none]
}
return output;
} } |
public class class_name {
public CompressionCodec getCodec(Path file) {
CompressionCodec result = null;
if (codecs != null) {
String filename = file.getName();
String reversedFilename = new StringBuffer(filename).reverse().toString();
SortedMap<String, CompressionCodec> subMap =
codecs.headMap(reversedFilename);
if (!subMap.isEmpty()) {
String potentialSuffix = subMap.lastKey();
if (reversedFilename.startsWith(potentialSuffix)) {
result = codecs.get(potentialSuffix);
}
}
}
return result;
} } | public class class_name {
public CompressionCodec getCodec(Path file) {
CompressionCodec result = null;
if (codecs != null) {
String filename = file.getName();
String reversedFilename = new StringBuffer(filename).reverse().toString();
SortedMap<String, CompressionCodec> subMap =
codecs.headMap(reversedFilename);
if (!subMap.isEmpty()) {
String potentialSuffix = subMap.lastKey();
if (reversedFilename.startsWith(potentialSuffix)) {
result = codecs.get(potentialSuffix); // depends on control dependency: [if], data = [none]
}
}
}
return result;
} } |
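
getCodec above finds the longest registered filename suffix without scanning every entry: suffixes are stored reversed in a sorted map, and headMap()/lastKey() on the reversed filename yields the single candidate worth testing. A self-contained sketch of that trick follows; the registered suffixes and handler strings are made up for illustration.

import java.util.SortedMap;
import java.util.TreeMap;

// Longest-suffix lookup via reversed keys and headMap(), as in getCodec above.
public class SuffixLookup {
    private final TreeMap<String, String> byReversedSuffix = new TreeMap<>();

    public void register(String suffix, String handler) {
        byReversedSuffix.put(new StringBuilder(suffix).reverse().toString(), handler);
    }

    public String lookup(String filename) {
        String reversed = new StringBuilder(filename).reverse().toString();
        SortedMap<String, String> candidates = byReversedSuffix.headMap(reversed);
        if (candidates.isEmpty()) {
            return null;
        }
        // Only the greatest key below the reversed name is checked, mirroring getCodec above.
        String best = candidates.lastKey();
        return reversed.startsWith(best) ? byReversedSuffix.get(best) : null;
    }

    public static void main(String[] args) {
        SuffixLookup lookup = new SuffixLookup();
        lookup.register(".gz", "gzip");      // illustrative handlers, not real codec classes
        lookup.register(".bz2", "bzip2");
        System.out.println(lookup.lookup("part-00000.gz"));  // gzip
        System.out.println(lookup.lookup("part-00000.txt")); // null
    }
}
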
public class class_name {
public static boolean deleteDirRecursively(File dir) {
if (!dir.exists() || !dir.isDirectory()) {
            // No files to delete, so OK
log.warn("El directorio:'" + dir.getAbsolutePath()
+ "' no existe o no es un directorio");
return false;
}
        // the file is a directory
boolean succed = true;
File listFile[] = dir.listFiles();
for (File file:listFile) {
if (file.isDirectory()) {
deleteDirRecursively(file);
} else {
if (!file.delete()) {
log.warn("No se ha podido borrar el fichero:'"
+ file.getAbsolutePath() + "'");
succed = false;
}
}
}
if (!dir.delete()) {
log.warn("No se ha podido borrar el fichero:'"
+ dir.getAbsolutePath() + "'");
succed = false;
}
return succed;
} } | public class class_name {
public static boolean deleteDirRecursively(File dir) {
if (!dir.exists() || !dir.isDirectory()) {
            // No files to delete, so OK
log.warn("El directorio:'" + dir.getAbsolutePath()
+ "' no existe o no es un directorio");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
        // the file is a directory
boolean succed = true;
File listFile[] = dir.listFiles();
for (File file:listFile) {
if (file.isDirectory()) {
deleteDirRecursively(file);
// depends on control dependency: [if], data = [none]
} else {
if (!file.delete()) {
log.warn("No se ha podido borrar el fichero:'"
+ file.getAbsolutePath() + "'");
// depends on control dependency: [if], data = [none]
succed = false;
// depends on control dependency: [if], data = [none]
}
}
}
if (!dir.delete()) {
log.warn("No se ha podido borrar el fichero:'"
+ dir.getAbsolutePath() + "'");
// depends on control dependency: [if], data = [none]
succed = false;
// depends on control dependency: [if], data = [none]
}
return succed;
} } |
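
deleteDirRecursively above deletes children first and the directory last, folding failures into a boolean. As a hedged alternative, the sketch below performs the same depth-first delete with java.nio.file and reports failures via IOException; it is not the original project's API, just an illustration of the same traversal order.

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

// Depth-first recursive delete with java.nio.file, matching the order used by deleteDirRecursively above.
public class RecursiveDelete {
    public static void deleteRecursively(Path dir) throws IOException {
        if (!Files.isDirectory(dir)) {
            return; // nothing to do, mirroring the early return in the entry above
        }
        Files.walkFileTree(dir, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);           // delete plain files as they are visited
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path d, IOException exc) throws IOException {
                if (exc != null) {
                    throw exc;                // propagate traversal failures
                }
                Files.delete(d);              // delete the directory only after its children
                return FileVisitResult.CONTINUE;
            }
        });
    }
}
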
public class class_name {
public boolean match(String word) {
if(word == null || word.length() < 2) {
return false;
}
CharNode cn = dict.get(word.charAt(0));
return search(cn, word.toCharArray(), 0, word.length()-1) >= 0;
} } | public class class_name {
public boolean match(String word) {
if(word == null || word.length() < 2) {
return false;
// depends on control dependency: [if], data = [none]
}
CharNode cn = dict.get(word.charAt(0));
return search(cn, word.toCharArray(), 0, word.length()-1) >= 0;
} } |
public class class_name {
public static String toSeparatedString(List<?> values, String separator, String prefix) {
StringBuilder result = new StringBuilder();
for (Object each : values) {
if (each == null) {
continue;
}
if (result.length() > 0) {
result.append(separator);
}
if (prefix != null) {
result.append(String.valueOf(each));
} else {
result.append(prefix + String.valueOf(each));
}
}
return result.toString();
} } | public class class_name {
public static String toSeparatedString(List<?> values, String separator, String prefix) {
StringBuilder result = new StringBuilder();
for (Object each : values) {
if (each == null) {
continue;
}
if (result.length() > 0) {
result.append(separator);
// depends on control dependency: [if], data = [none]
}
if (prefix != null) {
result.append(String.valueOf(each));
// depends on control dependency: [if], data = [none]
} else {
result.append(prefix + String.valueOf(each));
// depends on control dependency: [if], data = [(prefix]
}
}
return result.toString();
} } |
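
In toSeparatedString above the two prefix branches appear inverted: a non-null prefix is ignored, while a null prefix is concatenated and renders as the literal "null". The sketch below shows the presumably intended behaviour, keeping the null-skip and separator handling of the original; treat the change as an editorial assumption about intent, not the upstream implementation.

import java.util.Arrays;
import java.util.List;

// Join values with a separator, optionally prefixing each one; null values are skipped.
public class SeparatedString {
    public static String toSeparatedString(List<?> values, String separator, String prefix) {
        StringBuilder result = new StringBuilder();
        for (Object each : values) {
            if (each == null) {
                continue;
            }
            if (result.length() > 0) {
                result.append(separator);
            }
            if (prefix != null) {
                result.append(prefix);        // apply the prefix only when one was supplied
            }
            result.append(each);
        }
        return result.toString();
    }

    public static void main(String[] args) {
        System.out.println(toSeparatedString(Arrays.asList("a", null, "b"), ", ", "#")); // #a, #b
        System.out.println(toSeparatedString(Arrays.asList(1, 2, 3), "-", null));        // 1-2-3
    }
}
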
public class class_name {
public void discardFirstWords(long x) {
if (this.runningLength >= x) {
this.runningLength -= x;
return;
}
x -= this.runningLength;
this.runningLength = 0;
this.literalWordOffset += x;
this.numberOfLiteralWords -= x;
} } | public class class_name {
public void discardFirstWords(long x) {
if (this.runningLength >= x) {
this.runningLength -= x; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
x -= this.runningLength;
this.runningLength = 0;
this.literalWordOffset += x;
this.numberOfLiteralWords -= x;
} } |
public class class_name {
public <T extends WebServiceTemplate> T configure(T webServiceTemplate) {
Assert.notNull(webServiceTemplate, "WebServiceTemplate must not be null");
configureMessageSenders(webServiceTemplate);
PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
applyCustomizers(webServiceTemplate, this.internalCustomizers);
map.from(this.marshaller).to(webServiceTemplate::setMarshaller);
map.from(this.unmarshaller).to(webServiceTemplate::setUnmarshaller);
map.from(this.destinationProvider).to(webServiceTemplate::setDestinationProvider);
map.from(this.transformerFactoryClass)
.to(webServiceTemplate::setTransformerFactoryClass);
map.from(this.messageFactory).to(webServiceTemplate::setMessageFactory);
if (!CollectionUtils.isEmpty(this.interceptors)) {
Set<ClientInterceptor> merged = new LinkedHashSet<>(this.interceptors);
if (webServiceTemplate.getInterceptors() != null) {
merged.addAll(Arrays.asList(webServiceTemplate.getInterceptors()));
}
webServiceTemplate.setInterceptors(merged.toArray(new ClientInterceptor[0]));
}
applyCustomizers(webServiceTemplate, this.customizers);
return webServiceTemplate;
} } | public class class_name {
public <T extends WebServiceTemplate> T configure(T webServiceTemplate) {
Assert.notNull(webServiceTemplate, "WebServiceTemplate must not be null");
configureMessageSenders(webServiceTemplate);
PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
applyCustomizers(webServiceTemplate, this.internalCustomizers);
map.from(this.marshaller).to(webServiceTemplate::setMarshaller);
map.from(this.unmarshaller).to(webServiceTemplate::setUnmarshaller);
map.from(this.destinationProvider).to(webServiceTemplate::setDestinationProvider);
map.from(this.transformerFactoryClass)
.to(webServiceTemplate::setTransformerFactoryClass);
map.from(this.messageFactory).to(webServiceTemplate::setMessageFactory);
if (!CollectionUtils.isEmpty(this.interceptors)) {
Set<ClientInterceptor> merged = new LinkedHashSet<>(this.interceptors);
if (webServiceTemplate.getInterceptors() != null) {
merged.addAll(Arrays.asList(webServiceTemplate.getInterceptors())); // depends on control dependency: [if], data = [(webServiceTemplate.getInterceptors()]
}
webServiceTemplate.setInterceptors(merged.toArray(new ClientInterceptor[0])); // depends on control dependency: [if], data = [none]
}
applyCustomizers(webServiceTemplate, this.customizers);
return webServiceTemplate;
} } |
public class class_name {
public static ImmutableSet<UnicodeFriendlyString> unicodeFriendlySet(Iterable<String> strings) {
final ImmutableSet.Builder<UnicodeFriendlyString> ret = ImmutableSet.builder();
for (final String s : strings) {
ret.add(unicodeFriendly(s));
}
return ret.build();
} } | public class class_name {
public static ImmutableSet<UnicodeFriendlyString> unicodeFriendlySet(Iterable<String> strings) {
final ImmutableSet.Builder<UnicodeFriendlyString> ret = ImmutableSet.builder();
for (final String s : strings) {
ret.add(unicodeFriendly(s)); // depends on control dependency: [for], data = [s]
}
return ret.build();
} } |
public class class_name {
@Override
protected DataPoint[] getAllInRange(long timestampStartMs, long timestampEndMs) {
SortedSet<DataPoint> result = new TreeSet<DataPoint>(new Comparator<DataPoint>() {
@Override
public int compare(DataPoint block1, DataPoint block2) {
return Longs.compare(block1.timestamp(), block2.timestamp());
}
});
long keyStart = toTimeSeriesPoint(timestampStartMs);
long keyEnd = toTimeSeriesPoint(timestampEndMs);
if (keyEnd == timestampStartMs) {
keyEnd = toTimeSeriesPoint(timestampEndMs - 1);
}
// build list of Redis map & field names
List<Long> keys = new ArrayList<Long>();
List<String> mapNames = new ArrayList<String>();
List<String> fieldNames = new ArrayList<String>();
String _name = getName();
for (long timestamp = keyStart, _end = keyEnd; timestamp <= _end; timestamp += RESOLUTION_MS) {
long bucketOffset = toTimeSeriesPoint(timestamp);
long delta = bucketOffset % (RESOLUTION_MS * BUCKET_SIZE);
long bucketId = bucketOffset - delta;
long[] bucket = { bucketId, bucketOffset };
keys.add(bucketOffset);
String redisKey = _name + ":" + bucket[0];
String redisField = String.valueOf(bucket[1]);
mapNames.add(redisKey);
fieldNames.add(redisField);
}
// use pipeline to get all data points at once
try (ShardedJedis jedis = getJedis()) {
ShardedJedisPipeline p = jedis.pipelined();
for (int i = 0, n = mapNames.size(); i < n; i++) {
String mapName = mapNames.get(i);
String fieldName = fieldNames.get(i);
p.hget(mapName, fieldName);
}
List<?> _pointValues = p.syncAndReturnAll();
for (int i = 0, n = keys.size(); i < n; i++) {
Long _key = keys.get(i);
Long _value = null;
try {
_value = Long.parseLong(_pointValues.get(i).toString());
} catch (Exception e) {
_value = null;
}
DataPoint dp = _value != null
? new DataPoint(Type.SUM, _key.longValue(), _value.longValue(),
RESOLUTION_MS)
: new DataPoint(Type.NONE, _key.longValue(), 0, RESOLUTION_MS);
result.add(dp);
}
}
return result.toArray(DataPoint.EMPTY_ARR);
} } | public class class_name {
@Override
protected DataPoint[] getAllInRange(long timestampStartMs, long timestampEndMs) {
SortedSet<DataPoint> result = new TreeSet<DataPoint>(new Comparator<DataPoint>() {
@Override
public int compare(DataPoint block1, DataPoint block2) {
return Longs.compare(block1.timestamp(), block2.timestamp());
}
});
long keyStart = toTimeSeriesPoint(timestampStartMs);
long keyEnd = toTimeSeriesPoint(timestampEndMs);
if (keyEnd == timestampStartMs) {
keyEnd = toTimeSeriesPoint(timestampEndMs - 1); // depends on control dependency: [if], data = [none]
}
// build list of Redis map & field names
List<Long> keys = new ArrayList<Long>();
List<String> mapNames = new ArrayList<String>();
List<String> fieldNames = new ArrayList<String>();
String _name = getName();
for (long timestamp = keyStart, _end = keyEnd; timestamp <= _end; timestamp += RESOLUTION_MS) {
long bucketOffset = toTimeSeriesPoint(timestamp);
long delta = bucketOffset % (RESOLUTION_MS * BUCKET_SIZE);
long bucketId = bucketOffset - delta;
long[] bucket = { bucketId, bucketOffset };
keys.add(bucketOffset); // depends on control dependency: [for], data = [none]
String redisKey = _name + ":" + bucket[0];
String redisField = String.valueOf(bucket[1]);
mapNames.add(redisKey); // depends on control dependency: [for], data = [none]
fieldNames.add(redisField); // depends on control dependency: [for], data = [none]
}
// use pipeline to get all data points at once
try (ShardedJedis jedis = getJedis()) {
ShardedJedisPipeline p = jedis.pipelined();
for (int i = 0, n = mapNames.size(); i < n; i++) {
String mapName = mapNames.get(i);
String fieldName = fieldNames.get(i);
p.hget(mapName, fieldName); // depends on control dependency: [for], data = [none]
}
List<?> _pointValues = p.syncAndReturnAll();
for (int i = 0, n = keys.size(); i < n; i++) {
Long _key = keys.get(i);
Long _value = null;
try {
_value = Long.parseLong(_pointValues.get(i).toString()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
_value = null;
} // depends on control dependency: [catch], data = [none]
DataPoint dp = _value != null
? new DataPoint(Type.SUM, _key.longValue(), _value.longValue(),
RESOLUTION_MS)
: new DataPoint(Type.NONE, _key.longValue(), 0, RESOLUTION_MS);
result.add(dp); // depends on control dependency: [for], data = [none]
}
}
return result.toArray(DataPoint.EMPTY_ARR);
} } |
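
getAllInRange above turns each timestamp into a (bucketId, bucketOffset) pair that becomes a Redis hash name and field. The sketch below replays only that bucketing arithmetic with assumed RESOLUTION_MS and BUCKET_SIZE values (the real constants are not shown in the entry), so the hash/field layout is easier to see.

// Sketch of the time-series bucketing arithmetic used in getAllInRange above.
public class TimeSeriesBuckets {
    static final long RESOLUTION_MS = 1000; // assumed 1-second resolution
    static final long BUCKET_SIZE = 60;     // assumed 60 points per bucket

    static long toTimeSeriesPoint(long timestampMs) {
        return timestampMs - (timestampMs % RESOLUTION_MS); // snap down to the resolution grid
    }

    public static void main(String[] args) {
        for (long ts = 10_000, end = 13_000; ts <= end; ts += RESOLUTION_MS) {
            long bucketOffset = toTimeSeriesPoint(ts);
            long delta = bucketOffset % (RESOLUTION_MS * BUCKET_SIZE);
            long bucketId = bucketOffset - delta;
            System.out.println("hash=series:" + bucketId + " field=" + bucketOffset);
        }
    }
}
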
public class class_name {
private Node tryFoldLeftChildOp(Node n, Node left, Node right) {
Token opType = n.getToken();
checkState((NodeUtil.isAssociative(opType) && NodeUtil.isCommutative(opType)) || n.isAdd());
checkState(!n.isAdd() || !NodeUtil.mayBeString(n, shouldUseTypes));
// Use getNumberValue to handle constants like "NaN" and "Infinity"
// other values are converted to numbers elsewhere.
Double rightValObj = NodeUtil.getNumberValue(right);
if (rightValObj != null && left.getToken() == opType) {
checkState(left.hasTwoChildren());
Node ll = left.getFirstChild();
Node lr = ll.getNext();
Node valueToCombine = ll;
Node replacement = performArithmeticOp(opType, valueToCombine, right);
if (replacement == null) {
valueToCombine = lr;
replacement = performArithmeticOp(opType, valueToCombine, right);
}
if (replacement != null) {
// Remove the child that has been combined
left.removeChild(valueToCombine);
// Replace the left op with the remaining child.
n.replaceChild(left, left.removeFirstChild());
// New "-Infinity" node need location info explicitly
// added.
replacement.useSourceInfoIfMissingFromForTree(right);
n.replaceChild(right, replacement);
reportChangeToEnclosingScope(n);
}
}
return n;
} } | public class class_name {
private Node tryFoldLeftChildOp(Node n, Node left, Node right) {
Token opType = n.getToken();
checkState((NodeUtil.isAssociative(opType) && NodeUtil.isCommutative(opType)) || n.isAdd());
checkState(!n.isAdd() || !NodeUtil.mayBeString(n, shouldUseTypes));
// Use getNumberValue to handle constants like "NaN" and "Infinity"
// other values are converted to numbers elsewhere.
Double rightValObj = NodeUtil.getNumberValue(right);
if (rightValObj != null && left.getToken() == opType) {
checkState(left.hasTwoChildren()); // depends on control dependency: [if], data = [none]
Node ll = left.getFirstChild();
Node lr = ll.getNext();
Node valueToCombine = ll;
Node replacement = performArithmeticOp(opType, valueToCombine, right);
if (replacement == null) {
valueToCombine = lr; // depends on control dependency: [if], data = [none]
replacement = performArithmeticOp(opType, valueToCombine, right); // depends on control dependency: [if], data = [none]
}
if (replacement != null) {
// Remove the child that has been combined
left.removeChild(valueToCombine); // depends on control dependency: [if], data = [none]
// Replace the left op with the remaining child.
n.replaceChild(left, left.removeFirstChild()); // depends on control dependency: [if], data = [none]
// New "-Infinity" node need location info explicitly
// added.
replacement.useSourceInfoIfMissingFromForTree(right); // depends on control dependency: [if], data = [none]
n.replaceChild(right, replacement); // depends on control dependency: [if], data = [none]
reportChangeToEnclosingScope(n); // depends on control dependency: [if], data = [none]
}
}
return n;
} } |
public class class_name {
private static BundleContext checkBundleContext() {
if (bundleContext == null) {
if (tc.isDebugEnabled()) {
Tr.debug(tc, "BundleContext is null and should not be");
}
}
return bundleContext;
} } | public class class_name {
private static BundleContext checkBundleContext() {
if (bundleContext == null) {
if (tc.isDebugEnabled()) {
Tr.debug(tc, "BundleContext is null and should not be"); // depends on control dependency: [if], data = [none]
}
}
return bundleContext;
} } |
public class class_name {
public void error(String message, Object ... args) {
if (log.isErrorEnabled()) {
log.error(String.format(message, args));
}
} } | public class class_name {
public void error(String message, Object ... args) {
if (log.isErrorEnabled()) {
log.error(String.format(message, args)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public final void ruleXAnnotationElementValueOrCommaList() throws RecognitionException {
int stackSize = keepStackSize();
try {
// InternalXbaseWithAnnotations.g:121:2: ( ( ( rule__XAnnotationElementValueOrCommaList__Alternatives ) ) )
// InternalXbaseWithAnnotations.g:122:2: ( ( rule__XAnnotationElementValueOrCommaList__Alternatives ) )
{
// InternalXbaseWithAnnotations.g:122:2: ( ( rule__XAnnotationElementValueOrCommaList__Alternatives ) )
// InternalXbaseWithAnnotations.g:123:3: ( rule__XAnnotationElementValueOrCommaList__Alternatives )
{
if ( state.backtracking==0 ) {
before(grammarAccess.getXAnnotationElementValueOrCommaListAccess().getAlternatives());
}
// InternalXbaseWithAnnotations.g:124:3: ( rule__XAnnotationElementValueOrCommaList__Alternatives )
// InternalXbaseWithAnnotations.g:124:4: rule__XAnnotationElementValueOrCommaList__Alternatives
{
pushFollow(FOLLOW_2);
rule__XAnnotationElementValueOrCommaList__Alternatives();
state._fsp--;
if (state.failed) return ;
}
if ( state.backtracking==0 ) {
after(grammarAccess.getXAnnotationElementValueOrCommaListAccess().getAlternatives());
}
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
restoreStackSize(stackSize);
}
return ;
} } | public class class_name {
public final void ruleXAnnotationElementValueOrCommaList() throws RecognitionException {
int stackSize = keepStackSize();
try {
// InternalXbaseWithAnnotations.g:121:2: ( ( ( rule__XAnnotationElementValueOrCommaList__Alternatives ) ) )
// InternalXbaseWithAnnotations.g:122:2: ( ( rule__XAnnotationElementValueOrCommaList__Alternatives ) )
{
// InternalXbaseWithAnnotations.g:122:2: ( ( rule__XAnnotationElementValueOrCommaList__Alternatives ) )
// InternalXbaseWithAnnotations.g:123:3: ( rule__XAnnotationElementValueOrCommaList__Alternatives )
{
if ( state.backtracking==0 ) {
before(grammarAccess.getXAnnotationElementValueOrCommaListAccess().getAlternatives()); // depends on control dependency: [if], data = [none]
}
// InternalXbaseWithAnnotations.g:124:3: ( rule__XAnnotationElementValueOrCommaList__Alternatives )
// InternalXbaseWithAnnotations.g:124:4: rule__XAnnotationElementValueOrCommaList__Alternatives
{
pushFollow(FOLLOW_2);
rule__XAnnotationElementValueOrCommaList__Alternatives();
state._fsp--;
if (state.failed) return ;
}
if ( state.backtracking==0 ) {
after(grammarAccess.getXAnnotationElementValueOrCommaListAccess().getAlternatives()); // depends on control dependency: [if], data = [none]
}
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
restoreStackSize(stackSize);
}
return ;
} } |
public class class_name {
public void declareRequestedFieldname(String name) {
if (name.endsWith(".*")) {
stringSetValues.put(name, new HashMap<>());
stringSetPrefixes.put(name.substring(0, name.length() - 1), name);
}
} } | public class class_name {
public void declareRequestedFieldname(String name) {
if (name.endsWith(".*")) {
stringSetValues.put(name, new HashMap<>()); // depends on control dependency: [if], data = [none]
stringSetPrefixes.put(name.substring(0, name.length() - 1), name); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String getPrimaryType(Resource resource) {
String result = null;
if (resource != null) {
if (resource instanceof JcrResource) {
// use the resource itself if it implements the JcrResource interface (maybe a version of a resource)
result = ((JcrResource) resource).getPrimaryType();
} else {
Node node = resource.adaptTo(Node.class);
if (node != null) {
try {
NodeType type = node.getPrimaryNodeType();
if (type != null) {
result = type.getName();
}
} catch (RepositoryException ignore) {
}
}
if (result == null) {
ValueMap values = resource.adaptTo(ValueMap.class);
if (values != null) {
result = values.get(JcrConstants.JCR_PRIMARYTYPE, (String) null);
}
}
}
}
return result;
} } | public class class_name {
public static String getPrimaryType(Resource resource) {
String result = null;
if (resource != null) {
if (resource instanceof JcrResource) {
// use the resource itself if it implements the JcrResource interface (maybe a version of a resource)
result = ((JcrResource) resource).getPrimaryType(); // depends on control dependency: [if], data = [none]
} else {
Node node = resource.adaptTo(Node.class);
if (node != null) {
try {
NodeType type = node.getPrimaryNodeType();
if (type != null) {
result = type.getName(); // depends on control dependency: [if], data = [none]
}
} catch (RepositoryException ignore) {
} // depends on control dependency: [catch], data = [none]
}
if (result == null) {
ValueMap values = resource.adaptTo(ValueMap.class);
if (values != null) {
result = values.get(JcrConstants.JCR_PRIMARYTYPE, (String) null); // depends on control dependency: [if], data = [null)]
}
}
}
}
return result;
} } |
public class class_name {
@Override
public Msg genKeys() {
final Msg infoLines = new Msg();
try {
final PKITools pki = new PKITools();
if (getPrivKeyFileName() == null) {
infoLines.add("Must provide a -privkey <file> parameter");
return infoLines;
}
if (getPublicKeyFileName() == null) {
infoLines.add("Must provide a -pubkey <file> parameter");
return infoLines;
}
final PKITools.RSAKeys keys =
pki.genRSAKeysIntoFiles(getPrivKeyFileName(),
getPublicKeyFileName(),
true);
if (keys == null) {
infoLines.add("Generation of keys failed");
return infoLines;
}
// Now try the keys on the test text.
final int numKeys = pki.countKeys(getPrivKeyFileName());
//if (debug) {
// infoLines.add("Number of keys: " + numKeys);
//}
infoLines.add("test with---->" + testText);
final String etext = pki.encryptWithKeyFile(getPublicKeyFileName(),
testText, numKeys - 1);
infoLines.add("encrypts to-->" + etext);
final String detext = pki.decryptWithKeyFile(getPrivKeyFileName(),
etext, numKeys - 1);
infoLines.add("decrypts to-->" + detext);
if (!testText.equals(detext)) {
infoLines.add("Validity check failed: encrypt/decrypt failure");
} else {
infoLines.add("");
infoLines.add("Validity check succeeded");
}
} catch (final Throwable t) {
error(t);
infoLines.add("Exception - check logs: " + t.getMessage());
}
return infoLines;
} } | public class class_name {
@Override
public Msg genKeys() {
final Msg infoLines = new Msg();
try {
final PKITools pki = new PKITools();
if (getPrivKeyFileName() == null) {
infoLines.add("Must provide a -privkey <file> parameter"); // depends on control dependency: [if], data = [none]
return infoLines; // depends on control dependency: [if], data = [none]
}
if (getPublicKeyFileName() == null) {
infoLines.add("Must provide a -pubkey <file> parameter"); // depends on control dependency: [if], data = [none]
return infoLines; // depends on control dependency: [if], data = [none]
}
final PKITools.RSAKeys keys =
pki.genRSAKeysIntoFiles(getPrivKeyFileName(),
getPublicKeyFileName(),
true);
if (keys == null) {
infoLines.add("Generation of keys failed"); // depends on control dependency: [if], data = [none]
return infoLines; // depends on control dependency: [if], data = [none]
}
// Now try the keys on the test text.
final int numKeys = pki.countKeys(getPrivKeyFileName());
//if (debug) {
// infoLines.add("Number of keys: " + numKeys);
//}
infoLines.add("test with---->" + testText); // depends on control dependency: [try], data = [none]
final String etext = pki.encryptWithKeyFile(getPublicKeyFileName(),
testText, numKeys - 1);
infoLines.add("encrypts to-->" + etext); // depends on control dependency: [try], data = [none]
final String detext = pki.decryptWithKeyFile(getPrivKeyFileName(),
etext, numKeys - 1);
infoLines.add("decrypts to-->" + detext); // depends on control dependency: [try], data = [none]
if (!testText.equals(detext)) {
infoLines.add("Validity check failed: encrypt/decrypt failure"); // depends on control dependency: [if], data = [none]
} else {
infoLines.add(""); // depends on control dependency: [if], data = [none]
infoLines.add("Validity check succeeded"); // depends on control dependency: [if], data = [none]
}
} catch (final Throwable t) {
error(t);
infoLines.add("Exception - check logs: " + t.getMessage());
} // depends on control dependency: [catch], data = [none]
return infoLines;
} } |
public class class_name {
public void setResourceSpecificResults(java.util.Collection<ResourceSpecificResult> resourceSpecificResults) {
if (resourceSpecificResults == null) {
this.resourceSpecificResults = null;
return;
}
this.resourceSpecificResults = new com.amazonaws.internal.SdkInternalList<ResourceSpecificResult>(resourceSpecificResults);
} } | public class class_name {
public void setResourceSpecificResults(java.util.Collection<ResourceSpecificResult> resourceSpecificResults) {
if (resourceSpecificResults == null) {
this.resourceSpecificResults = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.resourceSpecificResults = new com.amazonaws.internal.SdkInternalList<ResourceSpecificResult>(resourceSpecificResults);
} } |
public class class_name {
private void configureSecurityContextChain(
final Map<String, String> principals,
final Map<String, String> credentials,
final ISecurityContext securityContext,
final String baseContextName)
throws PortalSecurityException {
this.setContextParameters(principals, credentials, baseContextName, securityContext);
// load principals and credentials for the subContexts
for (final Enumeration<String> subCtxNames = securityContext.getSubContextNames();
subCtxNames.hasMoreElements(); ) {
final String fullSubCtxName = subCtxNames.nextElement();
// Strip off the base of the name
String localSubCtxName = fullSubCtxName;
if (fullSubCtxName.startsWith(baseContextName + ".")) {
localSubCtxName = localSubCtxName.substring(baseContextName.length() + 1);
}
final ISecurityContext sc = securityContext.getSubContext(localSubCtxName);
this.configureSecurityContextChain(principals, credentials, sc, fullSubCtxName);
}
} } | public class class_name {
private void configureSecurityContextChain(
final Map<String, String> principals,
final Map<String, String> credentials,
final ISecurityContext securityContext,
final String baseContextName)
throws PortalSecurityException {
this.setContextParameters(principals, credentials, baseContextName, securityContext);
// load principals and credentials for the subContexts
for (final Enumeration<String> subCtxNames = securityContext.getSubContextNames();
subCtxNames.hasMoreElements(); ) {
final String fullSubCtxName = subCtxNames.nextElement();
// Strip off the base of the name
String localSubCtxName = fullSubCtxName;
if (fullSubCtxName.startsWith(baseContextName + ".")) {
localSubCtxName = localSubCtxName.substring(baseContextName.length() + 1); // depends on control dependency: [if], data = [none]
}
final ISecurityContext sc = securityContext.getSubContext(localSubCtxName);
this.configureSecurityContextChain(principals, credentials, sc, fullSubCtxName);
}
} } |
public class class_name {
private void doRemoveRemoteSource(RequestErrorTracker errorTracker, Request request, SettableFuture<?> future)
{
errorTracker.startRequest();
FutureCallback<StatusResponse> callback = new FutureCallback<StatusResponse>() {
@Override
public void onSuccess(@Nullable StatusResponse response)
{
if (response == null) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Request failed with null response");
}
if (response.getStatusCode() != OK.code()) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Request failed with HTTP status " + response.getStatusCode());
}
future.set(null);
}
@Override
public void onFailure(Throwable failedReason)
{
if (failedReason instanceof RejectedExecutionException && httpClient.isClosed()) {
log.error("Unable to destroy exchange source at %s. HTTP client is closed", request.getUri());
future.setException(failedReason);
return;
}
// record failure
try {
errorTracker.requestFailed(failedReason);
}
catch (PrestoException e) {
future.setException(e);
return;
}
// if throttled due to error, asynchronously wait for timeout and try again
ListenableFuture<?> errorRateLimit = errorTracker.acquireRequestPermit();
if (errorRateLimit.isDone()) {
doRemoveRemoteSource(errorTracker, request, future);
}
else {
errorRateLimit.addListener(() -> doRemoveRemoteSource(errorTracker, request, future), errorScheduledExecutor);
}
}
};
addCallback(httpClient.executeAsync(request, createStatusResponseHandler()), callback, directExecutor());
} } | public class class_name {
private void doRemoveRemoteSource(RequestErrorTracker errorTracker, Request request, SettableFuture<?> future)
{
errorTracker.startRequest();
FutureCallback<StatusResponse> callback = new FutureCallback<StatusResponse>() {
@Override
public void onSuccess(@Nullable StatusResponse response)
{
if (response == null) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Request failed with null response");
}
if (response.getStatusCode() != OK.code()) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Request failed with HTTP status " + response.getStatusCode());
}
future.set(null);
}
@Override
public void onFailure(Throwable failedReason)
{
if (failedReason instanceof RejectedExecutionException && httpClient.isClosed()) {
log.error("Unable to destroy exchange source at %s. HTTP client is closed", request.getUri()); // depends on control dependency: [if], data = [none]
future.setException(failedReason); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// record failure
try {
errorTracker.requestFailed(failedReason); // depends on control dependency: [try], data = [none]
}
catch (PrestoException e) {
future.setException(e);
return;
} // depends on control dependency: [catch], data = [none]
// if throttled due to error, asynchronously wait for timeout and try again
ListenableFuture<?> errorRateLimit = errorTracker.acquireRequestPermit();
if (errorRateLimit.isDone()) {
doRemoveRemoteSource(errorTracker, request, future); // depends on control dependency: [if], data = [none]
}
else {
errorRateLimit.addListener(() -> doRemoveRemoteSource(errorTracker, request, future), errorScheduledExecutor); // depends on control dependency: [if], data = [none]
}
}
};
addCallback(httpClient.executeAsync(request, createStatusResponseHandler()), callback, directExecutor());
} } |
public class class_name {
public static String toLegacyType(String keyword, String value) {
String legacyType = KeyTypeData.toLegacyType(keyword, value, null, null);
if (legacyType == null) {
// Checks if the specified locale type is well-formed with the legacy locale syntax.
//
// Note:
// Neither ICU nor LDML/CLDR provides the definition of keyword syntax.
// However, a type should not contain '=' obviously. For now, all existing
// types are using ASCII alphabetic letters with a few symbol letters. We won't
// add any new type that is not compatible with the BCP 47 syntax except timezone
            // IDs. For now, we assume a valid type starts with [0-9a-zA-Z], but may contain
// '-' '_' '/' in the middle.
if (value.matches("[0-9a-zA-Z]+([_/\\-][0-9a-zA-Z]+)*")) {
legacyType = AsciiUtil.toLowerString(value);
}
}
return legacyType;
} } | public class class_name {
public static String toLegacyType(String keyword, String value) {
String legacyType = KeyTypeData.toLegacyType(keyword, value, null, null);
if (legacyType == null) {
// Checks if the specified locale type is well-formed with the legacy locale syntax.
//
// Note:
// Neither ICU nor LDML/CLDR provides the definition of keyword syntax.
// However, a type should not contain '=' obviously. For now, all existing
// types are using ASCII alphabetic letters with a few symbol letters. We won't
// add any new type that is not compatible with the BCP 47 syntax except timezone
            // IDs. For now, we assume a valid type starts with [0-9a-zA-Z], but may contain
// '-' '_' '/' in the middle.
if (value.matches("[0-9a-zA-Z]+([_/\\-][0-9a-zA-Z]+)*")) {
legacyType = AsciiUtil.toLowerString(value); // depends on control dependency: [if], data = [none]
}
}
return legacyType;
} } |
public class class_name {
public final File getTemplateDir() {
if ((templateDir == null) && (templatePath != null)) {
try {
templateDir = new File(templatePath).getCanonicalFile();
} catch (final IOException ex) {
throw new RuntimeException("Couldn't determine canonical template file: " + templatePath, ex);
}
}
return templateDir;
} } | public class class_name {
public final File getTemplateDir() {
if ((templateDir == null) && (templatePath != null)) {
try {
                templateDir = new File(templatePath).getCanonicalFile(); // depends on control dependency: [try], data = [none]
} catch (final IOException ex) {
throw new RuntimeException("Couldn't determine canonical template file: " + templatePath, ex);
            } // depends on control dependency: [catch], data = [none]
}
return templateDir;
} } |
public class class_name {
protected final void setEndpoints(Iterable<Endpoint> endpoints) {
final List<Endpoint> oldEndpoints = this.endpoints;
final List<Endpoint> newEndpoints = ImmutableList.sortedCopyOf(endpoints);
if (oldEndpoints.equals(newEndpoints)) {
return;
}
endpointsLock.lock();
try {
this.endpoints = newEndpoints;
} finally {
endpointsLock.unlock();
}
notifyListeners(newEndpoints);
completeInitialEndpointsFuture(newEndpoints);
} } | public class class_name {
protected final void setEndpoints(Iterable<Endpoint> endpoints) {
final List<Endpoint> oldEndpoints = this.endpoints;
final List<Endpoint> newEndpoints = ImmutableList.sortedCopyOf(endpoints);
if (oldEndpoints.equals(newEndpoints)) {
return; // depends on control dependency: [if], data = [none]
}
endpointsLock.lock();
try {
this.endpoints = newEndpoints; // depends on control dependency: [try], data = [none]
} finally {
endpointsLock.unlock();
}
notifyListeners(newEndpoints);
completeInitialEndpointsFuture(newEndpoints);
} } |
public class class_name {
public boolean moveElement(String name, Object sourceParent, Object targetParent) {
Element sourceGroup = null;
Element targetGroup = null;
Element element = null;
if (sourceParent != null) {
sourceGroup = getGroup(sourceParent);
element = getElement(sourceParent, name);
}
if (targetParent != null) {
targetGroup = getGroup(targetParent);
}
if (sourceGroup == null || targetGroup == null) {
return false;
}
if (Dom.isOrHasChild(sourceGroup, element)) {
Dom.removeChild(sourceGroup, element);
String newId = Dom.assembleId(targetGroup.getId(), name);
elementToName.remove(element.getId());
elementToName.put(newId, name);
Dom.setElementAttribute(element, "id", newId);
Dom.appendChild(targetGroup, element);
return true;
}
return false;
} } | public class class_name {
public boolean moveElement(String name, Object sourceParent, Object targetParent) {
Element sourceGroup = null;
Element targetGroup = null;
Element element = null;
if (sourceParent != null) {
sourceGroup = getGroup(sourceParent); // depends on control dependency: [if], data = [(sourceParent]
element = getElement(sourceParent, name); // depends on control dependency: [if], data = [(sourceParent]
}
if (targetParent != null) {
targetGroup = getGroup(targetParent); // depends on control dependency: [if], data = [(targetParent]
}
if (sourceGroup == null || targetGroup == null) {
return false; // depends on control dependency: [if], data = [none]
}
if (Dom.isOrHasChild(sourceGroup, element)) {
Dom.removeChild(sourceGroup, element); // depends on control dependency: [if], data = [none]
String newId = Dom.assembleId(targetGroup.getId(), name);
elementToName.remove(element.getId()); // depends on control dependency: [if], data = [none]
elementToName.put(newId, name); // depends on control dependency: [if], data = [none]
Dom.setElementAttribute(element, "id", newId); // depends on control dependency: [if], data = [none]
Dom.appendChild(targetGroup, element); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
public void addActionResult(ActionResult actionResult) {
ActionResult existActionResult = getActionResult(actionResult.getActionId());
if (existActionResult != null &&
existActionResult.getResultValue() instanceof ResultValueMap &&
actionResult.getResultValue() instanceof ResultValueMap) {
ResultValueMap resultValueMap = (ResultValueMap)existActionResult.getResultValue();
resultValueMap.putAll((ResultValueMap)actionResult.getResultValue());
} else {
add(actionResult);
}
} } | public class class_name {
public void addActionResult(ActionResult actionResult) {
ActionResult existActionResult = getActionResult(actionResult.getActionId());
if (existActionResult != null &&
existActionResult.getResultValue() instanceof ResultValueMap &&
actionResult.getResultValue() instanceof ResultValueMap) {
ResultValueMap resultValueMap = (ResultValueMap)existActionResult.getResultValue();
resultValueMap.putAll((ResultValueMap)actionResult.getResultValue()); // depends on control dependency: [if], data = [none]
} else {
add(actionResult); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
void dropSchemas(Grantee grantee, boolean cascade) {
HsqlArrayList list = getSchemas(grantee);
Iterator it = list.iterator();
while (it.hasNext()) {
Schema schema = (Schema) it.next();
dropSchema(schema.name.name, cascade);
}
} } | public class class_name {
void dropSchemas(Grantee grantee, boolean cascade) {
HsqlArrayList list = getSchemas(grantee);
Iterator it = list.iterator();
while (it.hasNext()) {
Schema schema = (Schema) it.next();
dropSchema(schema.name.name, cascade); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
protected void expandClusterOrder(DBID ipt, ClusterOrder order, DistanceQuery<V> dq, FiniteProgress prog) {
UpdatableHeap<OPTICSHeapEntry> heap = new UpdatableHeap<>();
heap.add(new OPTICSHeapEntry(ipt, null, Double.POSITIVE_INFINITY));
while(!heap.isEmpty()) {
final OPTICSHeapEntry current = heap.poll();
DBID currPt = current.objectID;
order.add(currPt, current.reachability, current.predecessorID);
processed.add(currPt);
double coredist = inverseDensities.doubleValue(currPt);
for(DBIDIter it = neighs.get(currPt).iter(); it.valid(); it.advance()) {
if(processed.contains(it)) {
continue;
}
double nrdist = dq.distance(currPt, it);
if(coredist > nrdist) {
nrdist = coredist;
}
if(reachDist.doubleValue(it) == UNDEFINED_DISTANCE) {
reachDist.put(it, nrdist);
}
else if(nrdist < reachDist.doubleValue(it)) {
reachDist.put(it, nrdist);
}
heap.add(new OPTICSHeapEntry(DBIDUtil.deref(it), currPt, nrdist));
}
LOG.incrementProcessed(prog);
}
} } | public class class_name {
protected void expandClusterOrder(DBID ipt, ClusterOrder order, DistanceQuery<V> dq, FiniteProgress prog) {
UpdatableHeap<OPTICSHeapEntry> heap = new UpdatableHeap<>();
heap.add(new OPTICSHeapEntry(ipt, null, Double.POSITIVE_INFINITY));
while(!heap.isEmpty()) {
final OPTICSHeapEntry current = heap.poll();
DBID currPt = current.objectID;
order.add(currPt, current.reachability, current.predecessorID); // depends on control dependency: [while], data = [none]
processed.add(currPt); // depends on control dependency: [while], data = [none]
double coredist = inverseDensities.doubleValue(currPt);
for(DBIDIter it = neighs.get(currPt).iter(); it.valid(); it.advance()) {
if(processed.contains(it)) {
continue;
}
double nrdist = dq.distance(currPt, it);
if(coredist > nrdist) {
nrdist = coredist; // depends on control dependency: [if], data = [none]
}
if(reachDist.doubleValue(it) == UNDEFINED_DISTANCE) {
reachDist.put(it, nrdist); // depends on control dependency: [if], data = [none]
}
else if(nrdist < reachDist.doubleValue(it)) {
reachDist.put(it, nrdist); // depends on control dependency: [if], data = [none]
}
heap.add(new OPTICSHeapEntry(DBIDUtil.deref(it), currPt, nrdist)); // depends on control dependency: [for], data = [it]
}
LOG.incrementProcessed(prog); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
public static IAMInfo getIAMInstanceProfileInfo() {
String json = getData(EC2_METADATA_ROOT + "/iam/info");
if (null == json) {
return null;
}
try {
return mapper.readValue(json, IAMInfo.class);
} catch (Exception e) {
log.warn("Unable to parse IAM Instance profile info (" + json
+ "): " + e.getMessage(), e);
return null;
}
} } | public class class_name {
public static IAMInfo getIAMInstanceProfileInfo() {
String json = getData(EC2_METADATA_ROOT + "/iam/info");
if (null == json) {
return null; // depends on control dependency: [if], data = [none]
}
try {
return mapper.readValue(json, IAMInfo.class); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
log.warn("Unable to parse IAM Instance profile info (" + json
+ "): " + e.getMessage(), e);
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
// @Data
this.addAnnotations(topLevelClass, EnumLombokAnnotations.DATA);
if (topLevelClass.getSuperClass() != null) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.EQUALS_AND_HASH_CODE_CALL_SUPER);
this.addAnnotations(topLevelClass, EnumLombokAnnotations.TO_STRING_CALL_SUPER);
}
// @Builder
List<IntrospectedColumn> columns = IntrospectedTableTools.getModelBaseRecordClomns(introspectedTable);
if (this.hasBuilder && PluginTools.getHook(ILombokPluginHook.class).modelBaseRecordBuilderClassGenerated(topLevelClass, columns, introspectedTable)) {
            // has a child class or a parent class
if (introspectedTable.getRules().generateRecordWithBLOBsClass() || introspectedTable.getRules().generatePrimaryKeyClass() || topLevelClass.getSuperClass() != null) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.SUPER_BUILDER);
} else {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.BUILDER);
}
}
// @Constructor
if (this.hasNoArgsConstructor) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.NO_ARGS_CONSTRUCTOR);
}
if (this.hasAllArgsConstructor) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.ALL_ARGS_CONSTRUCTOR);
}
return super.modelBaseRecordClassGenerated(topLevelClass, introspectedTable);
} } | public class class_name {
@Override
public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
// @Data
this.addAnnotations(topLevelClass, EnumLombokAnnotations.DATA);
if (topLevelClass.getSuperClass() != null) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.EQUALS_AND_HASH_CODE_CALL_SUPER); // depends on control dependency: [if], data = [none]
this.addAnnotations(topLevelClass, EnumLombokAnnotations.TO_STRING_CALL_SUPER); // depends on control dependency: [if], data = [none]
}
// @Builder
List<IntrospectedColumn> columns = IntrospectedTableTools.getModelBaseRecordClomns(introspectedTable);
if (this.hasBuilder && PluginTools.getHook(ILombokPluginHook.class).modelBaseRecordBuilderClassGenerated(topLevelClass, columns, introspectedTable)) {
            // has a child class or a parent class
if (introspectedTable.getRules().generateRecordWithBLOBsClass() || introspectedTable.getRules().generatePrimaryKeyClass() || topLevelClass.getSuperClass() != null) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.SUPER_BUILDER); // depends on control dependency: [if], data = [none]
} else {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.BUILDER); // depends on control dependency: [if], data = [none]
}
}
// @Constructor
if (this.hasNoArgsConstructor) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.NO_ARGS_CONSTRUCTOR); // depends on control dependency: [if], data = [none]
}
if (this.hasAllArgsConstructor) {
this.addAnnotations(topLevelClass, EnumLombokAnnotations.ALL_ARGS_CONSTRUCTOR); // depends on control dependency: [if], data = [none]
}
return super.modelBaseRecordClassGenerated(topLevelClass, introspectedTable);
} } |
public class class_name {
public List<PrimaryKey> getPrimaryKeys(final String catalog, final String schema, final String table)
throws SQLException {
final List<PrimaryKey> list = new ArrayList<>();
try (ResultSet results = databaseMetadata.getPrimaryKeys(catalog, schema, table)) {
if (results != null) {
bind(results, PrimaryKey.class, list);
}
}
return list;
} } | public class class_name {
public List<PrimaryKey> getPrimaryKeys(final String catalog, final String schema, final String table)
throws SQLException {
final List<PrimaryKey> list = new ArrayList<>();
try (ResultSet results = databaseMetadata.getPrimaryKeys(catalog, schema, table)) {
if (results != null) {
bind(results, PrimaryKey.class, list); // depends on control dependency: [if], data = [(results]
}
}
return list;
} } |
public class class_name {
@SuppressWarnings("unchecked")
public static <Item extends IItem, A extends IAdapter> FastAdapter<Item> with(@Nullable Collection<A> adapters, @Nullable Collection<IAdapterExtension<Item>> extensions) {
FastAdapter<Item> fastAdapter = new FastAdapter<>();
if (adapters == null) {
fastAdapter.mAdapters.add((IAdapter<Item>) items());
} else {
fastAdapter.mAdapters.addAll((Collection<IAdapter<Item>>) adapters);
}
for (int i = 0; i < fastAdapter.mAdapters.size(); i++) {
fastAdapter.mAdapters.get(i).withFastAdapter(fastAdapter).setOrder(i);
}
fastAdapter.cacheSizes();
if (extensions != null) {
for (IAdapterExtension<Item> extension : extensions) {
fastAdapter.addExtension(extension);
}
}
return fastAdapter;
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <Item extends IItem, A extends IAdapter> FastAdapter<Item> with(@Nullable Collection<A> adapters, @Nullable Collection<IAdapterExtension<Item>> extensions) {
FastAdapter<Item> fastAdapter = new FastAdapter<>();
if (adapters == null) {
fastAdapter.mAdapters.add((IAdapter<Item>) items()); // depends on control dependency: [if], data = [none]
} else {
fastAdapter.mAdapters.addAll((Collection<IAdapter<Item>>) adapters); // depends on control dependency: [if], data = [none]
}
for (int i = 0; i < fastAdapter.mAdapters.size(); i++) {
fastAdapter.mAdapters.get(i).withFastAdapter(fastAdapter).setOrder(i); // depends on control dependency: [for], data = [i]
}
fastAdapter.cacheSizes();
if (extensions != null) {
for (IAdapterExtension<Item> extension : extensions) {
fastAdapter.addExtension(extension); // depends on control dependency: [for], data = [extension]
}
}
return fastAdapter;
} } |
public class class_name {
public final String[] getContext(final int index, final Object[] tokens,
final String[] tags) {
String next, nextnext, lex, prev, prevprev;
String tagprev, tagprevprev;
tagprev = tagprevprev = null;
next = nextnext = lex = prev = prevprev = null;
lex = tokens[index].toString();
if (tokens.length > index + 1) {
next = tokens[index + 1].toString();
if (tokens.length > index + 2) {
nextnext = tokens[index + 2].toString();
} else {
nextnext = this.SE; // Sentence End
}
} else {
next = this.SE; // Sentence End
}
if (index - 1 >= 0) {
prev = tokens[index - 1].toString();
tagprev = tags[index - 1];
if (index - 2 >= 0) {
prevprev = tokens[index - 2].toString();
tagprevprev = tags[index - 2];
} else {
prevprev = this.SB; // Sentence Beginning
}
} else {
prev = this.SB; // Sentence Beginning
}
final String cacheKey = index + tagprev + tagprevprev;
if (this.contextsCache != null) {
if (this.wordsKey == tokens) {
final String[] cachedContexts = (String[]) this.contextsCache
.get(cacheKey);
if (cachedContexts != null) {
return cachedContexts;
}
} else {
this.contextsCache.clear();
this.wordsKey = tokens;
}
}
final List<String> featureList = new ArrayList<String>();
featureList.add("default");
// add the word itself
featureList.add("w=" + lex);
this.dictGram[0] = lex;
if (this.dict == null || !this.dict.contains(new StringList(this.dictGram))) {
// do some basic suffix analysis
final String[] suffs = getSuffixes(lex);
for (final String suff : suffs) {
featureList.add("suf=" + suff);
}
final String[] prefs = getPrefixes(lex);
for (final String pref : prefs) {
featureList.add("pre=" + pref);
}
// see if the word has any special characters
if (lex.indexOf('-') != -1) {
featureList.add("h");
}
if (hasCap.matcher(lex).find()) {
featureList.add("c");
}
if (hasNum.matcher(lex).find()) {
featureList.add("d");
}
}
// add the words and pos's of the surrounding context
if (prev != null) {
featureList.add("pw=" + prev);
// bigram w-1,w
featureList.add("pw,w=" + prev + "," + lex);
if (tagprev != null) {
featureList.add("pt=" + tagprev);
// bigram tag-1, w
featureList.add("pt,w=" + tagprev + "," + lex);
}
if (prevprev != null) {
featureList.add("ppw=" + prevprev);
if (tagprevprev != null) {
// bigram tag-2,tag-1
featureList.add("pt2,pt1=" + tagprevprev + "," + tagprev);
}
}
}
if (next != null) {
featureList.add("nw=" + next);
if (nextnext != null) {
featureList.add("nnw=" + nextnext);
}
}
final String[] contexts = featureList
.toArray(new String[featureList.size()]);
if (this.contextsCache != null) {
this.contextsCache.put(cacheKey, contexts);
}
return contexts;
} } | public class class_name {
public final String[] getContext(final int index, final Object[] tokens,
final String[] tags) {
String next, nextnext, lex, prev, prevprev;
String tagprev, tagprevprev;
tagprev = tagprevprev = null;
next = nextnext = lex = prev = prevprev = null;
lex = tokens[index].toString();
if (tokens.length > index + 1) {
next = tokens[index + 1].toString(); // depends on control dependency: [if], data = [none]
if (tokens.length > index + 2) {
nextnext = tokens[index + 2].toString(); // depends on control dependency: [if], data = [none]
} else {
nextnext = this.SE; // Sentence End // depends on control dependency: [if], data = [none]
}
} else {
next = this.SE; // Sentence End // depends on control dependency: [if], data = [none]
}
if (index - 1 >= 0) {
prev = tokens[index - 1].toString(); // depends on control dependency: [if], data = [none]
tagprev = tags[index - 1]; // depends on control dependency: [if], data = [none]
if (index - 2 >= 0) {
prevprev = tokens[index - 2].toString(); // depends on control dependency: [if], data = [none]
tagprevprev = tags[index - 2]; // depends on control dependency: [if], data = [none]
} else {
prevprev = this.SB; // Sentence Beginning // depends on control dependency: [if], data = [none]
}
} else {
prev = this.SB; // Sentence Beginning // depends on control dependency: [if], data = [none]
}
final String cacheKey = index + tagprev + tagprevprev;
if (this.contextsCache != null) {
if (this.wordsKey == tokens) {
final String[] cachedContexts = (String[]) this.contextsCache
.get(cacheKey);
if (cachedContexts != null) {
return cachedContexts; // depends on control dependency: [if], data = [none]
}
} else {
this.contextsCache.clear(); // depends on control dependency: [if], data = [none]
this.wordsKey = tokens; // depends on control dependency: [if], data = [none]
}
}
final List<String> featureList = new ArrayList<String>();
featureList.add("default");
// add the word itself
featureList.add("w=" + lex);
this.dictGram[0] = lex;
if (this.dict == null || !this.dict.contains(new StringList(this.dictGram))) {
// do some basic suffix analysis
final String[] suffs = getSuffixes(lex);
for (final String suff : suffs) {
featureList.add("suf=" + suff); // depends on control dependency: [for], data = [suff]
}
final String[] prefs = getPrefixes(lex);
for (final String pref : prefs) {
featureList.add("pre=" + pref); // depends on control dependency: [for], data = [pref]
}
// see if the word has any special characters
if (lex.indexOf('-') != -1) {
featureList.add("h"); // depends on control dependency: [if], data = [none]
}
if (hasCap.matcher(lex).find()) {
featureList.add("c"); // depends on control dependency: [if], data = [none]
}
if (hasNum.matcher(lex).find()) {
featureList.add("d"); // depends on control dependency: [if], data = [none]
}
}
// add the words and pos's of the surrounding context
if (prev != null) {
featureList.add("pw=" + prev); // depends on control dependency: [if], data = [none]
// bigram w-1,w
featureList.add("pw,w=" + prev + "," + lex); // depends on control dependency: [if], data = [none]
if (tagprev != null) {
featureList.add("pt=" + tagprev); // depends on control dependency: [if], data = [none]
// bigram tag-1, w
featureList.add("pt,w=" + tagprev + "," + lex); // depends on control dependency: [if], data = [none]
}
if (prevprev != null) {
featureList.add("ppw=" + prevprev); // depends on control dependency: [if], data = [none]
if (tagprevprev != null) {
// bigram tag-2,tag-1
featureList.add("pt2,pt1=" + tagprevprev + "," + tagprev); // depends on control dependency: [if], data = [none]
}
}
}
if (next != null) {
featureList.add("nw=" + next); // depends on control dependency: [if], data = [none]
if (nextnext != null) {
featureList.add("nnw=" + nextnext); // depends on control dependency: [if], data = [none]
}
}
final String[] contexts = featureList
.toArray(new String[featureList.size()]);
if (this.contextsCache != null) {
this.contextsCache.put(cacheKey, contexts); // depends on control dependency: [if], data = [none]
}
return contexts;
} } |
public class class_name {
public final void updateObjectInUse(T object, boolean inUse) {
int origSize = inUseObjects.size();
if (inUse) {
inUseObjects.add(object);
if (origSize == 0) {
handleInUse();
}
} else {
boolean removed = inUseObjects.remove(object);
if (removed && origSize == 1) {
handleNotInUse();
}
}
} } | public class class_name {
public final void updateObjectInUse(T object, boolean inUse) {
int origSize = inUseObjects.size();
if (inUse) {
inUseObjects.add(object); // depends on control dependency: [if], data = [none]
if (origSize == 0) {
handleInUse(); // depends on control dependency: [if], data = [none]
}
} else {
boolean removed = inUseObjects.remove(object);
if (removed && origSize == 1) {
handleNotInUse(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private void checkMatrixBounds() {
RectF rect = getMatrixRectF();
float deltaX = 0, deltaY = 0;
final float viewWidth = getWidth();
final float viewHeight = getHeight();
// Check if image boundary exceeds imageView boundary
if (rect.top > 0 && isCheckTopAndBottom) {
deltaY = -rect.top;
}
if (rect.bottom < viewHeight && isCheckTopAndBottom) {
deltaY = viewHeight - rect.bottom;
}
if (rect.left > 0 && isCheckLeftAndRight) {
deltaX = -rect.left;
}
if (rect.right < viewWidth && isCheckLeftAndRight) {
deltaX = viewWidth - rect.right;
}
scaleMatrix.postTranslate(deltaX, deltaY);
} } | public class class_name {
private void checkMatrixBounds() {
RectF rect = getMatrixRectF();
float deltaX = 0, deltaY = 0;
final float viewWidth = getWidth();
final float viewHeight = getHeight();
// Check if image boundary exceeds imageView boundary
if (rect.top > 0 && isCheckTopAndBottom) {
deltaY = -rect.top; // depends on control dependency: [if], data = [none]
}
if (rect.bottom < viewHeight && isCheckTopAndBottom) {
deltaY = viewHeight - rect.bottom; // depends on control dependency: [if], data = [none]
}
if (rect.left > 0 && isCheckLeftAndRight) {
deltaX = -rect.left; // depends on control dependency: [if], data = [none]
}
if (rect.right < viewWidth && isCheckLeftAndRight) {
deltaX = viewWidth - rect.right; // depends on control dependency: [if], data = [none]
}
scaleMatrix.postTranslate(deltaX, deltaY);
} } |