code | code_dependency
---|---|
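Each row below pairs a Java snippet (the `code` column) with the same snippet annotated in the `code_dependency` column: statements whose execution is governed by an enclosing `if`, `for`, `while`, `try`, or `catch` carry a trailing comment of the form `// depends on control dependency: [construct], data = [variable]`, where `data` names the controlling construct's variable when the statement reads it and is `[none]` otherwise. The sketch below is a minimal, hand-written illustration of that convention; the class and method are hypothetical and its comments only mirror the format visible in the rows, they are not output of the dataset's annotation tooling.

```java
// Hypothetical example (not a dataset row) showing the annotation style.
public class AnnotationExample {

    // Sums the strictly positive entries of the array.
    public static int sumPositive(int[] values) {
        if (values == null) {
            return 0; // depends on control dependency: [if], data = [none]
        }
        int sum = 0;
        for (int i = 0; i < values.length; i++) {
            int v = values[i]; // depends on control dependency: [for], data = [i]
            if (v > 0) {
                sum += v; // depends on control dependency: [if], data = [none]
            }
        }
        return sum;
    }
}
```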
public class class_name {
@Override
public List<String> getTables() {
ContentsDao contentDao = getContentsDao();
List<String> tables;
try {
tables = contentDao.getTables();
} catch (SQLException e) {
throw new GeoPackageException("Failed to retrieve tables", e);
}
return tables;
} } | public class class_name {
@Override
public List<String> getTables() {
ContentsDao contentDao = getContentsDao();
List<String> tables;
try {
tables = contentDao.getTables(); // depends on control dependency: [try], data = [none]
} catch (SQLException e) {
throw new GeoPackageException("Failed to retrieve tables", e);
} // depends on control dependency: [catch], data = [none]
return tables;
} } |
public class class_name {
public boolean isAfterRange(Range<T> otherRange) {
if (otherRange == null) { return false; }
return isAfter(otherRange.max);
} } | public class class_name {
public boolean isAfterRange(Range<T> otherRange) {
if (otherRange == null) { return false; } // depends on control dependency: [if], data = [none]
return isAfter(otherRange.max);
} } |
public class class_name {
protected Content getFrameDetails() {
HtmlTree frameset = HtmlTree.FRAMESET("20%,80%", null, "Documentation frame",
"top.loadFrames()");
if (noOfPackages <= 1) {
addAllClassesFrameTag(frameset);
} else if (noOfPackages > 1) {
HtmlTree leftFrameset = HtmlTree.FRAMESET(null, "30%,70%", "Left frames",
"top.loadFrames()");
addAllPackagesFrameTag(leftFrameset);
addAllClassesFrameTag(leftFrameset);
frameset.addContent(leftFrameset);
}
addClassFrameTag(frameset);
addFrameWarning(frameset);
return frameset;
} } | public class class_name {
protected Content getFrameDetails() {
HtmlTree frameset = HtmlTree.FRAMESET("20%,80%", null, "Documentation frame",
"top.loadFrames()");
if (noOfPackages <= 1) {
addAllClassesFrameTag(frameset); // depends on control dependency: [if], data = [none]
} else if (noOfPackages > 1) {
HtmlTree leftFrameset = HtmlTree.FRAMESET(null, "30%,70%", "Left frames",
"top.loadFrames()");
addAllPackagesFrameTag(leftFrameset); // depends on control dependency: [if], data = [none]
addAllClassesFrameTag(leftFrameset); // depends on control dependency: [if], data = [none]
frameset.addContent(leftFrameset); // depends on control dependency: [if], data = [none]
}
addClassFrameTag(frameset);
addFrameWarning(frameset);
return frameset;
} } |
public class class_name {
public void setGroups(java.util.Collection<SegmentGroup> groups) {
if (groups == null) {
this.groups = null;
return;
}
this.groups = new java.util.ArrayList<SegmentGroup>(groups);
} } | public class class_name {
public void setGroups(java.util.Collection<SegmentGroup> groups) {
if (groups == null) {
this.groups = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.groups = new java.util.ArrayList<SegmentGroup>(groups);
} } |
public class class_name {
static Positions newDisjointPositions(Matches matches, int position) {
if (matches == null) throw new IllegalArgumentException("null matches");
if (position < 0L) throw new IllegalArgumentException();
//TODO consider restoring dedicated size accessor on matches
if (position > matches.store().size()) throw new IllegalArgumentException();
int p = matches.first();
int s = matches.sequence().size();
while (p < position) {
p = matches.next(p + s);
}
return new BitStorePositions(matches, true, p);
} } | public class class_name {
static Positions newDisjointPositions(Matches matches, int position) {
if (matches == null) throw new IllegalArgumentException("null matches");
if (position < 0L) throw new IllegalArgumentException();
//TODO consider restoring dedicated size accessor on matches
if (position > matches.store().size()) throw new IllegalArgumentException();
int p = matches.first();
int s = matches.sequence().size();
while (p < position) {
p = matches.next(p + s); // depends on control dependency: [while], data = [(p]
}
return new BitStorePositions(matches, true, p);
} } |
public class class_name {
public Matrix getP() {
Matrix P = new DenseMatrix(jpvt.length, jpvt.length);
for (int i = 0; i < jpvt.length; i++) {
P.set(jpvt[i], i, 1);
}
return P;
} } | public class class_name {
public Matrix getP() {
Matrix P = new DenseMatrix(jpvt.length, jpvt.length);
for (int i = 0; i < jpvt.length; i++) {
P.set(jpvt[i], i, 1); // depends on control dependency: [for], data = [i]
}
return P;
} } |
public class class_name {
public void setInputs(java.util.Collection<JobInput> inputs) {
if (inputs == null) {
this.inputs = null;
return;
}
this.inputs = new com.amazonaws.internal.SdkInternalList<JobInput>(inputs);
} } | public class class_name {
public void setInputs(java.util.Collection<JobInput> inputs) {
if (inputs == null) {
this.inputs = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.inputs = new com.amazonaws.internal.SdkInternalList<JobInput>(inputs);
} } |
public class class_name {
public static UByte[] ubyte_a(byte... values) {
UByte[] array = new UByte[values.length];
for (int i = 0; i < values.length; i++) {
array[i] = ubyte(values[i]);
}
return array;
} } | public class class_name {
public static UByte[] ubyte_a(byte... values) {
UByte[] array = new UByte[values.length];
for (int i = 0; i < values.length; i++) {
array[i] = ubyte(values[i]);
// depends on control dependency: [for], data = [i]
}
return array;
} } |
public class class_name {
public Object createAnyPreferLegacyOrder(Properties props, String dataSourceID) throws Exception {
lock.readLock().lock();
try {
if (!isInitialized)
try {
// Switch to write lock for lazy initialization
lock.readLock().unlock();
lock.writeLock().lock();
if (!isInitialized) {
if (!loadFromApp())
classloader = AdapterUtil.getClassLoaderWithPriv(sharedLib);
isInitialized = true;
}
} finally {
// Downgrade to read lock for rest of method
lock.readLock().lock();
lock.writeLock().unlock();
}
String vendorPropertiesPID = props instanceof PropertyService ? ((PropertyService) props).getFactoryPID() : PropertyService.FACTORY_PID;
String className;
if (null != (className = (String) properties.get(ConnectionPoolDataSource.class.getName()))
|| null != (className = JDBCDrivers.getConnectionPoolDataSourceClassName(vendorPropertiesPID))
|| null != (className = JDBCDrivers.getConnectionPoolDataSourceClassName(getClasspath(sharedLib, true)))
|| null != (className = (String) properties.get(DataSource.class.getName()))
|| null != (className = JDBCDrivers.getDataSourceClassName(vendorPropertiesPID))
|| null != (className = JDBCDrivers.getDataSourceClassName(getClasspath(sharedLib, true)))
|| null != (className = (String) properties.get(XADataSource.class.getName()))
|| null != (className = JDBCDrivers.getXADataSourceClassName(vendorPropertiesPID))
|| null != (className = JDBCDrivers.getXADataSourceClassName(getClasspath(sharedLib, true))))
return create(className, props, dataSourceID);
String url = props.getProperty("URL", props.getProperty("url"));
if (url != null) {
Driver driver = loadDriver(null, url, classloader, props, dataSourceID);
if (driver != null)
return driver;
}
Set<String> packagesSearched = new LinkedHashSet<String>();
SimpleEntry<Integer, String> dsEntry = JDBCDrivers.inferDataSourceClassFromDriver(classloader,
packagesSearched,
JDBCDrivers.CONNECTION_POOL_DATA_SOURCE,
JDBCDrivers.DATA_SOURCE,
JDBCDrivers.XA_DATA_SOURCE);
if (dsEntry == null) {
List<String> interfaceNames = Arrays.asList(ConnectionPoolDataSource.class.getName(),
DataSource.class.getName(),
XADataSource.class.getName(),
Driver.class.getName());
throw classNotFound(interfaceNames, packagesSearched, dataSourceID, null);
}
return create(className = dsEntry.getValue(), props, dataSourceID);
} finally {
lock.readLock().unlock();
}
} } | public class class_name {
public Object createAnyPreferLegacyOrder(Properties props, String dataSourceID) throws Exception {
lock.readLock().lock();
try {
if (!isInitialized)
try {
// Switch to write lock for lazy initialization
lock.readLock().unlock(); // depends on control dependency: [try], data = [none]
lock.writeLock().lock(); // depends on control dependency: [try], data = [none]
if (!isInitialized) {
if (!loadFromApp())
classloader = AdapterUtil.getClassLoaderWithPriv(sharedLib);
isInitialized = true; // depends on control dependency: [if], data = [none]
}
} finally {
// Downgrade to read lock for rest of method
lock.readLock().lock();
lock.writeLock().unlock();
}
String vendorPropertiesPID = props instanceof PropertyService ? ((PropertyService) props).getFactoryPID() : PropertyService.FACTORY_PID;
String className;
if (null != (className = (String) properties.get(ConnectionPoolDataSource.class.getName()))
|| null != (className = JDBCDrivers.getConnectionPoolDataSourceClassName(vendorPropertiesPID))
|| null != (className = JDBCDrivers.getConnectionPoolDataSourceClassName(getClasspath(sharedLib, true)))
|| null != (className = (String) properties.get(DataSource.class.getName()))
|| null != (className = JDBCDrivers.getDataSourceClassName(vendorPropertiesPID))
|| null != (className = JDBCDrivers.getDataSourceClassName(getClasspath(sharedLib, true)))
|| null != (className = (String) properties.get(XADataSource.class.getName()))
|| null != (className = JDBCDrivers.getXADataSourceClassName(vendorPropertiesPID))
|| null != (className = JDBCDrivers.getXADataSourceClassName(getClasspath(sharedLib, true))))
return create(className, props, dataSourceID);
String url = props.getProperty("URL", props.getProperty("url"));
if (url != null) {
Driver driver = loadDriver(null, url, classloader, props, dataSourceID);
if (driver != null)
return driver;
}
Set<String> packagesSearched = new LinkedHashSet<String>();
SimpleEntry<Integer, String> dsEntry = JDBCDrivers.inferDataSourceClassFromDriver(classloader,
packagesSearched,
JDBCDrivers.CONNECTION_POOL_DATA_SOURCE,
JDBCDrivers.DATA_SOURCE,
JDBCDrivers.XA_DATA_SOURCE);
if (dsEntry == null) {
List<String> interfaceNames = Arrays.asList(ConnectionPoolDataSource.class.getName(),
DataSource.class.getName(),
XADataSource.class.getName(),
Driver.class.getName());
throw classNotFound(interfaceNames, packagesSearched, dataSourceID, null);
}
return create(className = dsEntry.getValue(), props, dataSourceID);
} finally {
lock.readLock().unlock();
}
} } |
public class class_name {
void attachMetadataCacheInternal(SlotReference slot, MetadataCache cache) {
MetadataCache oldCache = metadataCacheFiles.put(slot, cache);
if (oldCache != null) {
try {
oldCache.close();
} catch (IOException e) {
logger.error("Problem closing previous metadata cache", e);
}
}
deliverCacheUpdate(slot, cache);
} } | public class class_name {
void attachMetadataCacheInternal(SlotReference slot, MetadataCache cache) {
MetadataCache oldCache = metadataCacheFiles.put(slot, cache);
if (oldCache != null) {
try {
oldCache.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
logger.error("Problem closing previous metadata cache", e);
} // depends on control dependency: [catch], data = [none]
}
deliverCacheUpdate(slot, cache);
} } |
public class class_name {
public EncryptionAlgorithmOptions withAllowedValues(EncryptionAlgorithm... allowedValues) {
java.util.ArrayList<String> allowedValuesCopy = new java.util.ArrayList<String>(allowedValues.length);
for (EncryptionAlgorithm value : allowedValues) {
allowedValuesCopy.add(value.toString());
}
if (getAllowedValues() == null) {
setAllowedValues(allowedValuesCopy);
} else {
getAllowedValues().addAll(allowedValuesCopy);
}
return this;
} } | public class class_name {
public EncryptionAlgorithmOptions withAllowedValues(EncryptionAlgorithm... allowedValues) {
java.util.ArrayList<String> allowedValuesCopy = new java.util.ArrayList<String>(allowedValues.length);
for (EncryptionAlgorithm value : allowedValues) {
allowedValuesCopy.add(value.toString()); // depends on control dependency: [for], data = [value]
}
if (getAllowedValues() == null) {
setAllowedValues(allowedValuesCopy); // depends on control dependency: [if], data = [none]
} else {
getAllowedValues().addAll(allowedValuesCopy); // depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
@Override
protected void validateComponent(final List<Diagnostic> diags) {
String text1 = field1.getText();
String text2 = field2.getText();
String text3 = field3.getText();
if (text1 != null && text1.length() > 0 && text1.equals(text2)) {
// Note that this error will hyperlink to Field 2.
diags.add(createErrorDiagnostic(field2, "Fields 1 and 2 cannot be the same."));
}
int len = 0;
if (text1 != null) {
len += text1.length();
}
if (text2 != null) {
len += text2.length();
}
if (len > 20) {
// Note that this error does not link to a specific field.
diags.add(createErrorDiagnostic(
"The total length of Field 1 plus Field 2 can exceed 20 characters."));
}
// Sample Warning Message
if (Util.empty(text3)) {
diags.add(new DiagnosticImpl(Diagnostic.WARNING, UIContextHolder.getCurrent(), field3,
"Warning that this should not be blank"));
}
} } | public class class_name {
@Override
protected void validateComponent(final List<Diagnostic> diags) {
String text1 = field1.getText();
String text2 = field2.getText();
String text3 = field3.getText();
if (text1 != null && text1.length() > 0 && text1.equals(text2)) {
// Note that this error will hyperlink to Field 2.
diags.add(createErrorDiagnostic(field2, "Fields 1 and 2 cannot be the same.")); // depends on control dependency: [if], data = [none]
}
int len = 0;
if (text1 != null) {
len += text1.length(); // depends on control dependency: [if], data = [none]
}
if (text2 != null) {
len += text2.length(); // depends on control dependency: [if], data = [none]
}
if (len > 20) {
// Note that this error does not link to a specific field.
diags.add(createErrorDiagnostic(
"The total length of Field 1 plus Field 2 can exceed 20 characters.")); // depends on control dependency: [if], data = [none]
}
// Sample Warning Message
if (Util.empty(text3)) {
diags.add(new DiagnosticImpl(Diagnostic.WARNING, UIContextHolder.getCurrent(), field3,
"Warning that this should not be blank")); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private long computeConfigurationLastModified(List<Resource> configurations) {
long result = 0;
for (Resource configuration : configurations) {
try {
long currentConfigurationLastModified = configuration.lastModified();
if (currentConfigurationLastModified > result) {
result = currentConfigurationLastModified;
}
} catch (IOException ioex) {
logger.warn("Error while reading last configuration modification date.", ioex);
}
}
return result;
} } | public class class_name {
private long computeConfigurationLastModified(List<Resource> configurations) {
long result = 0;
for (Resource configuration : configurations) {
try {
long currentConfigurationLastModified = configuration.lastModified();
if (currentConfigurationLastModified > result) {
result = currentConfigurationLastModified; // depends on control dependency: [if], data = [none]
}
} catch (IOException ioex) {
logger.warn("Error while reading last configuration modification date.", ioex);
} // depends on control dependency: [catch], data = [none]
}
return result;
} } |
public class class_name {
public static void copy(int[][] x, int[][] y) {
if (x.length != y.length || x[0].length != y[0].length) {
throw new IllegalArgumentException(String.format("Matrices have different rows: %d x %d vs %d x %d", x.length, x[0].length, y.length, y[0].length));
}
for (int i = 0; i < x.length; i++) {
System.arraycopy(x[i], 0, y[i], 0, x[i].length);
}
} } | public class class_name {
public static void copy(int[][] x, int[][] y) {
if (x.length != y.length || x[0].length != y[0].length) {
throw new IllegalArgumentException(String.format("Matrices have different rows: %d x %d vs %d x %d", x.length, x[0].length, y.length, y[0].length));
}
for (int i = 0; i < x.length; i++) {
System.arraycopy(x[i], 0, y[i], 0, x[i].length); // depends on control dependency: [for], data = [i]
}
} } |
public class class_name {
@Override
public boolean intersects(IVersionRange yourRange)
{
for (IVersionRange range : ranges) {
if (range.intersects(yourRange)) {
return true;
}
}
return false;
} } | public class class_name {
@Override
public boolean intersects(IVersionRange yourRange)
{
for (IVersionRange range : ranges) {
if (range.intersects(yourRange)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
String tableToString(Table table)
{
StringBuilder strb = new StringBuilder();
for(int row = 0; row < table.getRowCount(); row++)
{
for(int col = 0; col < table.getColumnCount(); col++)
{
strb.append(table.get(row, col));
strb.append(" ");
}
strb.append("\n");
}
return strb.toString();
} } | public class class_name {
String tableToString(Table table)
{
StringBuilder strb = new StringBuilder();
for(int row = 0; row < table.getRowCount(); row++)
{
for(int col = 0; col < table.getColumnCount(); col++)
{
strb.append(table.get(row, col));
// depends on control dependency: [for], data = [col]
strb.append(" ");
// depends on control dependency: [for], data = [none]
}
strb.append("\n");
// depends on control dependency: [for], data = [none]
}
return strb.toString();
} } |
public class class_name {
public DatasourceConnection getDatasourceConnection(Config config, DataSource datasource, String user, String pass) throws PageException {
config = ThreadLocalPageContext.getConfig(config);
if (StringUtil.isEmpty(user)) {
user = datasource.getUsername();
pass = datasource.getPassword();
}
if (pass == null) pass = "";
// get stack
DCStack stack = getDCStack(datasource, user, pass);
int max = datasource.getConnectionLimit();
// get an existing connection
DatasourceConnection rtn;
boolean wait = false;
outer: while (true) {
rtn = null;
// wait until it is again my turn
if (wait) {
SystemUtil.wait(waiter, WAIT);
wait = false;
}
synchronized (stack) {
// do we already have too many open connections?
if (max != -1) {
RefInteger _counter = stack.getCounter();// _getCounter(stack,datasource,user,pass);
if (max <= _counter.toInt()) {// go back and wait
wait = true;
continue outer;
}
}
// get an existing connection
while (!stack.isEmpty()) {
DatasourceConnection dc = (DatasourceConnection) stack.get();
if (dc != null) {
rtn = dc;
break;
}
}
_inc(stack, datasource, user, pass); // if new or fine we
// increase in any case
// create a new instance
if (rtn == null) {
try {
rtn = loadDatasourceConnection(config, datasource, user, pass);
}
catch (PageException pe) {
_dec(stack, datasource, user, pass);
throw pe;
}
if (rtn instanceof DatasourceConnectionImpl) ((DatasourceConnectionImpl) rtn).using();
return rtn;
}
}
// we got a fine connection (we do validation outside the
// synchronized block to save shared time)
if (isValid(rtn, Boolean.TRUE)) {
if (rtn instanceof DatasourceConnectionImpl) ((DatasourceConnectionImpl) rtn).using();
return rtn;
}
// we have an invalid connection (above check failed), so we have to
// start over
synchronized (stack) {
_dec(stack, datasource, user, pass); // we already did increment
// in case we are fine
SystemUtil.notify(waiter);
}
IOUtil.closeEL(rtn.getConnection());
rtn = null;
}
} } | public class class_name {
public DatasourceConnection getDatasourceConnection(Config config, DataSource datasource, String user, String pass) throws PageException {
config = ThreadLocalPageContext.getConfig(config);
if (StringUtil.isEmpty(user)) {
user = datasource.getUsername();
pass = datasource.getPassword();
}
if (pass == null) pass = "";
// get stack
DCStack stack = getDCStack(datasource, user, pass);
int max = datasource.getConnectionLimit();
// get an existing connection
DatasourceConnection rtn;
boolean wait = false;
outer: while (true) {
rtn = null;
// wait until it is again my turn
if (wait) {
SystemUtil.wait(waiter, WAIT); // depends on control dependency: [if], data = [none]
wait = false; // depends on control dependency: [if], data = [none]
}
synchronized (stack) {
// do we already have too many open connections?
if (max != -1) {
RefInteger _counter = stack.getCounter();// _getCounter(stack,datasource,user,pass);
if (max <= _counter.toInt()) {// go back and wait
wait = true; // depends on control dependency: [if], data = [none]
continue outer;
}
}
// get an existing connection
while (!stack.isEmpty()) {
DatasourceConnection dc = (DatasourceConnection) stack.get();
if (dc != null) {
rtn = dc; // depends on control dependency: [if], data = [none]
break;
}
}
_inc(stack, datasource, user, pass); // if new or fine we
// increase in any case
// create a new instance
if (rtn == null) {
try {
rtn = loadDatasourceConnection(config, datasource, user, pass); // depends on control dependency: [try], data = [none]
}
catch (PageException pe) {
_dec(stack, datasource, user, pass);
throw pe;
} // depends on control dependency: [catch], data = [none]
if (rtn instanceof DatasourceConnectionImpl) ((DatasourceConnectionImpl) rtn).using();
return rtn; // depends on control dependency: [if], data = [none]
}
}
// we got a fine connection (we do validation outside the
// synchronized block to save shared time)
if (isValid(rtn, Boolean.TRUE)) {
if (rtn instanceof DatasourceConnectionImpl) ((DatasourceConnectionImpl) rtn).using();
return rtn; // depends on control dependency: [if], data = [none]
}
// we have an invalid connection (above check failed), so we have to
// start over
synchronized (stack) {
_dec(stack, datasource, user, pass); // we already did increment
// in case we are fine
SystemUtil.notify(waiter);
}
IOUtil.closeEL(rtn.getConnection());
rtn = null;
}
} } |
public class class_name {
public void markInitialState() {
if (!attachedObjects.isEmpty()) {
for (T t : attachedObjects) {
if (t instanceof PartialStateHolder) {
((PartialStateHolder) t).markInitialState();
}
}
}
initialState = true;
} } | public class class_name {
public void markInitialState() {
if (!attachedObjects.isEmpty()) {
for (T t : attachedObjects) {
if (t instanceof PartialStateHolder) {
((PartialStateHolder) t).markInitialState(); // depends on control dependency: [if], data = [none]
}
}
}
initialState = true;
} } |
public class class_name {
public void marshall(StopDominantLanguageDetectionJobRequest stopDominantLanguageDetectionJobRequest, ProtocolMarshaller protocolMarshaller) {
if (stopDominantLanguageDetectionJobRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(stopDominantLanguageDetectionJobRequest.getJobId(), JOBID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(StopDominantLanguageDetectionJobRequest stopDominantLanguageDetectionJobRequest, ProtocolMarshaller protocolMarshaller) {
if (stopDominantLanguageDetectionJobRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(stopDominantLanguageDetectionJobRequest.getJobId(), JOBID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void addEmptyElements() {
for (Node node : nodes) {
boolean empty = true;
if(node instanceof NestableNode) {
List<Node> nl = ((NestableNode) node).getChildren();
for (Node n : nl) {
if (n instanceof Element) {
empty = false;
break;
} else if (n instanceof Text) {
// TODO: Should we trim the text and see if it's length 0?
String value = ((Text) n).getContent();
if (value.length() > 0) {
empty = false;
break;
}
}
}
}
if (empty) {
result.add(node);
}
}
} } | public class class_name {
private void addEmptyElements() {
for (Node node : nodes) {
boolean empty = true;
if(node instanceof NestableNode) {
List<Node> nl = ((NestableNode) node).getChildren();
for (Node n : nl) {
if (n instanceof Element) {
empty = false;
// depends on control dependency: [if], data = [none]
break;
} else if (n instanceof Text) {
// TODO: Should we trim the text and see if it's length 0?
String value = ((Text) n).getContent();
if (value.length() > 0) {
empty = false;
// depends on control dependency: [if], data = [none]
break;
}
}
}
}
if (empty) {
result.add(node);
// depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public E extractMin() {
if (isEmpty()) {
throw new NoSuchElementException();
}
HeapEntry<E> minEntry = indexToEntry.get(0);
int lastIndex = size() - 1;
if (lastIndex > 0) {
HeapEntry<E> lastEntry = indexToEntry.get(lastIndex);
swap(lastEntry, minEntry);
removeLast(minEntry);
heapifyDown(lastEntry);
} else {
removeLast(minEntry);
}
return minEntry.object;
} } | public class class_name {
public E extractMin() {
if (isEmpty()) {
throw new NoSuchElementException();
}
HeapEntry<E> minEntry = indexToEntry.get(0);
int lastIndex = size() - 1;
if (lastIndex > 0) {
HeapEntry<E> lastEntry = indexToEntry.get(lastIndex);
swap(lastEntry, minEntry);
// depends on control dependency: [if], data = [none]
removeLast(minEntry);
// depends on control dependency: [if], data = [none]
heapifyDown(lastEntry);
// depends on control dependency: [if], data = [none]
} else {
removeLast(minEntry);
// depends on control dependency: [if], data = [none]
}
return minEntry.object;
} } |
public class class_name {
@Override
public Object beforeGettingConnection(Subject subject, ConnectionRequestInfo reqInfo) throws ResourceException {
if (tc.isEntryEnabled())
Tr.entry(this, tc, "beforeGettingConnection", getSubjectString(subject), reqInfo);
Object retObject = null;
final Subject subj = subject;
// If Security is enabled, continue. Otherwise exit immediately
// with a null object.
if (ThreadIdentityManager.isThreadIdentityEnabled()) {
// Check if the current resource adapter MCF configuration
// supports using ThreadIdentity. If so, continue processing.
// Otherwise get out.
if ((m_ThreadIdentitySupport != AbstractConnectionFactoryService.THREAD_IDENTITY_NOT_ALLOWED)) {
if (subj != null) {
// resauth = CONTAINER
// Check if UTOKEN Generic Credential was found. If so,
// the subject being processed represents the user identity
// currently associated with the thread we are running under
// (i.e., was established by finalizeSubject()). In this case,
// if SyncToThread is enabled for the server and this resource
// adapter (e.g., DB2 or IMS DL/I) indicates it needs the user
// identity associated with the thread actually pushed to the
// OS thread, then push the user identity to the OS thread.
// Under z/OS, this will create an ACEE for the user identity
// on the z/OS Thread. Connectors like the DB2 390 Local JDBC
// Provider and the IMS DL/I Connector in the case of a
// getConnection() request without any userid/password base
// the connection owner on the user represented by the
// user identity of the current z/OS thread.
//
//
// It should be noted that if SyncToThread is not enabled
// for the server, we cannot push the user identity to the
// OS thread. In this case, the WAS z/OS server will not
// have a security ACEE at all. In this situation,
// Connectors like the DB2 390 Local JDBC Provider and the IMS
// DL/I Connector will end up using the Server identity as
// user associated with the connection. Even though server
// identity will be used, the Subject used by
// Connection Management will be left set to the
// user identity associated with the current
// thread (e.g., RunAs Caller or Role user) and
// connection pooling will be based on this
// subject as opposed to using the Server identity.
if (doesSubjectContainUTOKEN(subj)) {
// We are using the user identity associated with the thread.
// The Subject contains a UTOKEN credential representing that
// user identity.
if (m_ThreadSecurity) {
// Connector requires that user identity be pushed to
// OS Thread.
if (ThreadIdentityManager.isJ2CThreadIdentityEnabled()) {
// J2C SyncToThread is enabled for Server.
// Push Subject to thread
retObject = setJ2CThreadIdentity(subj);
if (tc.isDebugEnabled()) {
Tr.debug(tc, "beforeGettingConnection() pushed the user identity associated with the thread to the OS Thread: ",
new Object[] { getSubjectString(subj) });
}
} else {
// J2C SyncToThread not enabled for Server.
// The adapter will run with server identity
// since we are not able to push the Subject's
// user identity to the current OS thread as an ACEE.
if (tc.isDebugEnabled()) {
Tr.debug(
tc,
"beforeGettingConnection() could not push user identity associated with the thread to the OS Thread because server was not enabled for SyncToThread.");
}
// Now we may need to synch server ID to thread.
// We found a UTOKEN credential on the Subject,
// which indicates that no container-managed alias was
// specified, (in which case finalizeSubject would not
// have gone through the process of getting a Subject with
// UTOKEN credential). So if (Connection Mgmt) SyncToThread is
// not enabled but Application SynchToThread is enabled, we
// would end up using the RunAs anyway, from the Application Synch,
// even though (Connection Mgmt) SyncToThread was disabled.
//
// We wish to control the id used to get a connection within Connection
// Mgmt code alone and to isolate the behavior from any settings not
// related to Connection Mgmt. If no Application Synch were done,
// a Thread Security-enabled connector would use server id to get a
// connection (when no container alias was specified and when no Connection Mgmt
// synch was done). So we add this code to ensure that the server id
// is also used to get a connection if an Application Synch has
// been performed (and no container alias was specified and when no Connection Mgmt
// synch was done)
//
// This is only worth doing if the Application Synch has been enabled. Though we
// won't determine if the Application Synch has actually been done.
if (ThreadIdentityManager.isAppThreadIdentityEnabled()) {
if (tc.isDebugEnabled()) {
Tr.debug(tc, "beforeGettingConnection() pushing server identity to the OS Thread because Application SyncToThread is enabled.");
}
// get Server subject and push it to the current OS thread
retObject = ThreadIdentityManager.runAsServer();
}
}
} else {
// OS ThreadSecurity is not enabled
// The adapter will use the user identity
// from the Subject that is associated with the
// current thread (i.e., the subject that is
// passed to the adapter).
}
} else {
// Subject doesn't have a UTOKEN GenericCredential.
// Check if we have an unexpected error situation.
checkForUTOKENNotFoundError(subj);
}
} else {
// resauth = APPLICATION
// When resauth is Application and the current
// connector is one that supports using ThreadSecurity (i.e.,
// associates the user identity from the OS Thread with the
// connection when getConnection() is done without
// a userid/password), check if the server is enabled to perform
// SyncToThread. If so, get the Server identity and push
// the Server identity to the current OS Thread.
//
// The reason for this is to ensure consistency in terms of
// the default user identity used by the connector.
// Whenever a connector that supports ThreadSecurity defaults,
// we want to ensure the default is Server identity. For
// example, in the case of resauth=Container, if we try
// to push the current user identity associated with the
// thread to the OS thread, but the server is not enabled
// to perform SyncToThread, we cannot push the user identity
// to the thread. Thus, in this case, the connector
// will default to using the Server identity as the owner of
// the connection that is allocated. Similarly, when
// resauth=Application and the getConnection() request is
// issued without a userid/password, we want to ensure that
// the connector defaults to using Server Identity. Thus,
// to make this happen, we push the Server identity to the
// current OS thread whenever resauth=Application and the
// connector supports ThreadSecurity. If it so happens
// that the application ends up issuing getConnection() with
// a userid/password, the fact that we have pushed the
// Server identity to the OS thread will not impact processing.
// Later, during afterGettingConnection() processing, the
// OS Thread Identity will then be returned to what it was.
if (m_ThreadSecurity && ThreadIdentityManager.isThreadIdentityEnabled()) {
// Get Server subject and push it to the current OS thread
retObject = ThreadIdentityManager.runAsServer();
}
}
}
} else {
// Security not enabled
if (tc.isDebugEnabled()) {
Tr.debug(tc, "beforeGettingConnection() processing skipped. Security not enabled.");
}
// NOTE: In the case where Security is not enabled, if
// no Container-managed alias was specified and the
// connector ALLOWS or REQUIRES ThreadIdentitySupport,
// any connection obtained will be associated with
// server identity.
}
if (tc.isEntryEnabled())
Tr.exit(this, tc, "beforeGettingConnection", retObject);
return retObject;
} } | public class class_name {
@Override
public Object beforeGettingConnection(Subject subject, ConnectionRequestInfo reqInfo) throws ResourceException {
if (tc.isEntryEnabled())
Tr.entry(this, tc, "beforeGettingConnection", getSubjectString(subject), reqInfo);
Object retObject = null;
final Subject subj = subject;
// If Security is enabled, continue. Otherwise exit immediately
// with a null object.
if (ThreadIdentityManager.isThreadIdentityEnabled()) {
// Check if the current resource adapter MCF configuration
// supports using ThreadIdentity. If so, continue processing.
// Otherwise get out.
if ((m_ThreadIdentitySupport != AbstractConnectionFactoryService.THREAD_IDENTITY_NOT_ALLOWED)) {
if (subj != null) {
// resauth = CONTAINER
// Check if UTOKEN Generic Credential was found. If so,
// the subject being processed represents the user identity
// currently associated with the thread we are running under
// (i.e., was established by finalizeSubject()). In this case,
// if SyncToThread is enabled for the server and this resource
// adapter (e.g., DB2 or IMS DL/I) indicates it needs the user
// identity associated with the thread actually pushed to the
// OS thread, then push the user identity to the OS thread.
// Under z/OS, this will create an ACEE for the user identity
// on the z/OS Thread. Connectors like the DB2 390 Local JDBC
// Provider and the IMS DL/I Connector in the case of a
// getConnection() request without any userid/password base
// the connection owner on the user represented by the
// user identity of the current z/OS thread.
//
//
// It should be noted that if SyncToThread is not enabled
// for the server, we cannot push the user identity to the
// OS thread. In this case, the WAS z/OS server will not
// have a security ACEE at all. In this situation,
// Connectors like the DB2 390 Local JDBC Provider and the IMS
// DL/I Connector will end up using the Server identity as
// user associated with the connection. Even though server
// identity will be used, the Subject used by
// Connection Management will be left set to the
// user identity associated with the current
// thread (e.g., RunAs Caller or Role user) and
// connection pooling will be based on this
// subject as opposed to using the Server identity.
if (doesSubjectContainUTOKEN(subj)) {
// We are using the user identity associated with the thread.
// The Subject contains a UTOKEN credential representing that
// user identity.
if (m_ThreadSecurity) {
// Connector requires that user identity be pushed to
// OS Thread.
if (ThreadIdentityManager.isJ2CThreadIdentityEnabled()) {
// J2C SyncToThread is enabled for Server.
// Push Subject to thread
retObject = setJ2CThreadIdentity(subj); // depends on control dependency: [if], data = [none]
if (tc.isDebugEnabled()) {
Tr.debug(tc, "beforeGettingConnection() pushed the user identity associated with the thread to the OS Thread: ",
new Object[] { getSubjectString(subj) }); // depends on control dependency: [if], data = [none]
}
} else {
// J2C SyncToThread not enabled for Server.
// The adapter will run with server identity
// since we are not able to push the Subject's
// user identity to the current OS thread as an ACEE.
if (tc.isDebugEnabled()) {
Tr.debug(
tc,
"beforeGettingConnection() could not push user identity associated with the thread to the OS Thread because server was not enabled for SyncToThread."); // depends on control dependency: [if], data = [none]
}
// Now we may need to synch server ID to thread.
// We found a UTOKEN credential on the Subject,
// which indicates that no container-managed alias was
// specified, (in which case finalizeSubject would not
// have gone through the process of getting a Subject with
// UTOKEN credential). So if (Connection Mgmt) SyncToThread is
// not enabled but Application SynchToThread is enabled, we
// would end up using the RunAs anyway, from the Application Synch,
// even though (Connection Mgmt) SyncToThread was disabled.
//
// We wish to control the id used to get a connection within Connection
// Mgmt code alone and to isolate the behavior from any settings not
// related to Connection Mgmt. If no Application Synch were done,
// a Thread Security-enabled connector would use server id to get a
// connection (when no container alias was specified and when no Connection Mgmt
// synch was done). So we add this code to ensure that the server id
// is also used to get a connection if an Application Synch has
// been performed (and no container alias was specified and when no Connection Mgmt
// synch was done)
//
// This is only worth doing if the Application Synch has been enabled. Though we
// won't determine if the Application Synch has actually been done.
if (ThreadIdentityManager.isAppThreadIdentityEnabled()) {
if (tc.isDebugEnabled()) {
Tr.debug(tc, "beforeGettingConnection() pushing server identity to the OS Thread because Application SyncToThread is enabled."); // depends on control dependency: [if], data = [none]
}
// get Server subject and push it to the current OS thread
retObject = ThreadIdentityManager.runAsServer(); // depends on control dependency: [if], data = [none]
}
}
} else {
// OS ThreadSecurity is not enabled
// The adapter will use the user identity
// from the Subject that is associated with the
// current thread (i.e., the subject that is
// passed to the adapter).
}
} else {
// Subject doesn't have a UTOKEN GenericCredential.
// Check if we have an unexpected error situation.
checkForUTOKENNotFoundError(subj); // depends on control dependency: [if], data = [none]
}
} else {
// resauth = APPLICATION
// When resauth is Application and the current
// connector is one that supports using ThreadSecurity (i.e.,
// associates the user identity from the OS Thread with the
// connection when getConnection() is done without
// a userid/password), check if the server is enabled to perform
// SyncToThread. If so, get the Server identity and push
// the Server identity to the current OS Thread.
//
// The reason for this is to ensure consistency in terms of
// the default user identity used by the connector.
// Whenever a connector that supports ThreadSecurity defaults,
// we want to ensure the default is Server identity. For
// example, in the case of resauth=Container, if we try
// to push the current user identity associated with the
// thread to the OS thread, but the server is not enabled
// to perform SyncToThread, we cannot push the user identity
// to the thread. Thus, in this case, the connector
// will default to using the Server identity as the owner of
// the connection that is allocated. Similarly, when
// resauth=Application and the getConnection() request is
// issued without a userid/password, we want to ensure that
// the connector defaults to using Server Identity. Thus,
// to make this happen, we push the Server identity to the
// current OS thread whenever resauth=Application and the
// connector supports ThreadSecurity. If it so happens
// that the application ends up issuing getConnection() with
// a userid/password, the fact that we have pushed the
// Server identity to the OS thread will not impact processing.
// Later, during afterGettingConnection() processing, the
// OS Thread Identity will then be returned to what it was.
if (m_ThreadSecurity && ThreadIdentityManager.isThreadIdentityEnabled()) {
// Get Server subject and push it to the current OS thread
retObject = ThreadIdentityManager.runAsServer(); // depends on control dependency: [if], data = [none]
}
}
}
} else {
// Security not enabled
if (tc.isDebugEnabled()) {
Tr.debug(tc, "beforeGettingConnection() processing skipped. Security not enabled.");
}
// NOTE: In the case where Security is not enabled, if
// no Container-managed alias was specified and the
// connector ALLOWS or REQUIRES ThreadIdentitySupport,
// any connection obtained will be associated with
// server identity.
}
if (tc.isEntryEnabled())
Tr.exit(this, tc, "beforeGettingConnection", retObject);
return retObject;
} } |
public class class_name {
protected final File createDirectoryArtifact(final File outputDirectory,
final String artifact) {
// Construct artifact path based on output directory
String path = asPath(outputDirectory.getAbsolutePath(), artifact);
final File ret = new File(path);
if (!ret.isDirectory()) {
// Fail if the directory artifact couldn't be created
if (!ret.mkdir()) {
error(DIRECTORY_CREATION_FAILED + ret);
bail(ExitCode.BAD_OUTPUT_DIRECTORY);
}
}
return ret;
} } | public class class_name {
protected final File createDirectoryArtifact(final File outputDirectory,
final String artifact) {
// Construct artifact path based on output directory
String path = asPath(outputDirectory.getAbsolutePath(), artifact);
final File ret = new File(path);
if (!ret.isDirectory()) {
// Fail if the directory artifact couldn't be created
if (!ret.mkdir()) {
error(DIRECTORY_CREATION_FAILED + ret); // depends on control dependency: [if], data = [none]
bail(ExitCode.BAD_OUTPUT_DIRECTORY); // depends on control dependency: [if], data = [none]
}
}
return ret;
} } |
public class class_name {
private void addIndexLink(DeprecatedAPIListBuilder builder,
DeprElementKind kind, Content contentTree) {
if (builder.hasDocumentation(kind)) {
Content li = HtmlTree.LI(getHyperLink(getAnchorName(kind),
contents.getContent(getHeadingKey(kind))));
contentTree.addContent(li);
}
} } | public class class_name {
private void addIndexLink(DeprecatedAPIListBuilder builder,
DeprElementKind kind, Content contentTree) {
if (builder.hasDocumentation(kind)) {
Content li = HtmlTree.LI(getHyperLink(getAnchorName(kind),
contents.getContent(getHeadingKey(kind))));
contentTree.addContent(li); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String getInternalType(String type) {
String internalType = null;
if ( "byte".equals( type ) ) {
internalType = "B";
} else if ( "char".equals( type ) ) {
internalType = "C";
} else if ( "double".equals( type ) ) {
internalType = "D";
} else if ( "float".equals( type ) ) {
internalType = "F";
} else if ( "int".equals( type ) ) {
internalType = "I";
} else if ( "long".equals( type ) ) {
internalType = "J";
} else if ( "short".equals( type ) ) {
internalType = "S";
} else if ( "boolean".equals( type ) ) {
internalType = "Z";
} else if ( "void".equals( type ) ) {
internalType = "V";
} else if ( type != null ) {
// I think this will fail for inner classes, but we don't really
// support inner class generation at the moment
internalType = type.replace( '.',
'/' );
}
return internalType;
} } | public class class_name {
public static String getInternalType(String type) {
String internalType = null;
if ( "byte".equals( type ) ) {
internalType = "B"; // depends on control dependency: [if], data = [none]
} else if ( "char".equals( type ) ) {
internalType = "C"; // depends on control dependency: [if], data = [none]
} else if ( "double".equals( type ) ) {
internalType = "D"; // depends on control dependency: [if], data = [none]
} else if ( "float".equals( type ) ) {
internalType = "F"; // depends on control dependency: [if], data = [none]
} else if ( "int".equals( type ) ) {
internalType = "I"; // depends on control dependency: [if], data = [none]
} else if ( "long".equals( type ) ) {
internalType = "J"; // depends on control dependency: [if], data = [none]
} else if ( "short".equals( type ) ) {
internalType = "S"; // depends on control dependency: [if], data = [none]
} else if ( "boolean".equals( type ) ) {
internalType = "Z"; // depends on control dependency: [if], data = [none]
} else if ( "void".equals( type ) ) {
internalType = "V"; // depends on control dependency: [if], data = [none]
} else if ( type != null ) {
// I think this will fail for inner classes, but we don't really
// support inner class generation at the moment
internalType = type.replace( '.',
'/' ); // depends on control dependency: [if], data = [none]
}
return internalType;
} } |
public class class_name {
void markKnownViewsInvalid() {
final int childCount = getChildCount();
for (int i = 0; i < childCount; i++) {
final ViewHolder holder = getChildViewHolderInt(getChildAt(i));
if (holder != null) {
holder.addFlags(ViewHolder.FLAG_UPDATE | ViewHolder.FLAG_INVALID);
}
}
mRecycler.markKnownViewsInvalid();
} } | public class class_name {
void markKnownViewsInvalid() {
final int childCount = getChildCount();
for (int i = 0; i < childCount; i++) {
final ViewHolder holder = getChildViewHolderInt(getChildAt(i));
if (holder != null) {
holder.addFlags(ViewHolder.FLAG_UPDATE | ViewHolder.FLAG_INVALID); // depends on control dependency: [if], data = [none]
}
}
mRecycler.markKnownViewsInvalid();
} } |
public class class_name {
public void trim(int ntrees) {
if (ntrees < 1) {
throw new IllegalArgumentException("Invalid new model size: " + ntrees);
}
if (k == 2) {
if (ntrees > trees.length) {
throw new IllegalArgumentException("The new model size is larger than the current size.");
}
if (ntrees < trees.length) {
trees = Arrays.copyOf(trees, ntrees);
this.ntrees = ntrees;
}
} else {
if (ntrees > forest[0].length) {
throw new IllegalArgumentException("The new model size is larger than the current one.");
}
if (ntrees < forest[0].length) {
for (int i = 0; i < forest.length; i++) {
forest[i] = Arrays.copyOf(forest[i], ntrees);
}
this.ntrees = ntrees;
}
}
} } | public class class_name {
public void trim(int ntrees) {
if (ntrees < 1) {
throw new IllegalArgumentException("Invalid new model size: " + ntrees);
}
if (k == 2) {
if (ntrees > trees.length) {
throw new IllegalArgumentException("The new model size is larger than the current size.");
}
if (ntrees < trees.length) {
trees = Arrays.copyOf(trees, ntrees); // depends on control dependency: [if], data = [none]
this.ntrees = ntrees; // depends on control dependency: [if], data = [none]
}
} else {
if (ntrees > forest[0].length) {
throw new IllegalArgumentException("The new model size is larger than the current one.");
}
if (ntrees < forest[0].length) {
for (int i = 0; i < forest.length; i++) {
forest[i] = Arrays.copyOf(forest[i], ntrees); // depends on control dependency: [for], data = [i]
}
this.ntrees = ntrees; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private int registerBlockInstance() {
int blockInstNo = blockInstTabCnt++;
if (blockInstTab == null) {
blockInstTab = new BlockInstTabRec[64];
}
if (blockInstTabCnt > blockInstTab.length) {
blockInstTab = (BlockInstTabRec[]) MiniTemplatorParser.resizeArray(blockInstTab, 2 * blockInstTabCnt);
}
blockInstTab[blockInstNo] = new BlockInstTabRec();
return blockInstNo;
} } | public class class_name {
private int registerBlockInstance() {
int blockInstNo = blockInstTabCnt++;
if (blockInstTab == null) {
blockInstTab = new BlockInstTabRec[64];
// depends on control dependency: [if], data = [none]
}
if (blockInstTabCnt > blockInstTab.length) {
blockInstTab = (BlockInstTabRec[]) MiniTemplatorParser.resizeArray(blockInstTab, 2 * blockInstTabCnt);
// depends on control dependency: [if], data = [none]
}
blockInstTab[blockInstNo] = new BlockInstTabRec();
return blockInstNo;
} } |
public class class_name {
void chooseExcessReplicates(Collection<DatanodeDescriptor> nonExcess,
Block b, short replication,
DatanodeDescriptor addedNode,
DatanodeDescriptor delNodeHint,
INodeFile inode,
List<DatanodeID> excessReplicateMapTmp) {
// first form a rack to datanodes map and
HashMap<String, ArrayList<DatanodeDescriptor>> rackMap =
new HashMap<String, ArrayList<DatanodeDescriptor>>();
for (Iterator<DatanodeDescriptor> iter = nonExcess.iterator();
iter.hasNext();) {
DatanodeDescriptor node = iter.next();
String rackName = node.getNetworkLocation();
ArrayList<DatanodeDescriptor> datanodeList = rackMap.get(rackName);
if (datanodeList == null) {
datanodeList = new ArrayList<DatanodeDescriptor>();
}
datanodeList.add(node);
rackMap.put(rackName, datanodeList);
}
// split nodes into two sets
// priSet contains nodes on rack with more than one replica
// remains contains the remaining nodes
// It may be useful for the corresponding BlockPlacementPolicy.
ArrayList<DatanodeDescriptor> priSet = new ArrayList<DatanodeDescriptor>();
ArrayList<DatanodeDescriptor> remains = new ArrayList<DatanodeDescriptor>();
for (Iterator<Entry<String, ArrayList<DatanodeDescriptor>>> iter =
rackMap.entrySet().iterator(); iter.hasNext();) {
Entry<String, ArrayList<DatanodeDescriptor>> rackEntry = iter.next();
ArrayList<DatanodeDescriptor> datanodeList = rackEntry.getValue();
if (datanodeList.size() == 1) {
remains.add(datanodeList.get(0));
} else {
priSet.addAll(datanodeList);
}
}
// pick one node to delete that favors the delete hint
// otherwise follow the strategy of corresponding BlockPlacementPolicy.
boolean firstOne = true;
while (nonExcess.size() - replication > 0) {
DatanodeInfo cur = null;
long minSpace = Long.MAX_VALUE;
// check if we can del delNodeHint
if (firstOne && delNodeHint != null && nonExcess.contains(delNodeHint) &&
(priSet.contains(delNodeHint) || (addedNode != null && !priSet.contains(addedNode)))) {
cur = delNodeHint;
} else { // regular excessive replica removal
cur = replicator.chooseReplicaToDelete(inode, b, replication, priSet, remains);
}
firstOne = false;
// adjust rackmap, priSet, and remains
String rack = cur.getNetworkLocation();
ArrayList<DatanodeDescriptor> datanodes = rackMap.get(rack);
datanodes.remove(cur);
if (datanodes.isEmpty()) {
rackMap.remove(rack);
}
if (priSet.remove(cur)) {
if (datanodes.size() == 1) {
priSet.remove(datanodes.get(0));
remains.add(datanodes.get(0));
}
} else {
remains.remove(cur);
}
nonExcess.remove(cur);
excessReplicateMapTmp.add(cur);
if (NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("BLOCK* NameSystem.chooseExcessReplicates: "
+ "(" + cur.getName() + ", " + b
+ ") is added to excessReplicateMapTmp");
}
}
} } | public class class_name {
void chooseExcessReplicates(Collection<DatanodeDescriptor> nonExcess,
Block b, short replication,
DatanodeDescriptor addedNode,
DatanodeDescriptor delNodeHint,
INodeFile inode,
List<DatanodeID> excessReplicateMapTmp) {
// first form a rack to datanodes map and
HashMap<String, ArrayList<DatanodeDescriptor>> rackMap =
new HashMap<String, ArrayList<DatanodeDescriptor>>();
for (Iterator<DatanodeDescriptor> iter = nonExcess.iterator();
iter.hasNext();) {
DatanodeDescriptor node = iter.next();
String rackName = node.getNetworkLocation();
ArrayList<DatanodeDescriptor> datanodeList = rackMap.get(rackName);
if (datanodeList == null) {
datanodeList = new ArrayList<DatanodeDescriptor>(); // depends on control dependency: [if], data = [none]
}
datanodeList.add(node); // depends on control dependency: [for], data = [none]
rackMap.put(rackName, datanodeList); // depends on control dependency: [for], data = [none]
}
// split nodes into two sets
// priSet contains nodes on rack with more than one replica
// remains contains the remaining nodes
// It may be useful for the corresponding BlockPlacementPolicy.
ArrayList<DatanodeDescriptor> priSet = new ArrayList<DatanodeDescriptor>();
ArrayList<DatanodeDescriptor> remains = new ArrayList<DatanodeDescriptor>();
for (Iterator<Entry<String, ArrayList<DatanodeDescriptor>>> iter =
rackMap.entrySet().iterator(); iter.hasNext();) {
Entry<String, ArrayList<DatanodeDescriptor>> rackEntry = iter.next();
ArrayList<DatanodeDescriptor> datanodeList = rackEntry.getValue();
if (datanodeList.size() == 1) {
remains.add(datanodeList.get(0)); // depends on control dependency: [if], data = [none]
} else {
priSet.addAll(datanodeList); // depends on control dependency: [if], data = [none]
}
}
// pick one node to delete that favors the delete hint
// otherwise follow the strategy of corresponding BlockPlacementPolicy.
boolean firstOne = true;
while (nonExcess.size() - replication > 0) {
DatanodeInfo cur = null;
long minSpace = Long.MAX_VALUE;
// check if we can del delNodeHint
if (firstOne && delNodeHint != null && nonExcess.contains(delNodeHint) &&
(priSet.contains(delNodeHint) || (addedNode != null && !priSet.contains(addedNode)))) {
cur = delNodeHint; // depends on control dependency: [if], data = [none]
} else { // regular excessive replica removal
cur = replicator.chooseReplicaToDelete(inode, b, replication, priSet, remains); // depends on control dependency: [if], data = [none]
}
firstOne = false; // depends on control dependency: [while], data = [none]
// adjust rackmap, priSet, and remains
String rack = cur.getNetworkLocation();
ArrayList<DatanodeDescriptor> datanodes = rackMap.get(rack);
datanodes.remove(cur); // depends on control dependency: [while], data = [none]
if (datanodes.isEmpty()) {
rackMap.remove(rack); // depends on control dependency: [if], data = [none]
}
if (priSet.remove(cur)) {
if (datanodes.size() == 1) {
priSet.remove(datanodes.get(0)); // depends on control dependency: [if], data = [none]
remains.add(datanodes.get(0)); // depends on control dependency: [if], data = [none]
}
} else {
remains.remove(cur); // depends on control dependency: [if], data = [none]
}
nonExcess.remove(cur); // depends on control dependency: [while], data = [none]
excessReplicateMapTmp.add(cur); // depends on control dependency: [while], data = [none]
if (NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("BLOCK* NameSystem.chooseExcessReplicates: "
+ "(" + cur.getName() + ", " + b
+ ") is added to excessReplicateMapTmp"); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public Integer getInt(int index) {
Number n = (Number) content.get(index);
if (n == null) {
return null;
} else if (n instanceof Integer) {
return (Integer) n;
} else {
return n.intValue(); //autoboxing to Integer
}
} } | public class class_name {
public Integer getInt(int index) {
Number n = (Number) content.get(index);
if (n == null) {
return null; // depends on control dependency: [if], data = [none]
} else if (n instanceof Integer) {
return (Integer) n; // depends on control dependency: [if], data = [none]
} else {
return n.intValue(); //autoboxing to Integer // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public int countByLtE_S(Date expirationDate, int status) {
FinderPath finderPath = FINDER_PATH_WITH_PAGINATION_COUNT_BY_LTE_S;
Object[] finderArgs = new Object[] { _getTime(expirationDate), status };
Long count = (Long)finderCache.getResult(finderPath, finderArgs, this);
if (count == null) {
StringBundler query = new StringBundler(3);
query.append(_SQL_COUNT_COMMERCEDISCOUNT_WHERE);
boolean bindExpirationDate = false;
if (expirationDate == null) {
query.append(_FINDER_COLUMN_LTE_S_EXPIRATIONDATE_1);
}
else {
bindExpirationDate = true;
query.append(_FINDER_COLUMN_LTE_S_EXPIRATIONDATE_2);
}
query.append(_FINDER_COLUMN_LTE_S_STATUS_2);
String sql = query.toString();
Session session = null;
try {
session = openSession();
Query q = session.createQuery(sql);
QueryPos qPos = QueryPos.getInstance(q);
if (bindExpirationDate) {
qPos.add(new Timestamp(expirationDate.getTime()));
}
qPos.add(status);
count = (Long)q.uniqueResult();
finderCache.putResult(finderPath, finderArgs, count);
}
catch (Exception e) {
finderCache.removeResult(finderPath, finderArgs);
throw processException(e);
}
finally {
closeSession(session);
}
}
return count.intValue();
} } | public class class_name {
@Override
public int countByLtE_S(Date expirationDate, int status) {
FinderPath finderPath = FINDER_PATH_WITH_PAGINATION_COUNT_BY_LTE_S;
Object[] finderArgs = new Object[] { _getTime(expirationDate), status };
Long count = (Long)finderCache.getResult(finderPath, finderArgs, this);
if (count == null) {
StringBundler query = new StringBundler(3);
query.append(_SQL_COUNT_COMMERCEDISCOUNT_WHERE); // depends on control dependency: [if], data = [none]
boolean bindExpirationDate = false;
if (expirationDate == null) {
query.append(_FINDER_COLUMN_LTE_S_EXPIRATIONDATE_1); // depends on control dependency: [if], data = [none]
}
else {
bindExpirationDate = true; // depends on control dependency: [if], data = [none]
query.append(_FINDER_COLUMN_LTE_S_EXPIRATIONDATE_2); // depends on control dependency: [if], data = [none]
}
query.append(_FINDER_COLUMN_LTE_S_STATUS_2); // depends on control dependency: [if], data = [none]
String sql = query.toString();
Session session = null;
try {
session = openSession(); // depends on control dependency: [try], data = [none]
Query q = session.createQuery(sql);
QueryPos qPos = QueryPos.getInstance(q);
if (bindExpirationDate) {
qPos.add(new Timestamp(expirationDate.getTime())); // depends on control dependency: [if], data = [none]
}
qPos.add(status); // depends on control dependency: [try], data = [none]
count = (Long)q.uniqueResult(); // depends on control dependency: [try], data = [none]
finderCache.putResult(finderPath, finderArgs, count); // depends on control dependency: [try], data = [none]
}
catch (Exception e) {
finderCache.removeResult(finderPath, finderArgs);
throw processException(e);
} // depends on control dependency: [catch], data = [none]
finally {
closeSession(session);
}
}
return count.intValue();
} } |
public class class_name {
public final void typeArguments() throws RecognitionException {
try {
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:5: ( LESS typeArgument ( COMMA typeArgument )* GREATER )
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:7: LESS typeArgument ( COMMA typeArgument )* GREATER
{
match(input,LESS,FOLLOW_LESS_in_typeArguments640); if (state.failed) return;
pushFollow(FOLLOW_typeArgument_in_typeArguments642);
typeArgument();
state._fsp--;
if (state.failed) return;
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:25: ( COMMA typeArgument )*
loop12:
while (true) {
int alt12=2;
int LA12_0 = input.LA(1);
if ( (LA12_0==COMMA) ) {
alt12=1;
}
switch (alt12) {
case 1 :
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:26: COMMA typeArgument
{
match(input,COMMA,FOLLOW_COMMA_in_typeArguments645); if (state.failed) return;
pushFollow(FOLLOW_typeArgument_in_typeArguments647);
typeArgument();
state._fsp--;
if (state.failed) return;
}
break;
default :
break loop12;
}
}
match(input,GREATER,FOLLOW_GREATER_in_typeArguments651); if (state.failed) return;
}
}
catch (RecognitionException re) {
throw re;
}
finally {
// do for sure before leaving
}
} } | public class class_name {
public final void typeArguments() throws RecognitionException {
try {
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:5: ( LESS typeArgument ( COMMA typeArgument )* GREATER )
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:7: LESS typeArgument ( COMMA typeArgument )* GREATER
{
match(input,LESS,FOLLOW_LESS_in_typeArguments640); if (state.failed) return;
pushFollow(FOLLOW_typeArgument_in_typeArguments642);
typeArgument();
state._fsp--;
if (state.failed) return;
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:25: ( COMMA typeArgument )*
loop12:
while (true) {
int alt12=2;
int LA12_0 = input.LA(1);
if ( (LA12_0==COMMA) ) {
alt12=1; // depends on control dependency: [if], data = [none]
}
switch (alt12) {
case 1 :
// src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:143:26: COMMA typeArgument
{
match(input,COMMA,FOLLOW_COMMA_in_typeArguments645); if (state.failed) return;
pushFollow(FOLLOW_typeArgument_in_typeArguments647);
typeArgument();
state._fsp--;
if (state.failed) return;
}
break;
default :
break loop12;
}
}
match(input,GREATER,FOLLOW_GREATER_in_typeArguments651); if (state.failed) return;
}
}
catch (RecognitionException re) {
throw re;
}
finally {
// do for sure before leaving
}
} } |
public class class_name {
public WindowsFileSystemConfiguration withMaintenanceOperationsInProgress(FileSystemMaintenanceOperation... maintenanceOperationsInProgress) {
java.util.ArrayList<String> maintenanceOperationsInProgressCopy = new java.util.ArrayList<String>(maintenanceOperationsInProgress.length);
for (FileSystemMaintenanceOperation value : maintenanceOperationsInProgress) {
maintenanceOperationsInProgressCopy.add(value.toString());
}
if (getMaintenanceOperationsInProgress() == null) {
setMaintenanceOperationsInProgress(maintenanceOperationsInProgressCopy);
} else {
getMaintenanceOperationsInProgress().addAll(maintenanceOperationsInProgressCopy);
}
return this;
} } | public class class_name {
public WindowsFileSystemConfiguration withMaintenanceOperationsInProgress(FileSystemMaintenanceOperation... maintenanceOperationsInProgress) {
java.util.ArrayList<String> maintenanceOperationsInProgressCopy = new java.util.ArrayList<String>(maintenanceOperationsInProgress.length);
for (FileSystemMaintenanceOperation value : maintenanceOperationsInProgress) {
maintenanceOperationsInProgressCopy.add(value.toString()); // depends on control dependency: [for], data = [value]
}
if (getMaintenanceOperationsInProgress() == null) {
setMaintenanceOperationsInProgress(maintenanceOperationsInProgressCopy); // depends on control dependency: [if], data = [none]
} else {
getMaintenanceOperationsInProgress().addAll(maintenanceOperationsInProgressCopy); // depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
private void fillNodeList(P parent) {
P prt = parent;
if ((prt != null) && (prt.isLeaf())) {
return;
}
while (prt != null) {
if (!prt.isLeaf()) {
if (!this.expandedNodes.contains(prt)) {
this.expandedNodes.add(prt);
for (int i = prt.getChildCount() - 1; i >= 0; --i) {
final C child = prt.getChildAt(i);
if (child != null) {
final P cn = toTraversableChild(prt, child);
if (cn != null) {
this.availableNodes.push(cn);
}
}
}
} else {
return;
}
}
prt = this.availableNodes.isEmpty() ? null : this.availableNodes.peek();
if ((prt != null) && (prt.isLeaf())) {
return;
}
}
} } | public class class_name {
private void fillNodeList(P parent) {
P prt = parent;
if ((prt != null) && (prt.isLeaf())) {
return; // depends on control dependency: [if], data = [none]
}
while (prt != null) {
if (!prt.isLeaf()) {
if (!this.expandedNodes.contains(prt)) {
this.expandedNodes.add(prt); // depends on control dependency: [if], data = [none]
for (int i = prt.getChildCount() - 1; i >= 0; --i) {
final C child = prt.getChildAt(i);
if (child != null) {
final P cn = toTraversableChild(prt, child);
if (cn != null) {
this.availableNodes.push(cn); // depends on control dependency: [if], data = [(cn]
}
}
}
} else {
return; // depends on control dependency: [if], data = [none]
}
}
prt = this.availableNodes.isEmpty() ? null : this.availableNodes.peek(); // depends on control dependency: [while], data = [none]
if ((prt != null) && (prt.isLeaf())) {
return; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public boolean getBracketedToken(char start, char end, TextProvider textProvider)
{
clearLastToken(textProvider);
clearLeadingSpaces(textProvider);
mark(textProvider);
if (m_debug)
debug("testing " + start + " " + end,textProvider);
StringBuilder sb = new StringBuilder();
char c = getNextChar(textProvider);
if (c != start)
{
reset(textProvider);
return false;
}
int brackets = 0;
while (true)
{
if (c == start) brackets++;
if (c == end) brackets--;
sb.append(c);
if (brackets < 1) break;
c = getNextChar(textProvider);
if (c == 0)
{
reset(textProvider);
return false;
}
}
unmark(textProvider);
String s = sb.toString().trim();
if (s.length() == 0) return false;
textProvider.setLastToken(s);
debug(textProvider);
return true;
} } | public class class_name {
public boolean getBracketedToken(char start, char end, TextProvider textProvider)
{
clearLastToken(textProvider);
clearLeadingSpaces(textProvider);
mark(textProvider);
if (m_debug)
debug("testing " + start + " " + end,textProvider);
StringBuilder sb = new StringBuilder();
char c = getNextChar(textProvider);
if (c != start)
{
reset(textProvider); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
int brackets = 0;
while (true)
{
if (c == start) brackets++;
if (c == end) brackets--;
sb.append(c); // depends on control dependency: [while], data = [none]
if (brackets < 1) break;
c = getNextChar(textProvider); // depends on control dependency: [while], data = [none]
if (c == 0)
{
reset(textProvider); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
}
unmark(textProvider);
String s = sb.toString().trim();
if (s.length() == 0) return false;
textProvider.setLastToken(s);
debug(textProvider);
return true;
} } |
public class class_name {
public static HiveWorkUnit viewMaterializationWorkUnit(HiveDataset dataset, HiveConverterUtils.StorageFormat storageFormat,
StageableTableMetadata destinationTable, @Nullable String partitionName) {
HiveWorkUnit workUnit = new HiveWorkUnit(dataset);
workUnit.setProp(MATERIALIZER_MODE_KEY, MaterializerMode.TABLE_MATERIALIZATION.name());
workUnit.setProp(STORAGE_FORMAT_KEY, storageFormat.name());
workUnit.setProp(STAGEABLE_TABLE_METADATA_KEY, HiveSource.GENERICS_AWARE_GSON.toJson(destinationTable));
if (!Strings.isNullOrEmpty(partitionName)) {
workUnit.setPartitionName(partitionName);
}
TaskUtils.setTaskFactoryClass(workUnit, HiveMaterializerTaskFactory.class);
return workUnit;
} } | public class class_name {
public static HiveWorkUnit viewMaterializationWorkUnit(HiveDataset dataset, HiveConverterUtils.StorageFormat storageFormat,
StageableTableMetadata destinationTable, @Nullable String partitionName) {
HiveWorkUnit workUnit = new HiveWorkUnit(dataset);
workUnit.setProp(MATERIALIZER_MODE_KEY, MaterializerMode.TABLE_MATERIALIZATION.name());
workUnit.setProp(STORAGE_FORMAT_KEY, storageFormat.name());
workUnit.setProp(STAGEABLE_TABLE_METADATA_KEY, HiveSource.GENERICS_AWARE_GSON.toJson(destinationTable));
if (!Strings.isNullOrEmpty(partitionName)) {
workUnit.setPartitionName(partitionName); // depends on control dependency: [if], data = [none]
}
TaskUtils.setTaskFactoryClass(workUnit, HiveMaterializerTaskFactory.class);
return workUnit;
} } |
public class class_name {
public int getPrevOpcode(int offset) {
if (offset < 0) {
throw new IllegalArgumentException("offset (" + offset + ") must be nonnegative");
}
if (offset >= prevOpcode.length || offset > sizePrevOpcodeBuffer) {
return Const.NOP;
}
int pos = currentPosInPrevOpcodeBuffer - offset;
if (pos < 0) {
pos += prevOpcode.length;
}
return prevOpcode[pos];
} } | public class class_name {
public int getPrevOpcode(int offset) {
if (offset < 0) {
throw new IllegalArgumentException("offset (" + offset + ") must be nonnegative");
}
if (offset >= prevOpcode.length || offset > sizePrevOpcodeBuffer) {
return Const.NOP; // depends on control dependency: [if], data = [none]
}
int pos = currentPosInPrevOpcodeBuffer - offset;
if (pos < 0) {
pos += prevOpcode.length; // depends on control dependency: [if], data = [none]
}
return prevOpcode[pos];
} } |
public class class_name {
private void markSubroutines() {
BitSet anyvisited = new BitSet();
// First walk the main subroutine and find all those instructions which
// can be reached without invoking any JSR at all
markSubroutineWalk(mainSubroutine, 0, anyvisited);
// Go through the head of each subroutine and find any nodes reachable
// to that subroutine without following any JSR links.
for (Iterator<Map.Entry<LabelNode, BitSet>> it = subroutineHeads
.entrySet().iterator(); it.hasNext();) {
Map.Entry<LabelNode, BitSet> entry = it.next();
LabelNode lab = entry.getKey();
BitSet sub = entry.getValue();
int index = instructions.indexOf(lab);
markSubroutineWalk(sub, index, anyvisited);
}
} } | public class class_name {
private void markSubroutines() {
BitSet anyvisited = new BitSet();
// First walk the main subroutine and find all those instructions which
// can be reached without invoking any JSR at all
markSubroutineWalk(mainSubroutine, 0, anyvisited);
// Go through the head of each subroutine and find any nodes reachable
// to that subroutine without following any JSR links.
for (Iterator<Map.Entry<LabelNode, BitSet>> it = subroutineHeads
.entrySet().iterator(); it.hasNext();) {
Map.Entry<LabelNode, BitSet> entry = it.next();
LabelNode lab = entry.getKey();
BitSet sub = entry.getValue();
int index = instructions.indexOf(lab);
markSubroutineWalk(sub, index, anyvisited); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
protected List<InetAddress> lookupHostAddress0(DnsName name, List<HostAddress> failedAddresses, DnssecMode dnssecMode) {
// Default implementation of a DNS name lookup for A/AAAA records. It is assumed that this method does never
// support DNSSEC. Subclasses are free to override this method.
if (dnssecMode != DnssecMode.disabled) {
throw new UnsupportedOperationException("This resolver does not support DNSSEC");
}
InetAddress[] inetAddressArray;
try {
inetAddressArray = InetAddress.getAllByName(name.toString());
} catch (UnknownHostException e) {
failedAddresses.add(new HostAddress(name, e));
return null;
}
return Arrays.asList(inetAddressArray);
} } | public class class_name {
protected List<InetAddress> lookupHostAddress0(DnsName name, List<HostAddress> failedAddresses, DnssecMode dnssecMode) {
// Default implementation of a DNS name lookup for A/AAAA records. It is assumed that this method does never
// support DNSSEC. Subclasses are free to override this method.
if (dnssecMode != DnssecMode.disabled) {
throw new UnsupportedOperationException("This resolver does not support DNSSEC");
}
InetAddress[] inetAddressArray;
try {
inetAddressArray = InetAddress.getAllByName(name.toString()); // depends on control dependency: [try], data = [none]
} catch (UnknownHostException e) {
failedAddresses.add(new HostAddress(name, e));
return null;
} // depends on control dependency: [catch], data = [none]
return Arrays.asList(inetAddressArray);
} } |
public class class_name {
public void start(boolean pLazy) {
Restrictor restrictor = createRestrictor();
backendManager = new BackendManager(configuration, logHandler, restrictor, pLazy);
requestHandler = new HttpRequestHandler(configuration, backendManager, logHandler);
if (listenForDiscoveryMcRequests(configuration)) {
try {
discoveryMulticastResponder = new DiscoveryMulticastResponder(backendManager, restrictor, logHandler);
discoveryMulticastResponder.start();
} catch (IOException e) {
logHandler.error("Cannot start discovery multicast handler: " + e, e);
}
}
} } | public class class_name {
public void start(boolean pLazy) {
Restrictor restrictor = createRestrictor();
backendManager = new BackendManager(configuration, logHandler, restrictor, pLazy);
requestHandler = new HttpRequestHandler(configuration, backendManager, logHandler);
if (listenForDiscoveryMcRequests(configuration)) {
try {
discoveryMulticastResponder = new DiscoveryMulticastResponder(backendManager, restrictor, logHandler); // depends on control dependency: [try], data = [none]
discoveryMulticastResponder.start(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
logHandler.error("Cannot start discovery multicast handler: " + e, e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public void eInit(SarlScript script, String name, IJvmTypeProvider context) {
setTypeResolutionContext(context);
if (this.sarlAgent == null) {
this.sarlAgent = SarlFactory.eINSTANCE.createSarlAgent();
script.getXtendTypes().add(this.sarlAgent);
this.sarlAgent.setAnnotationInfo(XtendFactory.eINSTANCE.createXtendTypeDeclaration());
if (!Strings.isEmpty(name)) {
this.sarlAgent.setName(name);
}
}
} } | public class class_name {
public void eInit(SarlScript script, String name, IJvmTypeProvider context) {
setTypeResolutionContext(context);
if (this.sarlAgent == null) {
this.sarlAgent = SarlFactory.eINSTANCE.createSarlAgent(); // depends on control dependency: [if], data = [none]
script.getXtendTypes().add(this.sarlAgent); // depends on control dependency: [if], data = [(this.sarlAgent]
this.sarlAgent.setAnnotationInfo(XtendFactory.eINSTANCE.createXtendTypeDeclaration()); // depends on control dependency: [if], data = [none]
if (!Strings.isEmpty(name)) {
this.sarlAgent.setName(name); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public QRCode toFile(File qrcodeFile) {
try {
if (!qrcodeFile.exists()) {
Files.createParentDirs(qrcodeFile);
qrcodeFile.createNewFile();
}
if (!ImageIO.write(this.qrcodeImage,
getSuffixName(qrcodeFile),
qrcodeFile)) {
throw new RuntimeException();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
this.qrcodeFile = qrcodeFile;
return this;
} } | public class class_name {
public QRCode toFile(File qrcodeFile) {
try {
if (!qrcodeFile.exists()) {
Files.createParentDirs(qrcodeFile); // depends on control dependency: [if], data = [none]
qrcodeFile.createNewFile(); // depends on control dependency: [if], data = [none]
}
if (!ImageIO.write(this.qrcodeImage,
getSuffixName(qrcodeFile),
qrcodeFile)) {
throw new RuntimeException();
}
} catch (IOException e) {
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
this.qrcodeFile = qrcodeFile;
return this;
} } |
public class class_name {
private void updateMeaning(DoubleVector meaning,
Queue<String> prevWords,
Queue<String> nextWords) {
// Generate the semantics of the context using summation of index
// vectors.
if (semanticType == SemanticType.COMPOSITE ||
semanticType == SemanticType.CONTEXT) {
DoubleVector context = new DenseVector(indexVectorSize);
// Sum the words prior to the focus word, skipping filtered tokens.
for (String term: prevWords) {
if (term.equals(IteratorFactory.EMPTY_TOKEN))
continue;
VectorMath.add(context, vectorMap.get(term));
}
// Sum the words after the focus word, skipping filtered tokens.
for (String term: nextWords) {
if (term.equals(IteratorFactory.EMPTY_TOKEN))
continue;
VectorMath.add(context, vectorMap.get(term));
}
// Normalize the context vector and add it to the meaning.
normalize(context);
VectorMath.add(meaning, context);
}
// Generate the semantics of the ordering using circular convolution of
// n-grams.
if (semanticType == SemanticType.COMPOSITE ||
semanticType == SemanticType.ORDERING) {
DoubleVector order = groupConvolution(prevWords, nextWords);
// Normalize the order vector and add it to the meaning.
normalize(order);
VectorMath.add(meaning, order);
}
} } | public class class_name {
private void updateMeaning(DoubleVector meaning,
Queue<String> prevWords,
Queue<String> nextWords) {
// Generate the semantics of the context using summation of index
// vectors.
if (semanticType == SemanticType.COMPOSITE ||
semanticType == SemanticType.CONTEXT) {
DoubleVector context = new DenseVector(indexVectorSize);
// Sum the words prior to the focus word, skipping filtered tokens.
for (String term: prevWords) {
if (term.equals(IteratorFactory.EMPTY_TOKEN))
continue;
VectorMath.add(context, vectorMap.get(term)); // depends on control dependency: [for], data = [term]
}
// Sum the words after the focus word, skipping filtered tokens.
for (String term: nextWords) {
if (term.equals(IteratorFactory.EMPTY_TOKEN))
continue;
VectorMath.add(context, vectorMap.get(term)); // depends on control dependency: [for], data = [term]
}
// Normalize the context vector and add it to the meaning.
normalize(context); // depends on control dependency: [if], data = [none]
VectorMath.add(meaning, context); // depends on control dependency: [if], data = [none]
}
// Generate the semantics of the ordering using circular convolution of
// n-grams.
if (semanticType == SemanticType.COMPOSITE ||
semanticType == SemanticType.ORDERING) {
DoubleVector order = groupConvolution(prevWords, nextWords);
// Normalize the order vector and add it to the meaning.
normalize(order); // depends on control dependency: [if], data = [none]
VectorMath.add(meaning, order); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void mergerByScore(Term fromTerm, int to, boolean asc) {
Term term = null;
if (terms[to] != null) {
term = terms[to];
while (term != null) {
// relation: to.set(from)
term.setPathSelfScore(fromTerm, asc);
term = term.next();
}
}
} } | public class class_name {
private void mergerByScore(Term fromTerm, int to, boolean asc) {
Term term = null;
if (terms[to] != null) {
term = terms[to]; // depends on control dependency: [if], data = [none]
while (term != null) {
// relation: to.set(from)
term.setPathSelfScore(fromTerm, asc); // depends on control dependency: [while], data = [none]
term = term.next(); // depends on control dependency: [while], data = [none]
}
}
} } |
public class class_name {
public static String getCompactServerStatus(final ODistributedServerManager manager, final ODocument distribCfg) {
final StringBuilder buffer = new StringBuilder();
final Collection<ODocument> members = distribCfg.field("members");
if (members != null) {
buffer.append(members.size());
buffer.append(":[");
int memberCount = 0;
for (ODocument m : members) {
if (m == null)
continue;
if (memberCount++ > 0)
buffer.append(",");
final String serverName = m.field("name");
buffer.append(serverName);
buffer.append((Object)m.field("status"));
final Collection<String> databases = m.field("databases");
if (databases != null) {
buffer.append("{");
int dbCount = 0;
for (String dbName : databases) {
final ODistributedConfiguration dbCfg = manager.getDatabaseConfiguration(dbName, false);
if (dbCfg == null)
continue;
if (dbCount++ > 0)
buffer.append(",");
buffer.append(dbName);
buffer.append("=");
buffer.append(manager.getDatabaseStatus(serverName, dbName));
buffer.append(" (");
buffer.append(dbCfg.getServerRole(serverName));
buffer.append(")");
}
buffer.append("}");
}
}
buffer.append("]");
}
return buffer.toString();
} } | public class class_name {
public static String getCompactServerStatus(final ODistributedServerManager manager, final ODocument distribCfg) {
final StringBuilder buffer = new StringBuilder();
final Collection<ODocument> members = distribCfg.field("members");
if (members != null) {
buffer.append(members.size()); // depends on control dependency: [if], data = [(members]
buffer.append(":["); // depends on control dependency: [if], data = [none]
int memberCount = 0;
for (ODocument m : members) {
if (m == null)
continue;
if (memberCount++ > 0)
buffer.append(",");
final String serverName = m.field("name");
buffer.append(serverName); // depends on control dependency: [for], data = [m]
buffer.append((Object)m.field("status")); // depends on control dependency: [for], data = [m]
final Collection<String> databases = m.field("databases");
if (databases != null) {
buffer.append("{"); // depends on control dependency: [if], data = [none]
int dbCount = 0;
for (String dbName : databases) {
final ODistributedConfiguration dbCfg = manager.getDatabaseConfiguration(dbName, false);
if (dbCfg == null)
continue;
if (dbCount++ > 0)
buffer.append(",");
buffer.append(dbName); // depends on control dependency: [for], data = [dbName]
buffer.append("="); // depends on control dependency: [for], data = [none]
buffer.append(manager.getDatabaseStatus(serverName, dbName)); // depends on control dependency: [for], data = [dbName]
buffer.append(" ("); // depends on control dependency: [for], data = [none]
buffer.append(dbCfg.getServerRole(serverName)); // depends on control dependency: [for], data = [none]
buffer.append(")"); // depends on control dependency: [for], data = [none]
}
buffer.append("}"); // depends on control dependency: [if], data = [none]
}
}
buffer.append("]"); // depends on control dependency: [if], data = [none]
}
return buffer.toString();
} } |
public class class_name {
public List<String> descriptionsForIds(List<String> _ids, boolean payload, boolean pretty) {
List<String> descriptions = new ArrayList<String>();
for (ServiceWrapper s : _cache) {
if (_ids.isEmpty() || _ids.contains(s.id)) {
StringBuffer buf = serviceDesciption(payload, pretty, s);
descriptions.add(buf.toString());
}
}
return descriptions;
} } | public class class_name {
public List<String> descriptionsForIds(List<String> _ids, boolean payload, boolean pretty) {
List<String> descriptions = new ArrayList<String>();
for (ServiceWrapper s : _cache) {
if (_ids.isEmpty() || _ids.contains(s.id)) {
StringBuffer buf = serviceDesciption(payload, pretty, s);
descriptions.add(buf.toString()); // depends on control dependency: [if], data = [none]
}
}
return descriptions;
} } |
public class class_name {
public Object getObject(int index)
{
try (InputStream is = openInputStream(index)) {
if (is == null) {
return null;
}
try (InH3 in = serializer().in(is)) {
return in.readObject();
}
} catch (IOException e) {
throw new RuntimeException(e);
} catch (Exception e) {
e.printStackTrace();
throw e;
}
} } | public class class_name {
public Object getObject(int index)
{
try (InputStream is = openInputStream(index)) {
if (is == null) {
return null; // depends on control dependency: [if], data = [none]
}
try (InH3 in = serializer().in(is)) {
return in.readObject();
}
} catch (IOException e) {
throw new RuntimeException(e);
} catch (Exception e) {
e.printStackTrace();
throw e;
}
} } |
public class class_name {
public long getLong(String key, long defaultValue) {
if (containsKey(key)) {
return Long.parseLong(get(key));
} else {
return defaultValue;
}
} } | public class class_name {
public long getLong(String key, long defaultValue) {
if (containsKey(key)) {
return Long.parseLong(get(key)); // depends on control dependency: [if], data = [none]
} else {
return defaultValue; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
@JsonIgnore
public Map<String, OptionalConfigurationComponent<?>> getValidationMap() {
// Use getDeclaredFields as the fields will probably be private
final Field[] fields = getClass().getDeclaredFields();
final Map<String, OptionalConfigurationComponent<?>> result = new HashMap<>();
for (final Field field : fields) {
final boolean oldValue = field.isAccessible();
try {
field.setAccessible(true);
final Object o = field.get(this);
// if o is null this is false
if (o instanceof OptionalConfigurationComponent) {
final JsonProperty ann = field.getAnnotation(JsonProperty.class);
final String key;
if (ann != null) {
key = ann.value();
}
else {
key = field.getName();
}
result.put(key, (OptionalConfigurationComponent<?>) o);
}
} catch (final IllegalAccessException e) {
throw new AssertionError("Your JVM does not allow you to run this code.", e);
} finally {
//noinspection ThrowFromFinallyBlock
field.setAccessible(oldValue);
}
}
return result;
} } | public class class_name {
@Override
@JsonIgnore
public Map<String, OptionalConfigurationComponent<?>> getValidationMap() {
// Use getDeclaredFields as the fields will probably be private
final Field[] fields = getClass().getDeclaredFields();
final Map<String, OptionalConfigurationComponent<?>> result = new HashMap<>();
for (final Field field : fields) {
final boolean oldValue = field.isAccessible();
try {
field.setAccessible(true); // depends on control dependency: [try], data = [none]
final Object o = field.get(this);
// if o is null this is false
if (o instanceof OptionalConfigurationComponent) {
final JsonProperty ann = field.getAnnotation(JsonProperty.class);
final String key;
if (ann != null) {
key = ann.value(); // depends on control dependency: [if], data = [none]
}
else {
key = field.getName(); // depends on control dependency: [if], data = [none]
}
result.put(key, (OptionalConfigurationComponent<?>) o); // depends on control dependency: [if], data = [none]
}
} catch (final IllegalAccessException e) {
throw new AssertionError("Your JVM does not allow you to run this code.", e);
} finally { // depends on control dependency: [catch], data = [none]
//noinspection ThrowFromFinallyBlock
field.setAccessible(oldValue);
}
}
return result;
} } |
public class class_name {
public static char[] encodeHex(byte[] data) {
int l = data.length;
char[] out = new char[l << 1];
// two characters form the hex value.
for (int i = 0, j = 0; i < l; i++) {
out[j++] = DIGITS[(0xF0 & data[i]) >>> 4 ];
out[j++] = DIGITS[ 0x0F & data[i] ];
}
return out;
} } | public class class_name {
public static char[] encodeHex(byte[] data) {
int l = data.length;
char[] out = new char[l << 1];
// two characters form the hex value.
for (int i = 0, j = 0; i < l; i++) {
out[j++] = DIGITS[(0xF0 & data[i]) >>> 4 ]; // depends on control dependency: [for], data = [i]
out[j++] = DIGITS[ 0x0F & data[i] ]; // depends on control dependency: [for], data = [i]
}
return out;
} } |
public class class_name {
protected void getTablesAndIndexesFromSubqueries(Map<String, StmtTargetTableScan> tablesRead,
Collection<String> indexes) {
for(AbstractExpression expr : findAllSubquerySubexpressions()) {
assert(expr instanceof AbstractSubqueryExpression);
AbstractSubqueryExpression subquery = (AbstractSubqueryExpression) expr;
AbstractPlanNode subqueryNode = subquery.getSubqueryNode();
assert(subqueryNode != null);
subqueryNode.getTablesAndIndexes(tablesRead, indexes);
}
} } | public class class_name {
protected void getTablesAndIndexesFromSubqueries(Map<String, StmtTargetTableScan> tablesRead,
Collection<String> indexes) {
for(AbstractExpression expr : findAllSubquerySubexpressions()) {
assert(expr instanceof AbstractSubqueryExpression); // depends on control dependency: [for], data = [expr]
AbstractSubqueryExpression subquery = (AbstractSubqueryExpression) expr;
AbstractPlanNode subqueryNode = subquery.getSubqueryNode();
assert(subqueryNode != null); // depends on control dependency: [for], data = [none]
subqueryNode.getTablesAndIndexes(tablesRead, indexes); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
public LoggingEvent rewrite(final LoggingEvent source) {
if (!properties.isEmpty()) {
Map rewriteProps = new HashMap(source.getProperties());
for(Iterator iter = properties.entrySet().iterator();
iter.hasNext();
) {
Map.Entry entry = (Map.Entry) iter.next();
if (!rewriteProps.containsKey(entry.getKey())) {
rewriteProps.put(entry.getKey(), entry.getValue());
}
}
return new LoggingEvent(
source.getFQNOfLoggerClass(),
source.getLogger() != null ? source.getLogger(): Logger.getLogger(source.getLoggerName()),
source.getTimeStamp(),
source.getLevel(),
source.getMessage(),
source.getThreadName(),
source.getThrowableInformation(),
source.getNDC(),
source.getLocationInformation(),
rewriteProps);
}
return source;
} } | public class class_name {
public LoggingEvent rewrite(final LoggingEvent source) {
if (!properties.isEmpty()) {
Map rewriteProps = new HashMap(source.getProperties());
for(Iterator iter = properties.entrySet().iterator();
iter.hasNext();
) {
Map.Entry entry = (Map.Entry) iter.next();
if (!rewriteProps.containsKey(entry.getKey())) {
rewriteProps.put(entry.getKey(), entry.getValue()); // depends on control dependency: [if], data = [none]
}
}
return new LoggingEvent(
source.getFQNOfLoggerClass(),
source.getLogger() != null ? source.getLogger(): Logger.getLogger(source.getLoggerName()),
source.getTimeStamp(),
source.getLevel(),
source.getMessage(),
source.getThreadName(),
source.getThrowableInformation(),
source.getNDC(),
source.getLocationInformation(),
rewriteProps); // depends on control dependency: [if], data = [none]
}
return source;
} } |
public class class_name {
public com.google.protobuf.ByteString
getOrganizationBytes() {
java.lang.Object ref = organization_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
organization_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
} } | public class class_name {
public com.google.protobuf.ByteString
getOrganizationBytes() {
java.lang.Object ref = organization_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
organization_ = b; // depends on control dependency: [if], data = [none]
return b; // depends on control dependency: [if], data = [none]
} else {
return (com.google.protobuf.ByteString) ref; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public Response resynchronize( String applicationName ) {
this.logger.fine( "Request: resynchronize all the agents." );
String lang = lang( this.manager );
Response response = Response.ok().build();
try {
ManagedApplication ma = this.manager.applicationMngr().findManagedApplicationByName( applicationName );
if( ma == null )
response = handleError( Status.NOT_FOUND, new RestError( REST_INEXISTING, application( applicationName )), lang ).build();
else
this.manager.instancesMngr().resynchronizeAgents( ma );
} catch( IOException e ) {
response = RestServicesUtils.handleError(
Status.NOT_ACCEPTABLE,
new RestError( ErrorCode.REST_UNDETAILED_ERROR, e ),
lang ).build();
}
return response;
} } | public class class_name {
@Override
public Response resynchronize( String applicationName ) {
this.logger.fine( "Request: resynchronize all the agents." );
String lang = lang( this.manager );
Response response = Response.ok().build();
try {
ManagedApplication ma = this.manager.applicationMngr().findManagedApplicationByName( applicationName );
if( ma == null )
response = handleError( Status.NOT_FOUND, new RestError( REST_INEXISTING, application( applicationName )), lang ).build();
else
this.manager.instancesMngr().resynchronizeAgents( ma );
} catch( IOException e ) {
response = RestServicesUtils.handleError(
Status.NOT_ACCEPTABLE,
new RestError( ErrorCode.REST_UNDETAILED_ERROR, e ),
lang ).build();
} // depends on control dependency: [catch], data = [none]
return response;
} } |
public class class_name {
private void makeMSGgeostationary() {
double Lat0 = gds.getDouble(GridDefRecord.LAP); // sub-satellite point lat
double Lon0 = gds.getDouble(GridDefRecord.LOP); // sub-satellite point lon
//int nx = gds.getInt(GridDefRecord.NX);
int ny = gds.getInt(GridDefRecord.NY);
int x_off = gds.getInt(GridDefRecord.XP); // sub-satellite point in grid lengths
int y_off = gds.getInt(GridDefRecord.YP);
double dx; // = gds.getDouble(GridDefRecord.DX); // apparent diameter of earth in units of grid lengths
double dy = gds.getDouble(GridDefRecord.DY);
// per Simon Eliot 1/18/2010, there is a bug in Eumetsat grib files,
// we need to "correct for ellipsoidal earth"
// (Note we should check who the originating center is
// "Originating_center" = "EUMETSAT Operation Centre" in the GRIB id (section 1))
// although AFAIK, eumetsat is only one using this projection.
if (dy < 2100) {
dx = 1207;
dy = 1203;
} else {
dx = 3622;
dy = 3610;
}
// have to check both names because Grib1 and Grib2 used different names
double major_axis = gds.getDouble(GridDefRecord.MAJOR_AXIS_EARTH); // m
if (Double.isNaN(major_axis))
major_axis = gds.getDouble("major_axis_earth");
double minor_axis = gds.getDouble(GridDefRecord.MINOR_AXIS_EARTH); // m
if (Double.isNaN(minor_axis))
minor_axis = gds.getDouble("minor_axis_earth");
// Nr = altitude of camera from center, in units of radius
double nr = gds.getDouble(GridDefRecord.NR) * 1e-6; // altitude of the camera from the Earths centre, measured in units of the Earth (equatorial) radius
// CFAC = 2^16 / {[2 * arcsine (10^6 / Nr)] / dx }
double as = 2 * Math.asin(1.0/nr);
double cfac = dx / as;
double lfac = dy / as;
// use km, so scale by the earth radius
double scale_factor = (nr - 1) * major_axis / 1000; // this sets the units of the projection x,y coords in km
double scale_x = scale_factor; // LOOK fake neg need scan value
double scale_y = -scale_factor; // LOOK fake neg need scan value
startx = scale_factor * (1 - x_off) / cfac;
starty = scale_factor * (y_off - ny) / lfac;
incrx = scale_factor/cfac;
incry = scale_factor/lfac;
attributes.add(new Attribute(GridCF.GRID_MAPPING_NAME, "MSGnavigation"));
attributes.add(new Attribute(GridCF.LONGITUDE_OF_PROJECTION_ORIGIN, Lon0));
attributes.add(new Attribute(GridCF.LATITUDE_OF_PROJECTION_ORIGIN, Lat0));
//attributes.add(new Attribute("semi_major_axis", new Double(major_axis)));
//attributes.add(new Attribute("semi_minor_axis", new Double(minor_axis)));
attributes.add(new Attribute("height_from_earth_center", nr * major_axis));
attributes.add(new Attribute("scale_x", scale_x));
attributes.add(new Attribute("scale_y", scale_y));
proj = new MSGnavigation(Lat0, Lon0, major_axis, minor_axis, nr * major_axis, scale_x, scale_y);
if (GridServiceProvider.debugProj) {
double Lo2 = gds.getDouble(GridDefRecord.LO2) + 360.0;
double La2 = gds.getDouble(GridDefRecord.LA2);
LatLonPointImpl endLL = new LatLonPointImpl(La2, Lo2);
System.out.println("GridHorizCoordSys.makeMSGgeostationary end at latlon " + endLL);
ProjectionPointImpl endPP =(ProjectionPointImpl) proj.latLonToProj(endLL);
System.out.println(" end at proj coord " + endPP);
double endx = 1 + getNx();
double endy = 1 + getNy();
System.out.println(" should be x=" + endx + " y=" + endy);
}
} } | public class class_name {
private void makeMSGgeostationary() {
double Lat0 = gds.getDouble(GridDefRecord.LAP); // sub-satellite point lat
double Lon0 = gds.getDouble(GridDefRecord.LOP); // sub-satellite point lon
//int nx = gds.getInt(GridDefRecord.NX);
int ny = gds.getInt(GridDefRecord.NY);
int x_off = gds.getInt(GridDefRecord.XP); // sub-satellite point in grid lengths
int y_off = gds.getInt(GridDefRecord.YP);
double dx; // = gds.getDouble(GridDefRecord.DX); // apparent diameter of earth in units of grid lengths
double dy = gds.getDouble(GridDefRecord.DY);
// per Simon Eliot 1/18/2010, there is a bug in Eumetsat grib files,
// we need to "correct for ellipsoidal earth"
// (Note we should check who the originating center is
// "Originating_center" = "EUMETSAT Operation Centre" in the GRIB id (section 1))
// although AFAIK, eumetsat is only one using this projection.
if (dy < 2100) {
dx = 1207; // depends on control dependency: [if], data = [none]
dy = 1203; // depends on control dependency: [if], data = [none]
} else {
dx = 3622; // depends on control dependency: [if], data = [none]
dy = 3610; // depends on control dependency: [if], data = [none]
}
// have to check both names because Grib1 and Grib2 used different names
double major_axis = gds.getDouble(GridDefRecord.MAJOR_AXIS_EARTH); // m
if (Double.isNaN(major_axis))
major_axis = gds.getDouble("major_axis_earth");
double minor_axis = gds.getDouble(GridDefRecord.MINOR_AXIS_EARTH); // m
if (Double.isNaN(minor_axis))
minor_axis = gds.getDouble("minor_axis_earth");
// Nr = altitude of camera from center, in units of radius
double nr = gds.getDouble(GridDefRecord.NR) * 1e-6; // altitude of the camera from the Earths centre, measured in units of the Earth (equatorial) radius
// CFAC = 2^16 / {[2 * arcsine (10^6 / Nr)] / dx }
double as = 2 * Math.asin(1.0/nr);
double cfac = dx / as;
double lfac = dy / as;
// use km, so scale by the earth radius
double scale_factor = (nr - 1) * major_axis / 1000; // this sets the units of the projection x,y coords in km
double scale_x = scale_factor; // LOOK fake neg need scan value
double scale_y = -scale_factor; // LOOK fake neg need scan value
startx = scale_factor * (1 - x_off) / cfac;
starty = scale_factor * (y_off - ny) / lfac;
incrx = scale_factor/cfac;
incry = scale_factor/lfac;
attributes.add(new Attribute(GridCF.GRID_MAPPING_NAME, "MSGnavigation"));
attributes.add(new Attribute(GridCF.LONGITUDE_OF_PROJECTION_ORIGIN, Lon0));
attributes.add(new Attribute(GridCF.LATITUDE_OF_PROJECTION_ORIGIN, Lat0));
//attributes.add(new Attribute("semi_major_axis", new Double(major_axis)));
//attributes.add(new Attribute("semi_minor_axis", new Double(minor_axis)));
attributes.add(new Attribute("height_from_earth_center", nr * major_axis));
attributes.add(new Attribute("scale_x", scale_x));
attributes.add(new Attribute("scale_y", scale_y));
proj = new MSGnavigation(Lat0, Lon0, major_axis, minor_axis, nr * major_axis, scale_x, scale_y);
if (GridServiceProvider.debugProj) {
double Lo2 = gds.getDouble(GridDefRecord.LO2) + 360.0;
double La2 = gds.getDouble(GridDefRecord.LA2);
LatLonPointImpl endLL = new LatLonPointImpl(La2, Lo2);
System.out.println("GridHorizCoordSys.makeMSGgeostationary end at latlon " + endLL); // depends on control dependency: [if], data = [none]
ProjectionPointImpl endPP =(ProjectionPointImpl) proj.latLonToProj(endLL);
System.out.println(" end at proj coord " + endPP); // depends on control dependency: [if], data = [none]
double endx = 1 + getNx();
double endy = 1 + getNy();
System.out.println(" should be x=" + endx + " y=" + endy); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void clearThreadPool() {
for (ExecutorService pool:threadPool) {
pool.shutdown();
}
for (ExecutorService pool:threadPool) {
try {
pool.awaitTermination(10, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
threadPool.clear();
} } | public class class_name {
public static void clearThreadPool() {
for (ExecutorService pool:threadPool) {
pool.shutdown(); // depends on control dependency: [for], data = [pool]
}
for (ExecutorService pool:threadPool) {
try {
pool.awaitTermination(10, TimeUnit.SECONDS); // depends on control dependency: [try], data = [none]
} catch (InterruptedException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
threadPool.clear();
} } |
public class class_name {
public Dialog dimAmount(float amount){
Window window = getWindow();
if(amount > 0f){
window.addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
WindowManager.LayoutParams lp = window.getAttributes();
lp.dimAmount = amount;
window.setAttributes(lp);
}
else
window.clearFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
return this;
} } | public class class_name {
public Dialog dimAmount(float amount){
Window window = getWindow();
if(amount > 0f){
window.addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND); // depends on control dependency: [if], data = [none]
WindowManager.LayoutParams lp = window.getAttributes();
lp.dimAmount = amount; // depends on control dependency: [if], data = [none]
window.setAttributes(lp); // depends on control dependency: [if], data = [none]
}
else
window.clearFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
return this;
} } |
public class class_name {
public static void main(String[] args) {
java.util.Random r = new java.util.Random();
Bits bits = new Bits();
for (int i=0; i<125; i++) {
int k;
do {
k = r.nextInt(250);
} while (bits.isMember(k));
System.out.println("adding " + k);
bits.incl(k);
}
int count = 0;
for (int i = bits.nextBit(0); i >= 0; i = bits.nextBit(i+1)) {
System.out.println("found " + i);
count ++;
}
if (count != 125) {
throw new Error();
}
} } | public class class_name {
public static void main(String[] args) {
java.util.Random r = new java.util.Random();
Bits bits = new Bits();
for (int i=0; i<125; i++) {
int k;
do {
k = r.nextInt(250);
} while (bits.isMember(k));
System.out.println("adding " + k); // depends on control dependency: [for], data = [none]
bits.incl(k); // depends on control dependency: [for], data = [none]
}
int count = 0;
for (int i = bits.nextBit(0); i >= 0; i = bits.nextBit(i+1)) {
System.out.println("found " + i); // depends on control dependency: [for], data = [i]
count ++; // depends on control dependency: [for], data = [none]
}
if (count != 125) {
throw new Error();
}
} } |
public class class_name {
private void uninstallLayout(JRootPane root) {
if (savedOldLayout != null) {
root.setLayout(savedOldLayout);
savedOldLayout = null;
}
} } | public class class_name {
private void uninstallLayout(JRootPane root) {
if (savedOldLayout != null) {
root.setLayout(savedOldLayout); // depends on control dependency: [if], data = [(savedOldLayout]
savedOldLayout = null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public EClass getIfcStyleModel() {
if (ifcStyleModelEClass == null) {
ifcStyleModelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(665);
}
return ifcStyleModelEClass;
} } | public class class_name {
@Override
public EClass getIfcStyleModel() {
if (ifcStyleModelEClass == null) {
ifcStyleModelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(665);
// depends on control dependency: [if], data = [none]
}
return ifcStyleModelEClass;
} } |
public class class_name {
@Override
public void replace(List<ApiType> list, String resourceVersion) {
lock.lock();
try {
Map<String, ApiType> newItems = new HashMap<>();
for (ApiType item : list) {
String key = keyFunc.apply(item);
newItems.put(key, item);
}
this.items = newItems;
// rebuild any index
this.indices = new HashMap<>();
for (Map.Entry<String, ApiType> itemEntry : items.entrySet()) {
this.updateIndices(null, itemEntry.getValue(), itemEntry.getKey());
}
} finally {
lock.unlock();
}
} } | public class class_name {
@Override
public void replace(List<ApiType> list, String resourceVersion) {
lock.lock();
try {
Map<String, ApiType> newItems = new HashMap<>();
for (ApiType item : list) {
String key = keyFunc.apply(item);
newItems.put(key, item); // depends on control dependency: [for], data = [item]
}
this.items = newItems; // depends on control dependency: [try], data = [none]
// rebuild any index
this.indices = new HashMap<>(); // depends on control dependency: [try], data = [none]
for (Map.Entry<String, ApiType> itemEntry : items.entrySet()) {
this.updateIndices(null, itemEntry.getValue(), itemEntry.getKey()); // depends on control dependency: [for], data = [itemEntry]
}
} finally {
lock.unlock();
}
} } |
public class class_name {
public Observable<Integer> deleteOrphanedEvents(String[] ids) {
return Observable.fromCallable(() -> {
SQLiteDatabase writable = dbHelper.getWritableDatabase();
if (ids.length > 0) {
int numberOfRows = writable.delete(DbOrphanedEvent.TABLE_NAME, DbOrphanedEvent.EVENT_ID + " IN " + queryPlaceholder(ids.length), ids);
log.d("Deleted " + numberOfRows + " orphaned events with ids " + Arrays.toString(ids));
return numberOfRows;
} else {
return 0;
}
});
} } | public class class_name {
public Observable<Integer> deleteOrphanedEvents(String[] ids) {
return Observable.fromCallable(() -> {
SQLiteDatabase writable = dbHelper.getWritableDatabase();
if (ids.length > 0) {
int numberOfRows = writable.delete(DbOrphanedEvent.TABLE_NAME, DbOrphanedEvent.EVENT_ID + " IN " + queryPlaceholder(ids.length), ids);
log.d("Deleted " + numberOfRows + " orphaned events with ids " + Arrays.toString(ids)); // depends on control dependency: [if], data = [none]
return numberOfRows; // depends on control dependency: [if], data = [none]
} else {
return 0; // depends on control dependency: [if], data = [none]
}
});
} } |
public class class_name {
private Account readAccountFromDescriptionNode(Element element)
throws Exception {
Account account = new Account();
// This is the hash
account.setId(element.getAttribute("RDF:about").trim());
account.setName(element.getAttribute("NS1:name").trim());
account.setDesc(element.getAttribute("NS1:description").trim());
// Groups only have a name, about, and description attribute.
// If this is detected, mark it as a folder. Otherwise read
// the full account data set.
if (!element.hasAttribute("NS1:hashAlgorithmLB")) {
account.setIsFolder(true);
} else {
account.setLeetType(LeetType.fromRdfString(element.getAttribute("NS1:whereLeetLB").trim().toLowerCase()));
String level = element.getAttribute("NS1:leetLevelLB").trim();
// chrome passwordmaker sets this to zero when leet is turned off.
if (level.equals("0")) level = "1";
account.setLeetLevel(LeetLevel.fromString(level));
String algorithm = element.getAttribute("NS1:hashAlgorithmLB").trim().toLowerCase();
Pair<AlgorithmType, Boolean> algo = fromRdfString(account, algorithm);
account.setAlgorithm(algo.first);
account.setTrim(algo.second);
account.setHmac(algorithm.contains("hmac-"));
String passwordLength = element.getAttribute("NS1:passwordLength").trim();
if (passwordLength.length() > 0)
account.setLength(Integer.parseInt(passwordLength));
else
account.setLength(Account.DEFAULT_LENGTH);
account.setUsername(element.getAttribute("NS1:usernameTB").trim());
account.setModifier(element.getAttribute("NS1:counter").trim());
// If trim is not set, that means only the Hex characterset is used.
if ( account.isTrim() )
account.setCharacterSet(element.getAttribute("NS1:charset").trim());
account.setPrefix(element.getAttribute("NS1:prefix").trim());
account.setSuffix(element.getAttribute("NS1:suffix").trim());
account.setAutoPop(element.getAttribute("NS1:autoPopulate").trim().compareTo("true") == 0);
// Read the URL extraction specifiers
account.clearUrlComponents();
if (element.getAttribute("NS1:protocolCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.Protocol);
if (element.getAttribute("NS1:subdomainCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.Subdomain);
if (element.getAttribute("NS1:domainCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.Domain);
if (element.getAttribute("NS1:pathCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.PortPathAnchorQuery);
account.setUrl(element.getAttribute("NS1:urlToUse").trim());
// pattern info... I really hope nobody has more than 100000(MAX_PATTERNS)
for (int iPattern = 0; iPattern < MAX_PATTERNS; ++iPattern) {
String pattern = element.getAttribute("NS1:pattern" + iPattern).trim();
String patternType = element.getAttribute("NS1:patterntype" + iPattern).trim();
String patternEnabled = element.getAttribute("NS1:patternenabled" + iPattern).trim();
String patternDesc = element.getAttribute("NS1:patterndesc" + iPattern).trim();
if (pattern.length() > 0 || patternType.length() > 0 || patternEnabled.length() > 0 || patternDesc.length() > 0) {
AccountPatternData data = new AccountPatternData();
if ( patternDesc.isEmpty() ) patternDesc = pattern;
data.setPattern(pattern);
data.setType(AccountPatternType.fromString(patternType));
data.setEnabled(patternEnabled.compareTo("true") == 0);
data.setDesc(patternDesc);
account.getPatterns().add(data);
} else {
iPattern = MAX_PATTERNS + 1;
}
}
}
if ( account.isRoot() ) {
// The root node isn't really an account, just a starting point for our account tree.
if ( ! account.getUrlComponents().isEmpty() || ! account.getUrl().isEmpty()
|| ! account.getUsername().isEmpty() || !account.getPatterns().isEmpty() ) {
logger.info("Clear out junk settings on the ROOT account.");
account.clearUrlComponents();
account.setUrl("");
account.setUsername("");
account.getPatterns().clear();
}
}
return account;
} } | public class class_name {
private Account readAccountFromDescriptionNode(Element element)
throws Exception {
Account account = new Account();
// This is the hash
account.setId(element.getAttribute("RDF:about").trim());
account.setName(element.getAttribute("NS1:name").trim());
account.setDesc(element.getAttribute("NS1:description").trim());
// Groups only have a name, about, and description attribute.
// If this is detected, mark it as a folder. Otherwise read
// the full account data set.
if (!element.hasAttribute("NS1:hashAlgorithmLB")) {
account.setIsFolder(true);
} else {
account.setLeetType(LeetType.fromRdfString(element.getAttribute("NS1:whereLeetLB").trim().toLowerCase()));
String level = element.getAttribute("NS1:leetLevelLB").trim();
// chrome passwordmaker sets this to zero when leet is turned off.
if (level.equals("0")) level = "1";
account.setLeetLevel(LeetLevel.fromString(level));
String algorithm = element.getAttribute("NS1:hashAlgorithmLB").trim().toLowerCase();
Pair<AlgorithmType, Boolean> algo = fromRdfString(account, algorithm);
account.setAlgorithm(algo.first);
account.setTrim(algo.second);
account.setHmac(algorithm.contains("hmac-"));
String passwordLength = element.getAttribute("NS1:passwordLength").trim();
if (passwordLength.length() > 0)
account.setLength(Integer.parseInt(passwordLength));
else
account.setLength(Account.DEFAULT_LENGTH);
account.setUsername(element.getAttribute("NS1:usernameTB").trim());
account.setModifier(element.getAttribute("NS1:counter").trim());
// If trim is not set, that means only the Hex characterset is used.
if ( account.isTrim() )
account.setCharacterSet(element.getAttribute("NS1:charset").trim());
account.setPrefix(element.getAttribute("NS1:prefix").trim());
account.setSuffix(element.getAttribute("NS1:suffix").trim());
account.setAutoPop(element.getAttribute("NS1:autoPopulate").trim().compareTo("true") == 0);
// Read the URL extraction specifiers
account.clearUrlComponents();
if (element.getAttribute("NS1:protocolCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.Protocol);
if (element.getAttribute("NS1:subdomainCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.Subdomain);
if (element.getAttribute("NS1:domainCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.Domain);
if (element.getAttribute("NS1:pathCB").trim().compareToIgnoreCase("true") == 0)
account.addUrlComponent(Account.UrlComponents.PortPathAnchorQuery);
account.setUrl(element.getAttribute("NS1:urlToUse").trim());
// pattern info... I really hope nobody has more than 100000(MAX_PATTERNS)
for (int iPattern = 0; iPattern < MAX_PATTERNS; ++iPattern) {
String pattern = element.getAttribute("NS1:pattern" + iPattern).trim();
String patternType = element.getAttribute("NS1:patterntype" + iPattern).trim();
String patternEnabled = element.getAttribute("NS1:patternenabled" + iPattern).trim();
String patternDesc = element.getAttribute("NS1:patterndesc" + iPattern).trim();
if (pattern.length() > 0 || patternType.length() > 0 || patternEnabled.length() > 0 || patternDesc.length() > 0) {
AccountPatternData data = new AccountPatternData();
if ( patternDesc.isEmpty() ) patternDesc = pattern;
data.setPattern(pattern); // depends on control dependency: [if], data = [none]
data.setType(AccountPatternType.fromString(patternType)); // depends on control dependency: [if], data = [none]
data.setEnabled(patternEnabled.compareTo("true") == 0); // depends on control dependency: [if], data = [none]
data.setDesc(patternDesc); // depends on control dependency: [if], data = [none]
account.getPatterns().add(data); // depends on control dependency: [if], data = [none]
} else {
iPattern = MAX_PATTERNS + 1; // depends on control dependency: [if], data = [none]
}
}
}
if ( account.isRoot() ) {
// The root node isn't really an account, just a starting point for our account tree.
if ( ! account.getUrlComponents().isEmpty() || ! account.getUrl().isEmpty()
|| ! account.getUsername().isEmpty() || !account.getPatterns().isEmpty() ) {
logger.info("Clear out junk settings on the ROOT account.");
account.clearUrlComponents();
account.setUrl("");
account.setUsername("");
account.getPatterns().clear();
}
}
return account;
} } |
public class class_name {
private void adaptWindowBackgroundAndInset() {
DialogRootView rootView = getRootView();
if (rootView != null) {
rootView.setWindowBackgroundAndInset(windowBackground, windowInsets);
}
} } | public class class_name {
private void adaptWindowBackgroundAndInset() {
DialogRootView rootView = getRootView();
if (rootView != null) {
rootView.setWindowBackgroundAndInset(windowBackground, windowInsets); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public double[] getVotesForInstance(Instance inst) {
if (m_weights == null) {
return new double[inst.numAttributes() + 1];
}
double[] result = new double[2];
double wx = dotProd(inst, m_weights, inst.classIndex());// * m_wScale;
double z = (wx + m_weights[m_weights.length - 1]);
//System.out.print("" + z + ": ");
// System.out.println(1.0 / (1.0 + Math.exp(-z)));
if (z <= 0) {
// z = 0;
if (m_loss == LOGLOSS) {
result[0] = 1.0 / (1.0 + Math.exp(z));
result[1] = 1.0 - result[0];
} else {
result[0] = 1;
}
} else {
if (m_loss == LOGLOSS) {
result[1] = 1.0 / (1.0 + Math.exp(-z));
result[0] = 1.0 - result[1];
} else {
result[1] = 1;
}
}
return result;
} } | public class class_name {
@Override
public double[] getVotesForInstance(Instance inst) {
if (m_weights == null) {
return new double[inst.numAttributes() + 1]; // depends on control dependency: [if], data = [none]
}
double[] result = new double[2];
double wx = dotProd(inst, m_weights, inst.classIndex());// * m_wScale;
double z = (wx + m_weights[m_weights.length - 1]);
//System.out.print("" + z + ": ");
// System.out.println(1.0 / (1.0 + Math.exp(-z)));
if (z <= 0) {
// z = 0;
if (m_loss == LOGLOSS) {
result[0] = 1.0 / (1.0 + Math.exp(z)); // depends on control dependency: [if], data = [none]
result[1] = 1.0 - result[0]; // depends on control dependency: [if], data = [none]
} else {
result[0] = 1; // depends on control dependency: [if], data = [none]
}
} else {
if (m_loss == LOGLOSS) {
result[1] = 1.0 / (1.0 + Math.exp(-z)); // depends on control dependency: [if], data = [none]
result[0] = 1.0 - result[1]; // depends on control dependency: [if], data = [none]
} else {
result[1] = 1; // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
public Observable<ServiceResponse<Page<JobStreamInner>>> listByJobWithServiceResponseAsync(final String resourceGroupName, final String automationAccountName, final String jobId) {
return listByJobSinglePageAsync(resourceGroupName, automationAccountName, jobId)
.concatMap(new Func1<ServiceResponse<Page<JobStreamInner>>, Observable<ServiceResponse<Page<JobStreamInner>>>>() {
@Override
public Observable<ServiceResponse<Page<JobStreamInner>>> call(ServiceResponse<Page<JobStreamInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listByJobNextWithServiceResponseAsync(nextPageLink));
}
});
} } | public class class_name {
public Observable<ServiceResponse<Page<JobStreamInner>>> listByJobWithServiceResponseAsync(final String resourceGroupName, final String automationAccountName, final String jobId) {
return listByJobSinglePageAsync(resourceGroupName, automationAccountName, jobId)
.concatMap(new Func1<ServiceResponse<Page<JobStreamInner>>, Observable<ServiceResponse<Page<JobStreamInner>>>>() {
@Override
public Observable<ServiceResponse<Page<JobStreamInner>>> call(ServiceResponse<Page<JobStreamInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page); // depends on control dependency: [if], data = [none]
}
return Observable.just(page).concatWith(listByJobNextWithServiceResponseAsync(nextPageLink));
}
});
} } |
public class class_name {
protected SITransaction getTransaction() {
if (TRACE.isEntryEnabled()) {
final String methodName = "getTransaction";
SibTr.entry(this, TRACE, methodName);
SibTr.exit(this, TRACE, methodName, null);
}
return null;
} } | public class class_name {
protected SITransaction getTransaction() {
if (TRACE.isEntryEnabled()) {
final String methodName = "getTransaction";
SibTr.entry(this, TRACE, methodName); // depends on control dependency: [if], data = [none]
SibTr.exit(this, TRACE, methodName, null); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public boolean containsNaN() {
for (int c = 0; c < this.size(); c++) {
if (s.isNaN(this.getValue(c))) {
return true;
}
}
return false;
} } | public class class_name {
public boolean containsNaN() {
for (int c = 0; c < this.size(); c++) {
if (s.isNaN(this.getValue(c))) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
public void aggregateQueryStringParams(String additionalQueryString, boolean setQS)
{
// 321485
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "entry qs --> " + additionalQueryString + " set --> " + String.valueOf(setQS));
}
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse();
}
QSListItem tmpQS = null;
SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
if (reqData.getParameters() == null)
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "The paramater stack is currently null");
// Begin 258025, Part 2
LinkedList queryStringList = SRTServletRequestThreadData.getInstance().getQueryStringList();
if (queryStringList == null || queryStringList.isEmpty())
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "The queryStringList is empty");
if (queryStringList == null)
queryStringList = new LinkedList();
if (getQueryString()!=null){
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "getQueryString will be added first in the QSList wih value->"+getQueryString());
tmpQS = new QSListItem(getQueryString(), null);
queryStringList.add(tmpQS);
}
SRTServletRequestThreadData.getInstance().setQueryStringList(queryStringList);
}
// End 258025, Part 2
if (additionalQueryString !=null){
tmpQS = new QSListItem(additionalQueryString, null);
queryStringList.add(tmpQS);
}
}
if (setQS){
setQueryString(additionalQueryString);
}
// if _parameters is not null, then this is part of a forward or include...add the additional query parms
// if _parameters is null, then the string will be parsed if needed
if (reqData.getParameters() != null && additionalQueryString != null)
{
Hashtable parameters = RequestUtils.parseQueryString(additionalQueryString, getReaderEncoding(true));
// end 249841, 256836
String[] valArray;
for (Enumeration e = parameters.keys(); e.hasMoreElements();)
{
String key = (String) e.nextElement();
String[] newVals = (String[]) parameters.get(key);
// Check to see if a parameter with the key already exists
// and prepend the values since QueryString takes precedence
//
if (reqData.getParameters().containsKey(key))
{
String[] oldVals = (String[]) reqData.getParameters().get(key);
Vector v = new Vector();
for (int i = 0; i < newVals.length; i++)
{
v.add(newVals[i]);
}
for (int i = 0; i < oldVals.length; i++)
{
// 249841, do not check to see if values already exist
v.add(oldVals[i]);
}
valArray = new String[v.size()];
v.toArray(valArray);
reqData.getParameters().put(key, valArray);
}
else
{
reqData.getParameters().put(key, newVals);
}
}
}
} } | public class class_name {
public void aggregateQueryStringParams(String additionalQueryString, boolean setQS)
{
// 321485
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "entry qs --> " + additionalQueryString + " set --> " + String.valueOf(setQS)); // depends on control dependency: [if], data = [none]
}
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse(); // depends on control dependency: [if], data = [none]
}
QSListItem tmpQS = null;
SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
if (reqData.getParameters() == null)
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "The paramater stack is currently null");
// Begin 258025, Part 2
LinkedList queryStringList = SRTServletRequestThreadData.getInstance().getQueryStringList();
if (queryStringList == null || queryStringList.isEmpty())
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "The queryStringList is empty");
if (queryStringList == null)
queryStringList = new LinkedList();
if (getQueryString()!=null){
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
logger.logp(Level.FINE, CLASS_NAME,"aggregateQueryStringParams", "getQueryString will be added first in the QSList wih value->"+getQueryString());
tmpQS = new QSListItem(getQueryString(), null); // depends on control dependency: [if], data = [(getQueryString()]
queryStringList.add(tmpQS); // depends on control dependency: [if], data = [none]
}
SRTServletRequestThreadData.getInstance().setQueryStringList(queryStringList); // depends on control dependency: [if], data = [(queryStringList]
}
// End 258025, Part 2
if (additionalQueryString !=null){
tmpQS = new QSListItem(additionalQueryString, null); // depends on control dependency: [if], data = [(additionalQueryString]
queryStringList.add(tmpQS); // depends on control dependency: [if], data = [none]
}
}
if (setQS){
setQueryString(additionalQueryString); // depends on control dependency: [if], data = [none]
}
// if _parameters is not null, then this is part of a forward or include...add the additional query parms
// if _parameters is null, then the string will be parsed if needed
if (reqData.getParameters() != null && additionalQueryString != null)
{
Hashtable parameters = RequestUtils.parseQueryString(additionalQueryString, getReaderEncoding(true));
// end 249841, 256836
String[] valArray;
for (Enumeration e = parameters.keys(); e.hasMoreElements();)
{
String key = (String) e.nextElement();
String[] newVals = (String[]) parameters.get(key);
// Check to see if a parameter with the key already exists
// and prepend the values since QueryString takes precedence
//
if (reqData.getParameters().containsKey(key))
{
String[] oldVals = (String[]) reqData.getParameters().get(key);
Vector v = new Vector();
for (int i = 0; i < newVals.length; i++)
{
v.add(newVals[i]); // depends on control dependency: [for], data = [i]
}
for (int i = 0; i < oldVals.length; i++)
{
// 249841, do not check to see if values already exist
v.add(oldVals[i]); // depends on control dependency: [for], data = [i]
}
valArray = new String[v.size()]; // depends on control dependency: [if], data = [none]
v.toArray(valArray); // depends on control dependency: [if], data = [none]
reqData.getParameters().put(key, valArray); // depends on control dependency: [if], data = [none]
}
else
{
reqData.getParameters().put(key, newVals); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
public T extractFromHeader(String headerName, String variable) {
if (headerExtractor == null) {
headerExtractor = new MessageHeaderVariableExtractor();
getAction().getVariableExtractors().add(headerExtractor);
}
headerExtractor.getHeaderMappings().put(headerName, variable);
return self;
} } | public class class_name {
public T extractFromHeader(String headerName, String variable) {
if (headerExtractor == null) {
headerExtractor = new MessageHeaderVariableExtractor(); // depends on control dependency: [if], data = [none]
getAction().getVariableExtractors().add(headerExtractor); // depends on control dependency: [if], data = [(headerExtractor]
}
headerExtractor.getHeaderMappings().put(headerName, variable);
return self;
} } |
public class class_name {
public synchronized static void close() throws IOException {
if (initialized) {
initialized = false;
reader = null;
if (terminal != null) {
try {
terminal.close();
} finally {
terminal = null;
}
}
}
} } | public class class_name {
public synchronized static void close() throws IOException {
if (initialized) {
initialized = false;
reader = null;
if (terminal != null) {
try {
terminal.close(); // depends on control dependency: [try], data = [none]
} finally {
terminal = null;
}
}
}
} } |
public class class_name {
private static String numToString(int num, int width) {
String str = Integer.toString(num);
int len = str.length();
if (len >= width) {
return str.substring(len - width, len);
}
StringBuilder sb = new StringBuilder(width);
for (int i = len; i < width; i++) {
sb.append('0');
}
sb.append(str);
return sb.toString();
} } | public class class_name {
private static String numToString(int num, int width) {
String str = Integer.toString(num);
int len = str.length();
if (len >= width) {
return str.substring(len - width, len); // depends on control dependency: [if], data = [(len]
}
StringBuilder sb = new StringBuilder(width);
for (int i = len; i < width; i++) {
sb.append('0'); // depends on control dependency: [for], data = [none]
}
sb.append(str);
return sb.toString();
} } |
public class class_name {
@Override
public boolean hasNext() {
if (!isForward) {
currentRec = currentRec - pointerSize;
isForward = true;
}
return currentRec > 0 || blk.number() > 0;
} } | public class class_name {
@Override
public boolean hasNext() {
if (!isForward) {
currentRec = currentRec - pointerSize;
// depends on control dependency: [if], data = [none]
isForward = true;
// depends on control dependency: [if], data = [none]
}
return currentRec > 0 || blk.number() > 0;
} } |
public class class_name {
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
WStyledText text = (WStyledText) component;
XmlStringBuilder xml = renderContext.getWriter();
String textString = text.getText();
if (textString != null && textString.length() > 0) {
xml.appendTagOpen("ui:text");
xml.appendOptionalAttribute("class", component.getHtmlClass());
switch (text.getType()) {
case EMPHASISED:
xml.appendAttribute("type", "emphasised");
break;
case HIGH_PRIORITY:
xml.appendAttribute("type", "highPriority");
break;
case LOW_PRIORITY:
xml.appendAttribute("type", "lowPriority");
break;
case MEDIUM_PRIORITY:
xml.appendAttribute("type", "mediumPriority");
break;
case ACTIVE_INDICATOR:
xml.appendAttribute("type", "activeIndicator");
break;
case MATCH_INDICATOR:
xml.appendAttribute("type", "matchIndicator");
break;
case INSERT:
xml.appendAttribute("type", "insert");
break;
case DELETE:
xml.appendAttribute("type", "delete");
break;
case MANDATORY_INDICATOR:
xml.appendAttribute("type", "mandatoryIndicator");
break;
case PLAIN:
default:
xml.appendAttribute("type", "plain");
break;
}
switch (text.getWhitespaceMode()) {
case PARAGRAPHS:
xml.appendAttribute("space", "paragraphs");
break;
case PRESERVE:
xml.appendAttribute("space", "preserve");
break;
case DEFAULT:
break;
default:
throw new IllegalArgumentException("Unknown white space mode: " + text.
getWhitespaceMode());
}
xml.appendClose();
if (WStyledText.WhitespaceMode.PARAGRAPHS.equals(text.getWhitespaceMode())) {
textString = text.isEncodeText() ? WebUtilities.encode(textString) : HtmlToXMLUtil.unescapeToXML(textString);
writeParagraphs(textString, xml);
} else {
xml.append(textString, text.isEncodeText());
}
xml.appendEndTag("ui:text");
}
} } | public class class_name {
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
WStyledText text = (WStyledText) component;
XmlStringBuilder xml = renderContext.getWriter();
String textString = text.getText();
if (textString != null && textString.length() > 0) {
xml.appendTagOpen("ui:text"); // depends on control dependency: [if], data = [none]
xml.appendOptionalAttribute("class", component.getHtmlClass()); // depends on control dependency: [if], data = [none]
switch (text.getType()) {
case EMPHASISED:
xml.appendAttribute("type", "emphasised");
break;
case HIGH_PRIORITY:
xml.appendAttribute("type", "highPriority");
break;
case LOW_PRIORITY:
xml.appendAttribute("type", "lowPriority");
break;
case MEDIUM_PRIORITY:
xml.appendAttribute("type", "mediumPriority");
break;
case ACTIVE_INDICATOR:
xml.appendAttribute("type", "activeIndicator");
break;
case MATCH_INDICATOR:
xml.appendAttribute("type", "matchIndicator");
break;
case INSERT:
xml.appendAttribute("type", "insert");
break;
case DELETE:
xml.appendAttribute("type", "delete");
break;
case MANDATORY_INDICATOR:
xml.appendAttribute("type", "mandatoryIndicator");
break;
case PLAIN:
default:
xml.appendAttribute("type", "plain");
break;
}
switch (text.getWhitespaceMode()) {
case PARAGRAPHS:
xml.appendAttribute("space", "paragraphs");
break;
case PRESERVE:
xml.appendAttribute("space", "preserve");
break;
case DEFAULT:
break;
default:
throw new IllegalArgumentException("Unknown white space mode: " + text.
getWhitespaceMode());
}
xml.appendClose();
if (WStyledText.WhitespaceMode.PARAGRAPHS.equals(text.getWhitespaceMode())) {
textString = text.isEncodeText() ? WebUtilities.encode(textString) : HtmlToXMLUtil.unescapeToXML(textString);
writeParagraphs(textString, xml);
} else {
xml.append(textString, text.isEncodeText());
}
xml.appendEndTag("ui:text");
}
} } |
public class class_name {
@SuppressWarnings("unchecked")
public void filterTable(String search) {
m_container.removeAllContainerFilters();
if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(search)) {
m_container.addContainerFilter(
new Or(
new SimpleStringFilter(TableColumn.Channel, search, true, false),
new SimpleStringFilter(TableColumn.ParentChannel, search, true, false),
new SimpleStringFilter(TableColumn.File, search, true, false)));
}
if ((getValue() != null) & !((Set<Logger>)getValue()).isEmpty()) {
setCurrentPageFirstItemId(((Set<Logger>)getValue()).iterator().next());
}
} } | public class class_name {
@SuppressWarnings("unchecked")
public void filterTable(String search) {
m_container.removeAllContainerFilters();
if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(search)) {
m_container.addContainerFilter(
new Or(
new SimpleStringFilter(TableColumn.Channel, search, true, false),
new SimpleStringFilter(TableColumn.ParentChannel, search, true, false),
new SimpleStringFilter(TableColumn.File, search, true, false))); // depends on control dependency: [if], data = [none]
}
if ((getValue() != null) & !((Set<Logger>)getValue()).isEmpty()) {
setCurrentPageFirstItemId(((Set<Logger>)getValue()).iterator().next()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void init(Symtab syms, boolean definitive) {
if (classes != null) return;
if (definitive) {
Assert.check(packages == null || packages == syms.packages);
packages = syms.packages;
Assert.check(classes == null || classes == syms.classes);
classes = syms.classes;
} else {
packages = new HashMap<Name, PackageSymbol>();
classes = new HashMap<Name, ClassSymbol>();
}
packages.put(names.empty, syms.rootPackage);
syms.rootPackage.completer = thisCompleter;
syms.unnamedPackage.completer = thisCompleter;
} } | public class class_name {
private void init(Symtab syms, boolean definitive) {
if (classes != null) return;
if (definitive) {
Assert.check(packages == null || packages == syms.packages); // depends on control dependency: [if], data = [none]
packages = syms.packages; // depends on control dependency: [if], data = [none]
Assert.check(classes == null || classes == syms.classes); // depends on control dependency: [if], data = [none]
classes = syms.classes; // depends on control dependency: [if], data = [none]
} else {
packages = new HashMap<Name, PackageSymbol>(); // depends on control dependency: [if], data = [none]
classes = new HashMap<Name, ClassSymbol>(); // depends on control dependency: [if], data = [none]
}
packages.put(names.empty, syms.rootPackage);
syms.rootPackage.completer = thisCompleter;
syms.unnamedPackage.completer = thisCompleter;
} } |
public class class_name {
public boolean contains(int partitionId) {
if (lowerID < upperID) { //"Proper" id range
return lowerID <= partitionId && upperID > partitionId;
} else { //Id range "wraps around"
return (lowerID <= partitionId && partitionId < idUpperBound) ||
(upperID > partitionId && partitionId >= 0);
}
} } | public class class_name {
public boolean contains(int partitionId) {
if (lowerID < upperID) { //"Proper" id range
return lowerID <= partitionId && upperID > partitionId; // depends on control dependency: [if], data = [none]
} else { //Id range "wraps around"
return (lowerID <= partitionId && partitionId < idUpperBound) ||
(upperID > partitionId && partitionId >= 0); // depends on control dependency: [if], data = [(lowerID]
}
} } |
public class class_name {
public int[] getReferenceNumbers() {
Iterator it = m_tunes.keySet().iterator();
int[] refNb = new int[m_tunes.size()];
int index = 0;
while (it.hasNext()) {
refNb[index] = (Integer) it.next();
index++;
}
return refNb;
} } | public class class_name {
public int[] getReferenceNumbers() {
Iterator it = m_tunes.keySet().iterator();
int[] refNb = new int[m_tunes.size()];
int index = 0;
while (it.hasNext()) {
refNb[index] = (Integer) it.next();
// depends on control dependency: [while], data = [none]
index++;
// depends on control dependency: [while], data = [none]
}
return refNb;
} } |
public class class_name {
@Override
public EClass getIfcConnectedFaceSet() {
if (ifcConnectedFaceSetEClass == null) {
ifcConnectedFaceSetEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(120);
}
return ifcConnectedFaceSetEClass;
} } | public class class_name {
@Override
public EClass getIfcConnectedFaceSet() {
if (ifcConnectedFaceSetEClass == null) {
ifcConnectedFaceSetEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(120);
// depends on control dependency: [if], data = [none]
}
return ifcConnectedFaceSetEClass;
} } |
public class class_name {
static <T> T loadService(Class<T> ofType) {
List<String> implClasses = loadServicesImplementations(ofType);
for (String implClass : implClasses) {
T result = attemptLoad(ofType, implClass);
if (result != null) {
if (LOG.isLoggable(Level.FINEST)) {
LOG.finest("Selected " + ofType.getSimpleName()
+ " implementation: "
+ result.getClass().getName());
}
return result;
}
}
throw(new IllegalStateException(
"Could not load " + ofType.getName() + " implementation"));
} } | public class class_name {
static <T> T loadService(Class<T> ofType) {
List<String> implClasses = loadServicesImplementations(ofType);
for (String implClass : implClasses) {
T result = attemptLoad(ofType, implClass);
if (result != null) {
if (LOG.isLoggable(Level.FINEST)) {
LOG.finest("Selected " + ofType.getSimpleName()
+ " implementation: "
+ result.getClass().getName()); // depends on control dependency: [if], data = [none]
}
return result; // depends on control dependency: [if], data = [none]
}
}
throw(new IllegalStateException(
"Could not load " + ofType.getName() + " implementation"));
} } |
public class class_name {
static public ThreddsMetadata.Variables extractVariables(InvDatasetImpl threddsDataset) throws IOException {
ThreddsDataFactory.Result result = null;
try {
result = new ThreddsDataFactory().openFeatureDataset(threddsDataset, null);
if (result.fatalError) {
System.out.println(" openDatatype errs=" + result.errLog);
return null;
}
if (result.featureType == FeatureType.GRID) {
// System.out.println(" extractVariables GRID=" + result.location);
GridDataset gridDataset = (GridDataset) result.featureDataset;
return extractVariables(threddsDataset, gridDataset);
} else if ((result.featureType == FeatureType.STATION) || (result.featureType == FeatureType.POINT)) {
PointObsDataset pobsDataset = (PointObsDataset) result.featureDataset;
ThreddsMetadata.Variables vars = new ThreddsMetadata.Variables("CF-1.0");
for (VariableSimpleIF vs : pobsDataset.getDataVariables()) {
ThreddsMetadata.Variable v = new ThreddsMetadata.Variable();
vars.addVariable( v);
v.setName( vs.getShortName());
v.setDescription( vs.getDescription());
v.setUnits( vs.getUnitsString());
ucar.nc2.Attribute att = vs.findAttributeIgnoreCase("standard_name");
if (att != null)
v.setVocabularyName(att.getStringValue());
}
vars.sort();
return vars;
}
} finally {
try {
if ((result != null) && (result.featureDataset != null))
result.featureDataset.close();
} catch (IOException ioe) {
logger.error("Closing dataset "+result.featureDataset, ioe);
}
}
return null;
} } | public class class_name {
static public ThreddsMetadata.Variables extractVariables(InvDatasetImpl threddsDataset) throws IOException {
ThreddsDataFactory.Result result = null;
try {
result = new ThreddsDataFactory().openFeatureDataset(threddsDataset, null);
if (result.fatalError) {
System.out.println(" openDatatype errs=" + result.errLog); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
if (result.featureType == FeatureType.GRID) {
// System.out.println(" extractVariables GRID=" + result.location);
GridDataset gridDataset = (GridDataset) result.featureDataset;
return extractVariables(threddsDataset, gridDataset); // depends on control dependency: [if], data = [none]
} else if ((result.featureType == FeatureType.STATION) || (result.featureType == FeatureType.POINT)) {
PointObsDataset pobsDataset = (PointObsDataset) result.featureDataset;
ThreddsMetadata.Variables vars = new ThreddsMetadata.Variables("CF-1.0");
for (VariableSimpleIF vs : pobsDataset.getDataVariables()) {
ThreddsMetadata.Variable v = new ThreddsMetadata.Variable();
vars.addVariable( v); // depends on control dependency: [for], data = [none]
v.setName( vs.getShortName()); // depends on control dependency: [for], data = [vs]
v.setDescription( vs.getDescription()); // depends on control dependency: [for], data = [vs]
v.setUnits( vs.getUnitsString()); // depends on control dependency: [for], data = [vs]
ucar.nc2.Attribute att = vs.findAttributeIgnoreCase("standard_name");
if (att != null)
v.setVocabularyName(att.getStringValue());
}
vars.sort(); // depends on control dependency: [if], data = [none]
return vars; // depends on control dependency: [if], data = [none]
}
} finally {
try {
if ((result != null) && (result.featureDataset != null))
result.featureDataset.close();
} catch (IOException ioe) {
logger.error("Closing dataset "+result.featureDataset, ioe);
} // depends on control dependency: [catch], data = [none]
}
return null;
} } |
public class class_name {
public static Transformer setOutputProperties(Transformer transformer, boolean omitXMLDeclaration, int indentAmount, String encoding){
transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, omitXMLDeclaration ? "yes" : "no");
// indentation
if(indentAmount>0){
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty(OUTPUT_KEY_INDENT_AMOUT, String.valueOf(indentAmount));
}
if(!StringUtil.isWhitespace(encoding))
transformer.setOutputProperty(OutputKeys.ENCODING, encoding.trim());
return transformer;
} } | public class class_name {
public static Transformer setOutputProperties(Transformer transformer, boolean omitXMLDeclaration, int indentAmount, String encoding){
transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, omitXMLDeclaration ? "yes" : "no");
// indentation
if(indentAmount>0){
transformer.setOutputProperty(OutputKeys.INDENT, "yes"); // depends on control dependency: [if], data = [none]
transformer.setOutputProperty(OUTPUT_KEY_INDENT_AMOUT, String.valueOf(indentAmount)); // depends on control dependency: [if], data = [(indentAmount]
}
if(!StringUtil.isWhitespace(encoding))
transformer.setOutputProperty(OutputKeys.ENCODING, encoding.trim());
return transformer;
} } |
public class class_name {
private void reloadDescendants(QPath parentOld, QPath parent) throws RepositoryException
{
List<ItemImpl> items = itemsPool.getDescendats(parentOld);
for (ItemImpl item : items)
{
ItemData oldItemData = item.getData();
ItemData newItemData = updatePath(parentOld, parent, oldItemData);
ItemImpl reloadedItem = reloadItem(newItemData);
if (reloadedItem != null)
{
invalidated.add(reloadedItem);
}
}
} } | public class class_name {
private void reloadDescendants(QPath parentOld, QPath parent) throws RepositoryException
{
List<ItemImpl> items = itemsPool.getDescendats(parentOld);
for (ItemImpl item : items)
{
ItemData oldItemData = item.getData();
ItemData newItemData = updatePath(parentOld, parent, oldItemData);
ItemImpl reloadedItem = reloadItem(newItemData);
if (reloadedItem != null)
{
invalidated.add(reloadedItem); // depends on control dependency: [if], data = [(reloadedItem]
}
}
} } |
public class class_name {
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mShapeBadgeItem != null) {
mShapeBadgeItem.draw(canvas);
}
} } | public class class_name {
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mShapeBadgeItem != null) {
mShapeBadgeItem.draw(canvas); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Deprecated
public static List<String> trim(List<String> values) {
List<String> trimmed = new ArrayList<>();
for (String value : values) {
trimmed.add(value.trim());
}
return trimmed;
} } | public class class_name {
@Deprecated
public static List<String> trim(List<String> values) {
List<String> trimmed = new ArrayList<>();
for (String value : values) {
trimmed.add(value.trim()); // depends on control dependency: [for], data = [value]
}
return trimmed;
} } |
public class class_name {
private Symbol addConstantDynamicOrInvokeDynamicReference(
final int tag, final String name, final String descriptor, final int bootstrapMethodIndex) {
int hashCode = hash(tag, name, descriptor, bootstrapMethodIndex);
Entry entry = get(hashCode);
while (entry != null) {
if (entry.tag == tag
&& entry.hashCode == hashCode
&& entry.data == bootstrapMethodIndex
&& entry.name.equals(name)
&& entry.value.equals(descriptor)) {
return entry;
}
entry = entry.next;
}
constantPool.put122(tag, bootstrapMethodIndex, addConstantNameAndType(name, descriptor));
return put(
new Entry(
constantPoolCount++, tag, null, name, descriptor, bootstrapMethodIndex, hashCode));
} } | public class class_name {
private Symbol addConstantDynamicOrInvokeDynamicReference(
final int tag, final String name, final String descriptor, final int bootstrapMethodIndex) {
int hashCode = hash(tag, name, descriptor, bootstrapMethodIndex);
Entry entry = get(hashCode);
while (entry != null) {
if (entry.tag == tag
&& entry.hashCode == hashCode
&& entry.data == bootstrapMethodIndex
&& entry.name.equals(name)
&& entry.value.equals(descriptor)) {
return entry; // depends on control dependency: [if], data = [none]
}
entry = entry.next; // depends on control dependency: [while], data = [none]
}
constantPool.put122(tag, bootstrapMethodIndex, addConstantNameAndType(name, descriptor));
return put(
new Entry(
constantPoolCount++, tag, null, name, descriptor, bootstrapMethodIndex, hashCode));
} } |
public class class_name {
public void commit(boolean force) {
if (checkSession()) {
return;
}
if (log.isDebugEnabled()) {
ToStringBuilder tsb = new ToStringBuilder(String.format("Committing transactional %s@%x",
sqlSession.getClass().getSimpleName(),sqlSession.hashCode()));
tsb.append("force", force);
log.debug(tsb.toString());
}
sqlSession.commit(force);
} } | public class class_name {
public void commit(boolean force) {
if (checkSession()) {
return; // depends on control dependency: [if], data = [none]
}
if (log.isDebugEnabled()) {
ToStringBuilder tsb = new ToStringBuilder(String.format("Committing transactional %s@%x",
sqlSession.getClass().getSimpleName(),sqlSession.hashCode()));
tsb.append("force", force); // depends on control dependency: [if], data = [none]
log.debug(tsb.toString()); // depends on control dependency: [if], data = [none]
}
sqlSession.commit(force);
} } |
public class class_name {
public void log(ServiceReference sr, int level, String message, Throwable exception) {
switch (level) {
case LOG_DEBUG:
if (delegate.isDebugEnabled()) {
delegate.debug(createMessage(sr, message), exception);
}
break;
case LOG_ERROR:
if (delegate.isErrorEnabled()) {
delegate.error(createMessage(sr, message), exception);
}
break;
case LOG_INFO:
if (delegate.isInfoEnabled()) {
delegate.info(createMessage(sr, message), exception);
}
break;
case LOG_WARNING:
if (delegate.isWarnEnabled()) {
delegate.warn(createMessage(sr, message), exception);
}
break;
default:
break;
}
} } | public class class_name {
public void log(ServiceReference sr, int level, String message, Throwable exception) {
switch (level) {
case LOG_DEBUG:
if (delegate.isDebugEnabled()) {
delegate.debug(createMessage(sr, message), exception); // depends on control dependency: [if], data = [none]
}
break;
case LOG_ERROR:
if (delegate.isErrorEnabled()) {
delegate.error(createMessage(sr, message), exception); // depends on control dependency: [if], data = [none]
}
break;
case LOG_INFO:
if (delegate.isInfoEnabled()) {
delegate.info(createMessage(sr, message), exception); // depends on control dependency: [if], data = [none]
}
break;
case LOG_WARNING:
if (delegate.isWarnEnabled()) {
delegate.warn(createMessage(sr, message), exception); // depends on control dependency: [if], data = [none]
}
break;
default:
break;
}
} } |
public class class_name {
public static final String addQuotes(String szTableNames, char charStart, char charEnd)
{
String strFileName = szTableNames;
if (charStart == -1)
charStart = DBConstants.SQL_START_QUOTE;
if (charEnd == -1)
charEnd = DBConstants.SQL_END_QUOTE;
for (int iIndex = 0; iIndex < strFileName.length(); iIndex++)
{
if ((strFileName.charAt(iIndex) == charStart)
|| (strFileName.charAt(iIndex) == charEnd))
{ // If a quote is in this string, replace with a double-quote Fred's -> Fred''s
strFileName = strFileName.substring(0, iIndex) +
strFileName.substring(iIndex, iIndex + 1) +
strFileName.substring(iIndex, iIndex + 1) +
strFileName.substring(iIndex + 1, strFileName.length());
iIndex++; // Skip the second quote
}
}
if ((charStart != ' ') && (charEnd != ' '))
strFileName = charStart + strFileName + charEnd; // Spaces in name, quotes required
return strFileName;
} } | public class class_name {
public static final String addQuotes(String szTableNames, char charStart, char charEnd)
{
String strFileName = szTableNames;
if (charStart == -1)
charStart = DBConstants.SQL_START_QUOTE;
if (charEnd == -1)
charEnd = DBConstants.SQL_END_QUOTE;
for (int iIndex = 0; iIndex < strFileName.length(); iIndex++)
{
if ((strFileName.charAt(iIndex) == charStart)
|| (strFileName.charAt(iIndex) == charEnd))
{ // If a quote is in this string, replace with a double-quote Fred's -> Fred''s
strFileName = strFileName.substring(0, iIndex) +
strFileName.substring(iIndex, iIndex + 1) +
strFileName.substring(iIndex, iIndex + 1) +
strFileName.substring(iIndex + 1, strFileName.length()); // depends on control dependency: [if], data = [none]
iIndex++; // Skip the second quote // depends on control dependency: [if], data = [none]
}
}
if ((charStart != ' ') && (charEnd != ' '))
strFileName = charStart + strFileName + charEnd; // Spaces in name, quotes required
return strFileName;
} } |
public class class_name {
public Float getFloat(final String key) {
Number number = (Number) map.get(key);
if (number == null) {
return null;
}
if (number instanceof Float) {
return (Float) number;
}
return number.floatValue();
} } | public class class_name {
public Float getFloat(final String key) {
Number number = (Number) map.get(key);
if (number == null) {
return null; // depends on control dependency: [if], data = [none]
}
if (number instanceof Float) {
return (Float) number; // depends on control dependency: [if], data = [none]
}
return number.floatValue();
} } |
public class class_name {
protected Message getSendMessage( String name )
{
PlainOldFixture plainOldFixture = new PlainOldFixture( target );
if ( plainOldFixture.canSend( name ) )
{
return plainOldFixture.send( name );
}
return getMessage(name, false, true);
} } | public class class_name {
protected Message getSendMessage( String name )
{
PlainOldFixture plainOldFixture = new PlainOldFixture( target );
if ( plainOldFixture.canSend( name ) )
{
return plainOldFixture.send( name ); // depends on control dependency: [if], data = [none]
}
return getMessage(name, false, true);
} } |
public class class_name {
@Provides
@Singleton
@Named(GuiceProperties.REST_SERVICES_PREFIX)
public String getRestServicesPrefix(ServletContext context)
{
String restPath = context.getInitParameter(RESTEASY_MAPPING_PREFIX);
if (restPath == null || restPath.isEmpty() || restPath.equals("/"))
{
return "";
}
else
{
return restPath;
}
} } | public class class_name {
@Provides
@Singleton
@Named(GuiceProperties.REST_SERVICES_PREFIX)
public String getRestServicesPrefix(ServletContext context)
{
String restPath = context.getInitParameter(RESTEASY_MAPPING_PREFIX);
if (restPath == null || restPath.isEmpty() || restPath.equals("/"))
{
return ""; // depends on control dependency: [if], data = [none]
}
else
{
return restPath; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public void persistJoinTable(JoinTableData joinTableData)
{
String joinTableName = joinTableData.getJoinTableName();
String joinColumnName = joinTableData.getJoinColumnName();
String invJoinColumnName = joinTableData.getInverseJoinColumnName();
Map<Object, Set<Object>> joinTableRecords = joinTableData.getJoinTableRecords();
DBCollection dbCollection = mongoDb.getCollection(joinTableName);
KunderaCoreUtils.printQuery("Persist join table:" + joinTableName, showQuery);
for (Object key : joinTableRecords.keySet())
{
Set<Object> values = joinTableRecords.get(key);
Object joinColumnValue = key;
for (Object childId : values)
{
DBObject dbObj = new BasicDBObject();
dbObj.put("_id", joinColumnValue.toString() + childId);
dbObj.put(joinColumnName, MongoDBUtils.populateValue(joinColumnValue, joinColumnValue.getClass()));
dbObj.put(invJoinColumnName, MongoDBUtils.populateValue(childId, childId.getClass()));
KunderaCoreUtils.printQuery("id:" + joinColumnValue.toString() + childId + " " + joinColumnName + ":"
+ joinColumnValue + " " + invJoinColumnName + ":" + childId, showQuery);
dbCollection.save(dbObj, getWriteConcern());
}
}
} } | public class class_name {
@Override
public void persistJoinTable(JoinTableData joinTableData)
{
String joinTableName = joinTableData.getJoinTableName();
String joinColumnName = joinTableData.getJoinColumnName();
String invJoinColumnName = joinTableData.getInverseJoinColumnName();
Map<Object, Set<Object>> joinTableRecords = joinTableData.getJoinTableRecords();
DBCollection dbCollection = mongoDb.getCollection(joinTableName);
KunderaCoreUtils.printQuery("Persist join table:" + joinTableName, showQuery);
for (Object key : joinTableRecords.keySet())
{
Set<Object> values = joinTableRecords.get(key);
Object joinColumnValue = key;
for (Object childId : values)
{
DBObject dbObj = new BasicDBObject();
dbObj.put("_id", joinColumnValue.toString() + childId); // depends on control dependency: [for], data = [childId]
dbObj.put(joinColumnName, MongoDBUtils.populateValue(joinColumnValue, joinColumnValue.getClass())); // depends on control dependency: [for], data = [none]
dbObj.put(invJoinColumnName, MongoDBUtils.populateValue(childId, childId.getClass())); // depends on control dependency: [for], data = [childId]
KunderaCoreUtils.printQuery("id:" + joinColumnValue.toString() + childId + " " + joinColumnName + ":"
+ joinColumnValue + " " + invJoinColumnName + ":" + childId, showQuery); // depends on control dependency: [for], data = [childId]
dbCollection.save(dbObj, getWriteConcern()); // depends on control dependency: [for], data = [none]
}
}
} } |
public class class_name {
public static int decodeBitWidth(int n)
{
if (n >= ONE.ordinal() && n <= TWENTY_FOUR.ordinal()) {
return n + 1;
}
else if (n == TWENTY_SIX.ordinal()) {
return 26;
}
else if (n == TWENTY_EIGHT.ordinal()) {
return 28;
}
else if (n == THIRTY.ordinal()) {
return 30;
}
else if (n == THIRTY_TWO.ordinal()) {
return 32;
}
else if (n == FORTY.ordinal()) {
return 40;
}
else if (n == FORTY_EIGHT.ordinal()) {
return 48;
}
else if (n == FIFTY_SIX.ordinal()) {
return 56;
}
else {
return 64;
}
} } | public class class_name {
public static int decodeBitWidth(int n)
{
if (n >= ONE.ordinal() && n <= TWENTY_FOUR.ordinal()) {
return n + 1; // depends on control dependency: [if], data = [none]
}
else if (n == TWENTY_SIX.ordinal()) {
return 26; // depends on control dependency: [if], data = [none]
}
else if (n == TWENTY_EIGHT.ordinal()) {
return 28; // depends on control dependency: [if], data = [none]
}
else if (n == THIRTY.ordinal()) {
return 30; // depends on control dependency: [if], data = [none]
}
else if (n == THIRTY_TWO.ordinal()) {
return 32; // depends on control dependency: [if], data = [none]
}
else if (n == FORTY.ordinal()) {
return 40; // depends on control dependency: [if], data = [none]
}
else if (n == FORTY_EIGHT.ordinal()) {
return 48; // depends on control dependency: [if], data = [none]
}
else if (n == FIFTY_SIX.ordinal()) {
return 56; // depends on control dependency: [if], data = [none]
}
else {
return 64; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private ResponseWrapper fetchContentFromCacheIfAvailable(
final RenderRequest req, final String serviceUrl) {
ResponseWrapper rslt = null; // default
final List<CacheTuple> cacheKeysToTry = new ArrayList<>();
// Don't use private-scope caching for anonymous users
if (req.getRemoteUser() != null) {
cacheKeysToTry.add(
// Private-scope cache key
new CacheTuple(
serviceUrl,
req.getPortletMode().toString(),
req.getWindowState().toString(),
req.getRemoteUser()));
}
cacheKeysToTry.add(
// Public-scope cache key
new CacheTuple(
serviceUrl,
req.getPortletMode().toString(),
req.getWindowState().toString()));
for (CacheTuple key : cacheKeysToTry) {
final Element cacheElement = this.responseCache.get(key);
if (cacheElement != null) {
rslt = (ResponseWrapper) cacheElement.getObjectValue();
break;
}
}
return rslt;
} } | public class class_name {
private ResponseWrapper fetchContentFromCacheIfAvailable(
final RenderRequest req, final String serviceUrl) {
ResponseWrapper rslt = null; // default
final List<CacheTuple> cacheKeysToTry = new ArrayList<>();
// Don't use private-scope caching for anonymous users
if (req.getRemoteUser() != null) {
cacheKeysToTry.add(
// Private-scope cache key
new CacheTuple(
serviceUrl,
req.getPortletMode().toString(),
req.getWindowState().toString(),
req.getRemoteUser())); // depends on control dependency: [if], data = [none]
}
cacheKeysToTry.add(
// Public-scope cache key
new CacheTuple(
serviceUrl,
req.getPortletMode().toString(),
req.getWindowState().toString()));
for (CacheTuple key : cacheKeysToTry) {
final Element cacheElement = this.responseCache.get(key);
if (cacheElement != null) {
rslt = (ResponseWrapper) cacheElement.getObjectValue(); // depends on control dependency: [if], data = [none]
break;
}
}
return rslt;
} } |
public class class_name {
private void initializeFloatingActionButton() {
floatingActionButton = findViewById(R.id.floating_action_button);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
floatingActionButton.show();
floatingActionButton.setOnClickListener(createFloatingActionButtonListener());
} else {
floatingActionButton.hide();
}
} } | public class class_name {
private void initializeFloatingActionButton() {
floatingActionButton = findViewById(R.id.floating_action_button);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
floatingActionButton.show(); // depends on control dependency: [if], data = [none]
floatingActionButton.setOnClickListener(createFloatingActionButtonListener()); // depends on control dependency: [if], data = [none]
} else {
floatingActionButton.hide(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void init(Map configProperties){
if(configProperties != null && configProperties.size() > 0){
ElasticSearchPropertiesFilePlugin.configProperties = configProperties;
initType = 1;
}
} } | public class class_name {
public static void init(Map configProperties){
if(configProperties != null && configProperties.size() > 0){
ElasticSearchPropertiesFilePlugin.configProperties = configProperties; // depends on control dependency: [if], data = [none]
initType = 1; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public void tellThreadsToStop() {
super.tellThreadsToStop();
for (DynamicThread thread : poolThreads) {
stopThread(thread.getThreadName(), true);
}
} } | public class class_name {
@Override
public void tellThreadsToStop() {
super.tellThreadsToStop();
for (DynamicThread thread : poolThreads) {
stopThread(thread.getThreadName(), true); // depends on control dependency: [for], data = [thread]
}
} } |
public class class_name {
public final void close(final int retryCloseTimeoutMs, final Consumer<Thread> closeFailAction)
{
isRunning = false;
final Thread thread = this.thread.getAndSet(TOMBSTONE);
if (null == thread)
{
try
{
agent.onClose();
}
catch (final Throwable throwable)
{
errorHandler.onError(throwable);
}
finally
{
isClosed = true;
}
}
else if (TOMBSTONE != thread)
{
while (true)
{
try
{
thread.join(retryCloseTimeoutMs);
if (!thread.isAlive() || isClosed)
{
return;
}
if (null == closeFailAction)
{
System.err.println(
"timeout waiting for agent '" + agent.roleName() + "' to close, " + "retrying...");
}
else
{
closeFailAction.accept(thread);
}
if (!thread.isInterrupted())
{
thread.interrupt();
}
}
catch (final InterruptedException ignore)
{
if (null == closeFailAction)
{
System.err.println(
"close failed for agent '" + agent.roleName() + "' due to InterruptedException");
}
else
{
closeFailAction.accept(thread);
}
Thread.currentThread().interrupt();
return;
}
}
}
} } | public class class_name {
public final void close(final int retryCloseTimeoutMs, final Consumer<Thread> closeFailAction)
{
isRunning = false;
final Thread thread = this.thread.getAndSet(TOMBSTONE);
if (null == thread)
{
try
{
agent.onClose(); // depends on control dependency: [try], data = [none]
}
catch (final Throwable throwable)
{
errorHandler.onError(throwable);
} // depends on control dependency: [catch], data = [none]
finally
{
isClosed = true;
}
}
else if (TOMBSTONE != thread)
{
while (true)
{
try
{
thread.join(retryCloseTimeoutMs); // depends on control dependency: [try], data = [none]
if (!thread.isAlive() || isClosed)
{
return; // depends on control dependency: [if], data = [none]
}
if (null == closeFailAction)
{
System.err.println(
"timeout waiting for agent '" + agent.roleName() + "' to close, " + "retrying..."); // depends on control dependency: [if], data = [none]
}
else
{
closeFailAction.accept(thread); // depends on control dependency: [if], data = [none]
}
if (!thread.isInterrupted())
{
thread.interrupt(); // depends on control dependency: [if], data = [none]
}
}
catch (final InterruptedException ignore)
{
if (null == closeFailAction)
{
System.err.println(
"close failed for agent '" + agent.roleName() + "' due to InterruptedException"); // depends on control dependency: [if], data = [none]
}
else
{
closeFailAction.accept(thread); // depends on control dependency: [if], data = [none]
}
Thread.currentThread().interrupt();
return;
} // depends on control dependency: [catch], data = [none]
}
}
} } |
public class class_name {
public static ASN1OctetString getInstance(
Object obj)
{
if (obj == null || obj instanceof ASN1OctetString)
{
return (ASN1OctetString)obj;
}
if (obj instanceof ASN1TaggedObject)
{
return getInstance(((ASN1TaggedObject)obj).getObject());
}
if (obj instanceof ASN1Sequence)
{
Vector v = new Vector();
Enumeration e = ((ASN1Sequence)obj).getObjects();
while (e.hasMoreElements())
{
v.addElement(e.nextElement());
}
return new BERConstructedOctetString(v);
}
throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
} } | public class class_name {
public static ASN1OctetString getInstance(
Object obj)
{
if (obj == null || obj instanceof ASN1OctetString)
{
return (ASN1OctetString)obj;
// depends on control dependency: [if], data = [none]
}
if (obj instanceof ASN1TaggedObject)
{
return getInstance(((ASN1TaggedObject)obj).getObject());
// depends on control dependency: [if], data = [none]
}
if (obj instanceof ASN1Sequence)
{
Vector v = new Vector();
Enumeration e = ((ASN1Sequence)obj).getObjects();
while (e.hasMoreElements())
{
v.addElement(e.nextElement());
// depends on control dependency: [while], data = [none]
}
return new BERConstructedOctetString(v);
// depends on control dependency: [if], data = [none]
}
throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
} } |