code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
---|---|
public class class_name {
    /**
     * Validates that the response body may be read at this point in the
     * HTTP exchange.
     * <p>
     * With immediate/early reads enabled (LI4335) the response headers only
     * need to have been parsed; otherwise the full request message must have
     * been sent before any body data may be consumed.
     *
     * @return whether an incoming body is expected, per {@code isIncomingBodyValid()}
     * @throws IOException if the caller attempts to read the body too early
     */
    private boolean checkBodyValidity() throws IOException {
        // LI4335 - allow response body reading if early reads are in place
        boolean earlyReadsActive = isImmediateReadEnabled() || this.bEarlyReads;
        if (earlyReadsActive) {
            if (!headersParsed()) {
                // body buffers were requested before the minimum request headers went out
                IOException failure = new IOException("Request headers not sent yet");
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Attempt to read response prior to sendRequest");
                }
                throw failure;
            }
            // headers parsed: reading may proceed
        } else if (!isMessageSent()) {
            // without early reads, the request must be fully written first
            IOException failure = new IOException("Request not finished yet");
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Attempt to read response prior to finishRequest");
            }
            throw failure;
        }
        // finally ask the message state whether body data should actually follow
        return isIncomingBodyValid();
} } | public class class_name {
private boolean checkBodyValidity() throws IOException {
// LI4335 - allow response body reading if early reads are in place
if (isImmediateReadEnabled() || this.bEarlyReads) {
if (!headersParsed()) {
// this means they are requesting body buffers prior to sending
// the minimum request headers
IOException ioe = new IOException("Request headers not sent yet");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Attempt to read response prior to sendRequest"); // depends on control dependency: [if], data = [none]
}
throw ioe;
}
// otherwise continue as normal
} else if (!isMessageSent()) {
// request message must be fully sent prior to reading any part of
// the response body
IOException ioe = new IOException("Request not finished yet");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Attempt to read response prior to finishRequest");
}
throw ioe;
}
// check to see if we should be reading for data
return isIncomingBodyValid();
} } |
public class class_name {
    /**
     * Renders every known key's raw sourced value, one per line, with the keys
     * in ascending alphabetical order. Missing values render as "null".
     *
     * @return the newline-separated dump (no trailing newline)
     */
    @Override
    @Trivial
    public String dump() {
        assertNotClosed();
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        // TreeSet gives a stable, sorted ordering for the dump
        for (String key : new TreeSet<String>(getKeySet())) {
            if (!first) {
                sb.append("\n");
            }
            first = false;
            // StringBuilder.append(Object) renders a null reference as "null"
            sb.append(getRawSourcedValue(key));
        }
        return sb.toString();
} } | public class class_name {
@Override
@Trivial
public String dump() {
assertNotClosed();
StringBuilder sb = new StringBuilder();
Set<String> keys = getKeySet();
keys = new TreeSet<String>(keys);
Iterator<String> keyItr = keys.iterator();
while (keyItr.hasNext()) {
String key = keyItr.next();
SourcedValue rawSourcedValue = getRawSourcedValue(key);
if (rawSourcedValue == null) {
sb.append("null"); // depends on control dependency: [if], data = [none]
} else {
sb.append(rawSourcedValue); // depends on control dependency: [if], data = [(rawSourcedValue]
}
if (keyItr.hasNext()) {
sb.append("\n"); // depends on control dependency: [if], data = [none]
}
}
return sb.toString();
} } |
public class class_name {
    /**
     * Resolves the "major" URL parameter for this request.
     * <p>
     * An explicitly configured custom major parameter takes precedence;
     * otherwise the endpoint's declared major-parameter position is looked up
     * in the supplied URL parameters.
     *
     * @return the major parameter, or empty if none is declared or the
     *         declared position lies beyond the available parameters
     */
    public Optional<String> getMajorUrlParameter() {
        // an explicitly set custom major parameter always wins
        if (customMajorParam != null) {
            return Optional.of(customMajorParam);
        }
        Optional<Integer> position = endpoint.getMajorParameterPosition();
        if (position.isPresent() && position.get() < urlParameters.length) {
            return Optional.of(urlParameters[position.get()]);
        }
        // no declared position, or position out of range
        return Optional.empty();
} } | public class class_name {
public Optional<String> getMajorUrlParameter() {
if (customMajorParam != null) {
return Optional.of(customMajorParam); // depends on control dependency: [if], data = [(customMajorParam]
}
Optional<Integer> majorParameterPosition = endpoint.getMajorParameterPosition();
if (!majorParameterPosition.isPresent()) {
return Optional.empty(); // depends on control dependency: [if], data = [none]
}
if (majorParameterPosition.get() >= urlParameters.length) {
return Optional.empty(); // depends on control dependency: [if], data = [none]
}
return Optional.of(urlParameters[majorParameterPosition.get()]);
} } |
public class class_name {
    /**
     * Drops every feature whose observed count is below the given threshold.
     *
     * @param featureCounts       feature -> count map, modified in place
     * @param rareFeatureThreshold minimum count a feature must reach to be kept
     */
    protected void removeRareFeatures(Map<Object, Double> featureCounts, int rareFeatureThreshold) {
        logger.debug("removeRareFeatures()");
        // removeIf uses the entry-set iterator internally, so this is safe in-place removal
        featureCounts.entrySet().removeIf(entry -> entry.getValue() < rareFeatureThreshold);
} } | public class class_name {
protected void removeRareFeatures(Map<Object, Double> featureCounts, int rareFeatureThreshold) {
logger.debug("removeRareFeatures()");
Iterator<Map.Entry<Object, Double>> it = featureCounts.entrySet().iterator();
while(it.hasNext()) {
Map.Entry<Object, Double> entry = it.next();
if(entry.getValue()<rareFeatureThreshold) {
it.remove(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
    /**
     * Dispatches the given GWT command and returns its response.
     *
     * @param request the command to execute; may be null
     * @return the dispatcher's response, or null when no request was given
     */
    public CommandResponse execute(GwtCommand request) {
        if (request == null) {
            return null;
        }
        return commandDispatcher.execute(request.getCommandName(), request.getCommandRequest(),
                request.getUserToken(), request.getLocale());
} } | public class class_name {
public CommandResponse execute(GwtCommand request) {
if (request != null) {
return commandDispatcher.execute(request.getCommandName(), request.getCommandRequest(),
request.getUserToken(), request.getLocale()); // depends on control dependency: [if], data = [(request]
}
return null;
} } |
public class class_name {
    /**
     * Collects all groups that belong to the chains touched by the given CA
     * atoms but are not themselves represented by any of those atoms.
     *
     * @param ca representative (e.g. C-alpha) atoms of the aligned groups
     * @return groups from the relevant chains that have no atom in {@code ca}
     */
    public static List<Group> getUnalignedGroups(Atom[] ca) {
        Set<Chain> chains = new HashSet<Chain>();
        Set<Group> caGroups = new HashSet<Group>();

        // Locate the parent structure (if reachable) through the first atom.
        Structure s = null;
        if (ca.length > 0) {
            Group firstGroup = ca[0].getGroup();
            if (firstGroup != null) {
                Chain firstChain = firstGroup.getChain();
                if (firstChain != null) {
                    s = firstChain.getStructure();
                }
            }
        }
        // Seed the chain set with every chain of the structure, when known.
        if (s != null) {
            chains.addAll(s.getChains(0));
        }

        // Record each atom's group, and its chain if it has one.
        for (Atom a : ca) {
            Group g = a.getGroup();
            if (g == null) {
                continue;
            }
            caGroups.add(g);
            Chain c = g.getChain();
            if (c != null) {
                chains.add(c);
            }
        }

        // Any group in those chains that is not covered by ca is "unaligned".
        List<Group> unadded = new ArrayList<Group>();
        for (Chain c : chains) {
            for (Group g : c.getAtomGroups()) {
                if (!caGroups.contains(g)) {
                    unadded.add(g);
                }
            }
        }
        return unadded;
} } | public class class_name {
public static List<Group> getUnalignedGroups(Atom[] ca) {
Set<Chain> chains = new HashSet<Chain>();
Set<Group> caGroups = new HashSet<Group>();
// Create list of all chains in this structure
Structure s = null;
if (ca.length > 0) {
Group g = ca[0].getGroup();
if (g != null) {
Chain c = g.getChain();
if (c != null) {
s = c.getStructure(); // depends on control dependency: [if], data = [none]
}
}
}
if (s != null) {
// Add all chains from the structure
for (Chain c : s.getChains(0)) {
chains.add(c); // depends on control dependency: [for], data = [c]
}
}
// Add groups and chains from ca
for (Atom a : ca) {
Group g = a.getGroup();
if (g != null) {
caGroups.add(g); // depends on control dependency: [if], data = [(g]
Chain c = g.getChain();
if (c != null) {
chains.add(c); // depends on control dependency: [if], data = [(c]
}
}
}
// Iterate through all chains, finding groups not in ca
List<Group> unadded = new ArrayList<Group>();
for (Chain c : chains) {
for (Group g : c.getAtomGroups()) {
if (!caGroups.contains(g)) {
unadded.add(g); // depends on control dependency: [if], data = [none]
}
}
}
return unadded;
} } |
public class class_name {
    /**
     * Removes every expired entry from the backing map.
     * <p>
     * Iterates {@code _realMap}'s entry set and drops entries whose
     * {@code TimedValue} reports itself expired, using the iterator's own
     * {@code remove()} so the map is never structurally modified mid-traversal.
     */
    private void purge()
    {
        if (TraceComponent.isAnyTracingEnabled() && _tc.isEntryEnabled()) SibTr.entry(this, _tc, "purge");
        Iterator<Map.Entry<K,TimedValue<V>>> it = _realMap.entrySet().iterator();
        while (it.hasNext())
        {
            Map.Entry<K,TimedValue<V>> entry = it.next();
            TimedValue<V> value = entry.getValue();
            // NOTE: "hasExipred" is the (misspelled) method name exposed by TimedValue's API.
            if (value.hasExipred())
            {
                it.remove();
                if (TraceComponent.isAnyTracingEnabled() && _tc.isDebugEnabled())
                {
                    SibTr.debug(_tc, "The value with the key " + entry.getKey() + " has expired");
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && _tc.isEntryEnabled()) SibTr.exit(this, _tc, "purge");
} } | public class class_name {
private void purge()
{
if (TraceComponent.isAnyTracingEnabled() && _tc.isEntryEnabled()) SibTr.entry(this, _tc, "purge");
Iterator<Map.Entry<K,TimedValue<V>>> it = _realMap.entrySet().iterator();
while (it.hasNext())
{
Map.Entry<K,TimedValue<V>> entry = it.next();
TimedValue<V> value = entry.getValue();
if (value.hasExipred())
{
it.remove(); // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled() && _tc.isDebugEnabled())
{
SibTr.debug(_tc, "The value with the key " + entry.getKey() + " has expired"); // depends on control dependency: [if], data = [none]
}
}
}
if (TraceComponent.isAnyTracingEnabled() && _tc.isEntryEnabled()) SibTr.exit(this, _tc, "purge");
} } |
public class class_name {
    /**
     * Creates a JAXB unmarshaller for the given class.
     *
     * @param clazz the class whose JAXB context backs the unmarshaller
     * @return a new unmarshaller
     * @throws RuntimeException (via ExceptionUtil.unchecked) wrapping any JAXBException
     */
    public static Unmarshaller createUnmarshaller(Class clazz) {
        try {
            return getJaxbContext(clazz).createUnmarshaller();
        } catch (JAXBException e) {
            // convert the checked JAXB failure into the project's unchecked form
            throw ExceptionUtil.unchecked(e);
        }
} } | public class class_name {
public static Unmarshaller createUnmarshaller(Class clazz) {
try {
JAXBContext jaxbContext = getJaxbContext(clazz);
return jaxbContext.createUnmarshaller(); // depends on control dependency: [try], data = [none]
} catch (JAXBException e) {
throw ExceptionUtil.unchecked(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Deletes the Metadata-extension references for a table, if the metadata
     * reference table exists in the GeoPackage.
     *
     * @param geoPackage GeoPackage to delete from
     * @param table      table name whose metadata references are removed
     * @throws GeoPackageException if the underlying SQL operation fails
     */
    public static void deleteMetadata(GeoPackageCore geoPackage, String table) {
        MetadataReferenceDao referenceDao = geoPackage
                .getMetadataReferenceDao();
        try {
            // only attempt the delete when the extension table is present
            if (referenceDao.isTableExists()) {
                referenceDao.deleteByTableName(table);
            }
        } catch (SQLException e) {
            throw new GeoPackageException(
                    "Failed to delete Metadata extension. GeoPackage: "
                            + geoPackage.getName() + ", Table: " + table, e);
        }
} } | public class class_name {
public static void deleteMetadata(GeoPackageCore geoPackage, String table) {
MetadataReferenceDao metadataReferenceDao = geoPackage
.getMetadataReferenceDao();
try {
if (metadataReferenceDao.isTableExists()) {
metadataReferenceDao.deleteByTableName(table); // depends on control dependency: [if], data = [none]
}
} catch (SQLException e) {
throw new GeoPackageException(
"Failed to delete Metadata extension. GeoPackage: "
+ geoPackage.getName() + ", Table: " + table, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Defines this body's domain polygon from the given vertices.
     * <p>
     * The position is moved to the polygon's centroid and the vertices are
     * re-expressed relative to that centroid before being installed.
     *
     * @param verts the polygon vertices, in any order (they are ordered internally)
     */
    public void setDomain(Vec2... verts) {
        dom = new Vertex(this);
        vertexCount = verts.length;
        this.setOrient(0.0f);
        // Compute the centroid once (the original evaluated
        // getCentroid(orderVertex(verts)) twice, once per coordinate).
        Vec2 centroid = getCentroid(orderVertex(verts));
        getPosition().set(centroid.x, centroid.y);
        // Translate each vertex into centroid-relative coordinates.
        Vec2[] halfPoly = new Vec2[verts.length];
        for (int i = 0; i < verts.length; i++) {
            halfPoly[i] = new Vec2(verts[i].x - getPosition().x, verts[i].y - getPosition().y);
        }
        set(halfPoly);
        initialize();
        hasDefaultDomain = false;
} } | public class class_name {
public void setDomain(Vec2 ... verts){
dom = new Vertex(this);
vertexCount = verts.length;
this.setOrient(0.0f);
getPosition().set( getCentroid(orderVertex(verts)).x, getCentroid(orderVertex(verts)).y );
Vec2[] halfPoly = new Vec2[verts.length];
for (int i = 0; i < verts.length; i++) {
halfPoly[i] = new Vec2(verts[i].x - getPosition().x, verts[i].y - getPosition().y); // depends on control dependency: [for], data = [i]
}
set( halfPoly );
initialize();
hasDefaultDomain = false;
} } |
public class class_name {
    /**
     * Works around a comment-indentation bug: when the bug situation is
     * detected and fixable, the first comment line is re-indented and the
     * context is wrapped so subsequent comment lines are indented too.
     *
     * @param context the active replacer context
     * @param comment the comment being formatted
     * @return a wrapped context when the fix applied, otherwise the original context
     */
    @SuppressWarnings("static-method")
    public ITextReplacerContext fix(final ITextReplacerContext context, IComment comment) {
        final IHiddenRegion hiddenRegion = comment.getHiddenRegion();
        // Guard: nothing to do unless the bug is both present and fixable.
        if (!detectBugSituation(hiddenRegion) || !fixBug(hiddenRegion)) {
            return context;
        }
        // Re-indent the first comment line.
        final ITextRegionAccess access = comment.getTextRegionAccess();
        final ITextSegment target = access.regionForOffset(comment.getOffset(), 0);
        context.addReplacement(target.replaceWith(context.getIndentationString(1)));
        // Wrap the context so the comment's remaining lines get indented as well.
        return new FixedReplacementContext(context);
} } | public class class_name {
@SuppressWarnings("static-method")
public ITextReplacerContext fix(final ITextReplacerContext context, IComment comment) {
final IHiddenRegion hiddenRegion = comment.getHiddenRegion();
if (detectBugSituation(hiddenRegion) && fixBug(hiddenRegion)) {
// Indentation of the first comment line
final ITextRegionAccess access = comment.getTextRegionAccess();
final ITextSegment target = access.regionForOffset(comment.getOffset(), 0);
context.addReplacement(target.replaceWith(context.getIndentationString(1))); // depends on control dependency: [if], data = [none]
// Indentation of the comment's lines
return new FixedReplacementContext(context); // depends on control dependency: [if], data = [none]
}
return context;
} } |
public class class_name {
    /**
     * Selects the sheet at the given index as the current working sheet.
     * <p>
     * If the index is invalid (the workbook throws IllegalArgumentException)
     * or the workbook hands back {@code null}, a brand-new sheet is created
     * and used instead, so {@code this.sheet} is never null afterwards.
     *
     * @param sheetIndex zero-based sheet index
     * @return this instance, for call chaining
     */
    @SuppressWarnings("unchecked")
    public T setSheet(int sheetIndex) {
        try {
            this.sheet = this.workbook.getSheetAt(sheetIndex);
        } catch (IllegalArgumentException e) {
            // index out of range: fall back to a freshly created sheet
            this.sheet = this.workbook.createSheet();
        }
        // defensive: some workbook implementations may return null
        if (null == this.sheet) {
            this.sheet = this.workbook.createSheet();
        }
        return (T) this;
} } | public class class_name {
@SuppressWarnings("unchecked")
public T setSheet(int sheetIndex) {
try {
this.sheet = this.workbook.getSheetAt(sheetIndex);
// depends on control dependency: [try], data = [none]
} catch (IllegalArgumentException e) {
this.sheet = this.workbook.createSheet();
}
// depends on control dependency: [catch], data = [none]
if (null == this.sheet) {
this.sheet = this.workbook.createSheet();
// depends on control dependency: [if], data = [none]
}
return (T) this;
} } |
public class class_name {
    /**
     * Tears down the schema and disconnects from the cluster.
     * <p>
     * Buckets are removed only when the configured schema operation is
     * "create-drop" (case-insensitive); the cluster connection is always
     * closed and the reference cleared.
     */
    @Override
    public void dropSchema()
    {
        boolean dropOnClose = operation != null && "create-drop".equalsIgnoreCase(operation);
        if (dropOnClose)
        {
            for (TableInfo tableInfo : tableInfos)
            {
                removeBucket(tableInfo.getTableName());
            }
        }
        cluster.disconnect();
        cluster = null;
} } | public class class_name {
@Override
public void dropSchema()
{
if (operation != null && ("create-drop").equalsIgnoreCase(operation))
{
for (TableInfo tableInfo : tableInfos)
{
removeBucket(tableInfo.getTableName()); // depends on control dependency: [for], data = [tableInfo]
}
}
cluster.disconnect();
cluster = null;
} } |
public class class_name {
    /**
     * Appends the given rules to this WebACL, lazily creating the backing
     * list on first use.
     *
     * @param rules rules to add
     * @return this WebACL, for call chaining
     */
    public WebACL withRules(ActivatedRule... rules) {
        if (this.rules == null) {
            // presize to the incoming count
            setRules(new java.util.ArrayList<ActivatedRule>(rules.length));
        }
        java.util.Collections.addAll(this.rules, rules);
        return this;
} } | public class class_name {
public WebACL withRules(ActivatedRule... rules) {
if (this.rules == null) {
setRules(new java.util.ArrayList<ActivatedRule>(rules.length)); // depends on control dependency: [if], data = [none]
}
for (ActivatedRule ele : rules) {
this.rules.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
    /**
     * Resolves the tenant named in the URI variables, falling back to the
     * default tenant when no name was supplied.
     *
     * @param variableMap URI variables; may contain a "tenant" entry
     * @return the resolved tenant, never null
     * @throws NotFoundException if no tenant with the resolved name exists
     */
    private Tenant getTenant(Map<String, String> variableMap) {
        String requestedName = variableMap.get("tenant");
        // empty/missing name means "use the default tenant"
        String tenantName = Utils.isEmpty(requestedName)
                ? TenantService.instance().getDefaultTenantName()
                : requestedName;
        Tenant tenant = TenantService.instance().getTenant(tenantName);
        if (tenant == null) {
            throw new NotFoundException("Unknown tenant: " + tenantName);
        }
        return tenant;
} } | public class class_name {
private Tenant getTenant(Map<String, String> variableMap) {
String tenantName = variableMap.get("tenant");
if (Utils.isEmpty(tenantName)) {
tenantName = TenantService.instance().getDefaultTenantName();
// depends on control dependency: [if], data = [none]
}
Tenant tenant = TenantService.instance().getTenant(tenantName);
if (tenant == null) {
throw new NotFoundException("Unknown tenant: " + tenantName);
}
return tenant;
} } |
public class class_name {
    /**
     * Closes the underlying SMB file handle and releases associated state.
     * <p>
     * Any {@code SmbException} raised while closing the handle is converted to
     * an {@code IOException}. The temporary buffer and handle references are
     * always cleared (even on failure), and the file itself is closed when
     * this stream owns it exclusively ({@code unsharedFile}).
     *
     * @throws IOException if closing the handle (or the unshared file) fails
     */
    @Override
    public void close () throws IOException {
        try {
            SmbFileHandleImpl h = this.handle;
            if ( h != null ) {
                h.close();
            }
        }
        catch ( SmbException se ) {
            // translate the SMB-specific failure into the plain IOException
            // the stream contract expects
            throw seToIoe(se);
        }
        finally {
            // always drop references so the stream cannot be reused
            this.tmp = null;
            this.handle = null;
            if ( this.unsharedFile ) {
                this.file.close();
            }
        }
} } | public class class_name {
@Override
public void close () throws IOException {
try {
SmbFileHandleImpl h = this.handle;
if ( h != null ) {
h.close();
}
}
catch ( SmbException se ) {
throw seToIoe(se);
}
finally {
this.tmp = null;
this.handle = null;
if ( this.unsharedFile ) {
this.file.close(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
    /**
     * Offers the vectored message to the publication, first ensuring that
     * slot 0 always carries the canonical header vector.
     *
     * @param vectors the buffer vectors to publish; slot 0 is overwritten with the header
     * @return the publication's offer result (new position or a back-pressure/error code)
     */
    public long offer(final DirectBufferVector[] vectors)
    {
        // keep the header vector pinned in the first slot
        if (vectors[0] != headerVector)
        {
            vectors[0] = headerVector;
        }
        return publication.offer(vectors, null);
} } | public class class_name {
public long offer(final DirectBufferVector[] vectors)
{
if (headerVector != vectors[0])
{
vectors[0] = headerVector; // depends on control dependency: [if], data = [none]
}
return publication.offer(vectors, null);
} } |
public class class_name {
    /**
     * Recursively builds a dot-separated name for a DOM node from its local
     * names, rooted just below the document node (the document root's direct
     * child contributes the first segment without a leading dot).
     *
     * @param node   the node whose qualified name is being built
     * @param buffer the accumulator for the dotted name
     */
    private static void buildNodeName(Node node, StringBuffer buffer) {
        Node parent = node.getParentNode();
        // the document node itself contributes nothing
        if (parent == null) {
            return;
        }
        buildNodeName(parent, buffer);
        // Original also re-checked node.getParentNode() != null here, which is
        // always true after the early return above — the redundant test is dropped.
        if (parent.getParentNode() != null) {
            buffer.append(".");
        }
        buffer.append(node.getLocalName());
} } | public class class_name {
private static void buildNodeName(Node node, StringBuffer buffer) {
if (node.getParentNode() == null) {
return; // depends on control dependency: [if], data = [none]
}
buildNodeName(node.getParentNode(), buffer);
if (node.getParentNode() != null
&& node.getParentNode().getParentNode() != null) {
buffer.append("."); // depends on control dependency: [if], data = [none]
}
buffer.append(node.getLocalName());
} } |
public class class_name {
    /**
     * Appends the given function names to this pipeline configuration,
     * lazily creating the backing list on first use.
     *
     * @param functions function names to add
     * @return this configuration, for call chaining
     */
    public PipelineConfig withFunctions(String... functions) {
        if (this.functions == null) {
            // presize to the incoming count
            setFunctions(new java.util.ArrayList<String>(functions.length));
        }
        java.util.Collections.addAll(this.functions, functions);
        return this;
} } | public class class_name {
public PipelineConfig withFunctions(String... functions) {
if (this.functions == null) {
setFunctions(new java.util.ArrayList<String>(functions.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : functions) {
this.functions.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
    /**
     * Returns the multimap stored under the given index, creating, caching and
     * returning a fresh one when none exists yet.
     * <p>
     * The created multimap stores its values in concurrent sets; the multimap
     * itself is additionally synchronized when {@code concurrent} is set.
     *
     * @param index the store key
     * @return the existing or newly created multimap for that key
     */
    public Multimap<String, String> getOrCreate(String index) {
        Multimap<String, String> existing = storeMap.get(index);
        if (existing != null) {
            return existing;
        }
        // build a set-backed multimap whose value sets tolerate concurrent access
        SetMultimap<String, String> multimap =
                Multimaps.newSetMultimap(new HashMap<String, Collection<String>>(),
                        new Supplier<Set<String>>() {
                            public Set<String> get() {
                                return Sets.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
                            }
                        });
        Multimap<String, String> created =
                concurrent ? Multimaps.synchronizedSetMultimap(multimap) : multimap;
        storeMap.put(index, created);
        return created;
} } | public class class_name {
public Multimap<String, String> getOrCreate(String index) {
Multimap<String, String> mmap = storeMap.get(index);
if (mmap == null) {
SetMultimap<String, String> multimap =
Multimaps.newSetMultimap(new HashMap<String, Collection<String>>(),
new Supplier<Set<String>>() {
public Set<String> get() {
return Sets.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
}
});
mmap = concurrent ? Multimaps.synchronizedSetMultimap(multimap) : multimap; // depends on control dependency: [if], data = [none]
storeMap.put(index,mmap); // depends on control dependency: [if], data = [none]
}
return mmap;
} } |
public class class_name {
    /**
     * Serializes this REST parameter (and its nested parameters) into a UNode
     * document. The "_type" and "_required" value nodes are emitted only when
     * set.
     *
     * @return the UNode map node describing this parameter
     */
    public UNode toDoc() {
        UNode paramNode = UNode.createMapNode(m_name);
        if (!Utils.isEmpty(m_type)) {
            paramNode.addValueNode("_type", m_type);
        }
        if (m_isRequired) {
            paramNode.addValueNode("_required", Boolean.toString(m_isRequired));
        }
        // iterating an empty list is a no-op, so no explicit size guard is needed
        for (RESTParameter param : m_parameters) {
            paramNode.addChildNode(param.toDoc());
        }
        return paramNode;
} } | public class class_name {
public UNode toDoc() {
UNode paramNode = UNode.createMapNode(m_name);
if (!Utils.isEmpty(m_type)) {
paramNode.addValueNode("_type", m_type); // depends on control dependency: [if], data = [none]
}
if (m_isRequired) {
paramNode.addValueNode("_required", Boolean.toString(m_isRequired)); // depends on control dependency: [if], data = [(m_isRequired)]
}
if (m_parameters.size() > 0) {
for (RESTParameter param : m_parameters) {
paramNode.addChildNode(param.toDoc()); // depends on control dependency: [for], data = [param]
}
}
return paramNode;
} } |
public class class_name {
    /**
     * Inserts a point, dispatching to the collinear or non-collinear insertion
     * routine depending on the current triangulation state.
     *
     * @param vertices the current vertex set (used only in the collinear case)
     * @param p        the point to insert
     * @return the triangle produced by the insertion
     */
    private Triangle insertPointSimple(Set<Vector3> vertices, Vector3 p) {
        return allCollinear ? insertColinear(vertices, p) : insertNonColinear(p);
} } | public class class_name {
private Triangle insertPointSimple(Set<Vector3> vertices, Vector3 p) {
if (!allCollinear) {
return insertNonColinear(p);
// depends on control dependency: [if], data = [none]
} else {
return insertColinear(vertices, p);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Builds the RetryOperations bean definition for the given XML element,
     * wiring in the retry policy and, when configured, a reference to a
     * back-off policy bean.
     *
     * @param element       the XML configuration element
     * @param parserContext the active parser context
     * @return the assembled bean definition
     */
    private static BeanDefinition buildRetryOperationDefinition(Element element,
            ParserContext parserContext) {
        BeanDefinitionBuilder builder = BeanDefinitionBuilder
                .rootBeanDefinition(RETRY_OPERATIONS_CLASS_NAME);
        builder.addPropertyValue("retryPolicy",
                buildRetryPolicyDefinition(element, parserContext));
        // optional back-off policy: only wired when the attribute has text
        String backOffPolicyBeanName = element.getAttribute(BACK_OFF_POLICY);
        if (StringUtils.hasText(backOffPolicyBeanName)) {
            builder.addPropertyReference(
                    Conventions.attributeNameToPropertyName(BACK_OFF_POLICY),
                    backOffPolicyBeanName);
        }
        return builder.getBeanDefinition();
} } | public class class_name {
private static BeanDefinition buildRetryOperationDefinition(Element element,
ParserContext parserContext) {
BeanDefinitionBuilder builder = BeanDefinitionBuilder
.rootBeanDefinition(RETRY_OPERATIONS_CLASS_NAME);
builder.addPropertyValue("retryPolicy",
buildRetryPolicyDefinition(element, parserContext));
if (StringUtils.hasText(element.getAttribute(BACK_OFF_POLICY))) {
String backOffPolicyBeanName = element.getAttribute(BACK_OFF_POLICY);
builder.addPropertyReference(
Conventions.attributeNameToPropertyName(BACK_OFF_POLICY),
backOffPolicyBeanName); // depends on control dependency: [if], data = [none]
}
return builder.getBeanDefinition();
} } |
public class class_name {
    /**
     * Returns true when the ORDER BY columns provably force a unique row
     * ordering: every table alias referenced by the statement must contribute
     * ORDER BY expressions that cover some UNIQUE index (or primary key) of
     * its table.
     * <p>
     * The check is plan-independent — it does not matter whether any such
     * index is actually chosen by the planner.
     *
     * @return true if every table scan's ordering is proven deterministic;
     *         false otherwise (including FROM-clause subqueries, which are
     *         not handled here, and tables absent from the ORDER BY clause)
     */
    protected boolean orderByColumnsCoverUniqueKeys() {
        // In theory, if EVERY table in the query has a uniqueness constraint
        // (primary key or other unique index) on columns that are all listed in the ORDER BY values,
        // the result is deterministic.
        // This holds regardless of whether the associated index is actually used in the selected plan,
        // so this check is plan-independent.
        //
        // baseTableAliases associates table aliases with the order by
        // expressions which reference them. Presumably by using
        // table aliases we will map table scans to expressions rather
        // than tables to expressions, and not confuse ourselves with
        // different instances of the same table in self joins.
        HashMap<String, List<AbstractExpression> > baseTableAliases =
                new HashMap<>();
        for (ParsedColInfo col : orderByColumns()) {
            AbstractExpression expr = col.m_expression;
            //
            // Compute the set of tables mentioned in the expression.
            // 1. Search out all the TVEs.
            // 2. Throw the aliases of the tables of each of these into a HashSet.
            //    The table must have an alias. It might not have a name.
            // 3. If the HashSet has size > 1 we can't use this expression.
            //
            List<TupleValueExpression> baseTVEExpressions =
                    expr.findAllTupleValueSubexpressions();
            Set<String> baseTableNames = new HashSet<>();
            for (TupleValueExpression tve : baseTVEExpressions) {
                String tableAlias = tve.getTableAlias();
                assert(tableAlias != null);
                baseTableNames.add(tableAlias);
            }
            if (baseTableNames.size() != 1) {
                // Table-spanning ORDER BYs -- like ORDER BY A.X + B.Y are not helpful.
                // Neither are (nonsense) constant (table-less) expressions.
                continue;
            }
            // Everything in the baseTVEExpressions table is a column
            // in the same table and has the same alias. So just grab the first one.
            // All we really want is the alias.
            AbstractExpression baseTVE = baseTVEExpressions.get(0);
            String nextTableAlias = ((TupleValueExpression)baseTVE).getTableAlias();
            // This was tested above. But the assert above may prove to be over cautious
            // and disappear.
            assert(nextTableAlias != null);
            List<AbstractExpression> perTable = baseTableAliases.get(nextTableAlias);
            if (perTable == null) {
                perTable = new ArrayList<>();
                baseTableAliases.put(nextTableAlias, perTable);
            }
            perTable.add(expr);
        }
        if (m_tableAliasMap.size() > baseTableAliases.size()) {
            // FIXME: There are more table aliases in the select list than tables
            // named in the order by clause. So, some tables named in the
            // select list are not explicitly listed in the order by
            // clause.
            //
            // This would be one of the tricky cases where the goal would be to prove that the
            // row with no ORDER BY component came from the right side of a 1-to-1 or many-to-1 join.
            // like Unique Index nested loop join, etc.
            return false;
        }
        boolean allScansAreDeterministic = true;
        for (Entry<String, List<AbstractExpression>> orderedAlias : baseTableAliases.entrySet()) {
            List<AbstractExpression> orderedAliasExprs = orderedAlias.getValue();
            StmtTableScan tableScan = getStmtTableScanByAlias(orderedAlias.getKey());
            if (tableScan == null) {
                assert(false);
                return false;
            }
            if (tableScan instanceof StmtSubqueryScan) {
                return false; // don't yet handle FROM clause subquery, here.
            }
            Table table = ((StmtTargetTableScan)tableScan).getTargetTable();
            // This table's scans need to be proven deterministic.
            allScansAreDeterministic = false;
            // Search indexes for one that makes the order by deterministic
            for (Index index : table.getIndexes()) {
                // skip non-unique indexes
                if ( ! index.getUnique()) {
                    continue;
                }
                // get the list of expressions for the index
                List<AbstractExpression> indexExpressions = new ArrayList<>();
                String jsonExpr = index.getExpressionsjson();
                // if this is a pure-column index...
                if (jsonExpr.isEmpty()) {
                    // synthesize a TVE per indexed column so it can be compared
                    // against the ORDER BY expressions
                    for (ColumnRef cref : index.getColumns()) {
                        Column col = cref.getColumn();
                        TupleValueExpression tve = new TupleValueExpression(table.getTypeName(),
                                orderedAlias.getKey(),
                                col.getName(),
                                col.getName(),
                                col.getIndex());
                        indexExpressions.add(tve);
                    }
                }
                // if this is a fancy expression-based index...
                else {
                    try {
                        indexExpressions = AbstractExpression.fromJSONArrayString(jsonExpr, tableScan);
                    }
                    catch (JSONException e) {
                        // a malformed index definition should not abort the whole
                        // check; just skip this index
                        e.printStackTrace();
                        assert(false);
                        continue;
                    }
                }
                // If the sort covers the index, then it's a unique sort.
                // TODO: The statement's equivalence sets would be handy here to recognize cases like
                // WHERE B.unique_id = A.b_id
                // ORDER BY A.unique_id, A.b_id
                if (orderedAliasExprs.containsAll(indexExpressions)) {
                    allScansAreDeterministic = true;
                    break;
                }
            }
            // ALL tables' scans need to have proved deterministic
            if ( ! allScansAreDeterministic) {
                return false;
            }
        }
        return true;
} } | public class class_name {
protected boolean orderByColumnsCoverUniqueKeys() {
// In theory, if EVERY table in the query has a uniqueness constraint
// (primary key or other unique index) on columns that are all listed in the ORDER BY values,
// the result is deterministic.
// This holds regardless of whether the associated index is actually used in the selected plan,
// so this check is plan-independent.
//
// baseTableAliases associates table aliases with the order by
// expressions which reference them. Presumably by using
// table aliases we will map table scans to expressions rather
// than tables to expressions, and not confuse ourselves with
// different instances of the same table in self joins.
HashMap<String, List<AbstractExpression> > baseTableAliases =
new HashMap<>();
for (ParsedColInfo col : orderByColumns()) {
AbstractExpression expr = col.m_expression;
//
// Compute the set of tables mentioned in the expression.
// 1. Search out all the TVEs.
// 2. Throw the aliases of the tables of each of these into a HashSet.
// The table must have an alias. It might not have a name.
// 3. If the HashSet has size > 1 we can't use this expression.
//
List<TupleValueExpression> baseTVEExpressions =
expr.findAllTupleValueSubexpressions();
Set<String> baseTableNames = new HashSet<>();
for (TupleValueExpression tve : baseTVEExpressions) {
String tableAlias = tve.getTableAlias();
assert(tableAlias != null); // depends on control dependency: [for], data = [none]
baseTableNames.add(tableAlias); // depends on control dependency: [for], data = [none]
}
if (baseTableNames.size() != 1) {
// Table-spanning ORDER BYs -- like ORDER BY A.X + B.Y are not helpful.
// Neither are (nonsense) constant (table-less) expressions.
continue;
}
// Everything in the baseTVEExpressions table is a column
// in the same table and has the same alias. So just grab the first one.
// All we really want is the alias.
AbstractExpression baseTVE = baseTVEExpressions.get(0);
String nextTableAlias = ((TupleValueExpression)baseTVE).getTableAlias();
// This was tested above. But the assert above may prove to be over cautious
// and disappear.
assert(nextTableAlias != null); // depends on control dependency: [for], data = [none]
List<AbstractExpression> perTable = baseTableAliases.get(nextTableAlias);
if (perTable == null) {
perTable = new ArrayList<>(); // depends on control dependency: [if], data = [none]
baseTableAliases.put(nextTableAlias, perTable); // depends on control dependency: [if], data = [none]
}
perTable.add(expr); // depends on control dependency: [for], data = [none]
}
if (m_tableAliasMap.size() > baseTableAliases.size()) {
// FIXME: There are more table aliases in the select list than tables
// named in the order by clause. So, some tables named in the
// select list are not explicitly listed in the order by
// clause.
//
// This would be one of the tricky cases where the goal would be to prove that the
// row with no ORDER BY component came from the right side of a 1-to-1 or many-to-1 join.
// like Unique Index nested loop join, etc.
return false; // depends on control dependency: [if], data = [none]
}
boolean allScansAreDeterministic = true;
for (Entry<String, List<AbstractExpression>> orderedAlias : baseTableAliases.entrySet()) {
List<AbstractExpression> orderedAliasExprs = orderedAlias.getValue();
StmtTableScan tableScan = getStmtTableScanByAlias(orderedAlias.getKey());
if (tableScan == null) {
assert(false); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
if (tableScan instanceof StmtSubqueryScan) {
return false; // don't yet handle FROM clause subquery, here. // depends on control dependency: [if], data = [none]
}
Table table = ((StmtTargetTableScan)tableScan).getTargetTable();
// This table's scans need to be proven deterministic.
allScansAreDeterministic = false; // depends on control dependency: [for], data = [none]
// Search indexes for one that makes the order by deterministic
for (Index index : table.getIndexes()) {
// skip non-unique indexes
if ( ! index.getUnique()) {
continue;
}
// get the list of expressions for the index
List<AbstractExpression> indexExpressions = new ArrayList<>();
String jsonExpr = index.getExpressionsjson();
// if this is a pure-column index...
if (jsonExpr.isEmpty()) {
for (ColumnRef cref : index.getColumns()) {
Column col = cref.getColumn();
TupleValueExpression tve = new TupleValueExpression(table.getTypeName(),
orderedAlias.getKey(),
col.getName(),
col.getName(),
col.getIndex());
indexExpressions.add(tve); // depends on control dependency: [for], data = [none]
}
}
// if this is a fancy expression-based index...
else {
try {
indexExpressions = AbstractExpression.fromJSONArrayString(jsonExpr, tableScan); // depends on control dependency: [try], data = [none]
}
catch (JSONException e) {
e.printStackTrace();
assert(false);
continue;
} // depends on control dependency: [catch], data = [none]
}
// If the sort covers the index, then it's a unique sort.
// TODO: The statement's equivalence sets would be handy here to recognize cases like
// WHERE B.unique_id = A.b_id
// ORDER BY A.unique_id, A.b_id
if (orderedAliasExprs.containsAll(indexExpressions)) {
allScansAreDeterministic = true; // depends on control dependency: [if], data = [none]
break;
}
}
// ALL tables' scans need to have proved deterministic
if ( ! allScansAreDeterministic) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} } |
public class class_name {
public void sendToUsers(String topicURI, Object event, Set<String> eligibleUsers) {
Set<String> eligibleSessionIds = null;
if (eligibleUsers != null && !eligibleUsers.isEmpty()) {
eligibleSessionIds = new HashSet<>(eligibleUsers.size());
for (String user : eligibleUsers) {
for (SimpSession session : this.simpUserRegistry.getUser(user)
.getSessions()) {
eligibleSessionIds.add(session.getId());
}
}
}
this.eventMessenger.sendTo(topicURI, event, eligibleSessionIds);
} } | public class class_name {
public void sendToUsers(String topicURI, Object event, Set<String> eligibleUsers) {
Set<String> eligibleSessionIds = null;
if (eligibleUsers != null && !eligibleUsers.isEmpty()) {
eligibleSessionIds = new HashSet<>(eligibleUsers.size()); // depends on control dependency: [if], data = [(eligibleUsers]
for (String user : eligibleUsers) {
for (SimpSession session : this.simpUserRegistry.getUser(user)
.getSessions()) {
eligibleSessionIds.add(session.getId()); // depends on control dependency: [for], data = [session]
}
}
}
this.eventMessenger.sendTo(topicURI, event, eligibleSessionIds);
} } |
public class class_name {
private void crossSecond1withNValues(T2 val1, T1 firstValN,
Iterator<T1> valsN, GenericJoiner<T1, T2, O> matchFunction, Collector<O> collector)
throws Exception
{
this.copy2 = this.serializer2.copy(val1, this.copy2);
matchFunction.join(firstValN, this.copy2, collector);
// set copy and match first element
boolean more = true;
do {
final T1 nRec = valsN.next();
if (valsN.hasNext()) {
this.copy2 = this.serializer2.copy(val1, this.copy2);
matchFunction.join(nRec, this.copy2, collector);
} else {
matchFunction.join(nRec, val1, collector);
more = false;
}
}
while (more);
} } | public class class_name {
private void crossSecond1withNValues(T2 val1, T1 firstValN,
Iterator<T1> valsN, GenericJoiner<T1, T2, O> matchFunction, Collector<O> collector)
throws Exception
{
this.copy2 = this.serializer2.copy(val1, this.copy2);
matchFunction.join(firstValN, this.copy2, collector);
// set copy and match first element
boolean more = true;
do {
final T1 nRec = valsN.next();
if (valsN.hasNext()) {
this.copy2 = this.serializer2.copy(val1, this.copy2); // depends on control dependency: [if], data = [none]
matchFunction.join(nRec, this.copy2, collector); // depends on control dependency: [if], data = [none]
} else {
matchFunction.join(nRec, val1, collector); // depends on control dependency: [if], data = [none]
more = false; // depends on control dependency: [if], data = [none]
}
}
while (more);
} } |
public class class_name {
private static void printGroupInfo(final PrintWriter out, final ThreadGroup group, final String indent) {
if (group == null) {
return;
}
final int numThreads = group.activeCount();
final int numGroups = group.activeGroupCount();
final Thread[] threads = new Thread[numThreads];
final ThreadGroup[] groups = new ThreadGroup[numGroups];
group.enumerate(threads, false);
group.enumerate(groups, false);
out.println(indent + "Thread Group: " + group.getName() + " Max Priority: " + group.getMaxPriority() + (group.isDaemon() ? " Daemon" : ""));
for (int i = 0; i < numThreads; i++) {
Util.printThreadInfo(out, threads[i], indent + " ");
}
for (int i = 0; i < numGroups; i++) {
Util.printGroupInfo(out, groups[i], indent + " ");
}
} } | public class class_name {
private static void printGroupInfo(final PrintWriter out, final ThreadGroup group, final String indent) {
if (group == null) {
return;
// depends on control dependency: [if], data = [none]
}
final int numThreads = group.activeCount();
final int numGroups = group.activeGroupCount();
final Thread[] threads = new Thread[numThreads];
final ThreadGroup[] groups = new ThreadGroup[numGroups];
group.enumerate(threads, false);
group.enumerate(groups, false);
out.println(indent + "Thread Group: " + group.getName() + " Max Priority: " + group.getMaxPriority() + (group.isDaemon() ? " Daemon" : ""));
for (int i = 0; i < numThreads; i++) {
Util.printThreadInfo(out, threads[i], indent + " ");
// depends on control dependency: [for], data = [i]
}
for (int i = 0; i < numGroups; i++) {
Util.printGroupInfo(out, groups[i], indent + " ");
// depends on control dependency: [for], data = [i]
}
} } |
public class class_name {
public static void deleteIfExists(File file) throws IOException {
if (file.exists()) {
if (file.isFile()) {
if (!file.delete()) {
throw new IOException("Delete file failure,path:" + file.getAbsolutePath());
}
} else {
File[] files = file.listFiles();
if (files != null && files.length > 0) {
for (File temp : files) {
deleteIfExists(temp);
}
}
if (!file.delete()) {
throw new IOException("Delete file failure,path:" + file.getAbsolutePath());
}
}
}
} } | public class class_name {
public static void deleteIfExists(File file) throws IOException {
if (file.exists()) {
if (file.isFile()) {
if (!file.delete()) {
throw new IOException("Delete file failure,path:" + file.getAbsolutePath());
}
} else {
File[] files = file.listFiles();
if (files != null && files.length > 0) {
for (File temp : files) {
deleteIfExists(temp); // depends on control dependency: [for], data = [temp]
}
}
if (!file.delete()) {
throw new IOException("Delete file failure,path:" + file.getAbsolutePath());
}
}
}
} } |
public class class_name {
@Override public void normalise(ProjectCalendar calendar, LinkedList<TimephasedWork> list)
{
if (!list.isEmpty())
{
//dumpList(list);
splitDays(calendar, list);
//dumpList(list);
mergeSameDay(calendar, list);
//dumpList(list);
mergeSameWork(list);
//dumpList(list);
convertToHours(list);
//dumpList(list);
}
} } | public class class_name {
@Override public void normalise(ProjectCalendar calendar, LinkedList<TimephasedWork> list)
{
if (!list.isEmpty())
{
//dumpList(list);
splitDays(calendar, list); // depends on control dependency: [if], data = [none]
//dumpList(list);
mergeSameDay(calendar, list); // depends on control dependency: [if], data = [none]
//dumpList(list);
mergeSameWork(list); // depends on control dependency: [if], data = [none]
//dumpList(list);
convertToHours(list); // depends on control dependency: [if], data = [none]
//dumpList(list);
}
} } |
public class class_name {
public static boolean isAlpha(String in) {
char c = 0;
for (int i = in.length(); i > 0; i--) {
c = in.charAt(i - 1);
if (!Character.isLetter(c)) {
return false;
}
}
return true;
} } | public class class_name {
public static boolean isAlpha(String in) {
char c = 0;
for (int i = in.length(); i > 0; i--) {
c = in.charAt(i - 1); // depends on control dependency: [for], data = [i]
if (!Character.isLetter(c)) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} } |
public class class_name {
public void execute(InterpreterSelector interpreterSelector)
{
AbstractSpecification spec = new FilteredSpecification( start );
while (spec.hasMoreExamples() && canContinue( stats ))
{
Interpreter interpreter = interpreterSelector.selectInterpreter( spec.peek() );
interpreter.interpret( spec );
}
} } | public class class_name {
public void execute(InterpreterSelector interpreterSelector)
{
AbstractSpecification spec = new FilteredSpecification( start );
while (spec.hasMoreExamples() && canContinue( stats ))
{
Interpreter interpreter = interpreterSelector.selectInterpreter( spec.peek() );
interpreter.interpret( spec ); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
@Override protected void handleEvents(final String EVENT_TYPE) {
if ("REDRAW".equals(EVENT_TYPE)) {
pane.setEffect(gauge.isShadowsEnabled() ? mainInnerShadow1 : null);
shadowGroup.setEffect(gauge.isShadowsEnabled() ? FOREGROUND_SHADOW : null);
updateLcdDesign(height);
redraw();
} else if ("RESIZE".equals(EVENT_TYPE)) {
aspectRatio = gauge.getPrefHeight() / gauge.getPrefWidth();
resize();
redraw();
} else if ("LCD".equals(EVENT_TYPE)) {
updateLcdDesign(height);
} else if ("VISIBILITY".equals(EVENT_TYPE)) {
Helper.enableNode(crystalOverlay, gauge.isLcdCrystalEnabled());
Helper.enableNode(title, !gauge.getTitle().isEmpty());
Helper.enableNode(unitText, !gauge.getUnit().isEmpty());
Helper.enableNode(upperLeftText, gauge.isMinMeasuredValueVisible());
Helper.enableNode(upperRightText, gauge.isMaxMeasuredValueVisible());
Helper.enableNode(lowerRightText, !gauge.getSubTitle().isEmpty());
Helper.enableNode(lowerCenterText, gauge.isOldValueVisible());
Helper.enableNode(average, gauge.isAverageVisible());
Helper.enableNode(threshold, gauge.isThresholdVisible());
resize();
redraw();
} else if ("SECTION".equals(EVENT_TYPE)) {
sections = gauge.getSections();
updateSectionColors();
resize();
redraw();
}
} } | public class class_name {
@Override protected void handleEvents(final String EVENT_TYPE) {
if ("REDRAW".equals(EVENT_TYPE)) {
pane.setEffect(gauge.isShadowsEnabled() ? mainInnerShadow1 : null); // depends on control dependency: [if], data = [none]
shadowGroup.setEffect(gauge.isShadowsEnabled() ? FOREGROUND_SHADOW : null); // depends on control dependency: [if], data = [none]
updateLcdDesign(height); // depends on control dependency: [if], data = [none]
redraw(); // depends on control dependency: [if], data = [none]
} else if ("RESIZE".equals(EVENT_TYPE)) {
aspectRatio = gauge.getPrefHeight() / gauge.getPrefWidth(); // depends on control dependency: [if], data = [none]
resize(); // depends on control dependency: [if], data = [none]
redraw(); // depends on control dependency: [if], data = [none]
} else if ("LCD".equals(EVENT_TYPE)) {
updateLcdDesign(height); // depends on control dependency: [if], data = [none]
} else if ("VISIBILITY".equals(EVENT_TYPE)) {
Helper.enableNode(crystalOverlay, gauge.isLcdCrystalEnabled()); // depends on control dependency: [if], data = [none]
Helper.enableNode(title, !gauge.getTitle().isEmpty()); // depends on control dependency: [if], data = [none]
Helper.enableNode(unitText, !gauge.getUnit().isEmpty()); // depends on control dependency: [if], data = [none]
Helper.enableNode(upperLeftText, gauge.isMinMeasuredValueVisible()); // depends on control dependency: [if], data = [none]
Helper.enableNode(upperRightText, gauge.isMaxMeasuredValueVisible()); // depends on control dependency: [if], data = [none]
Helper.enableNode(lowerRightText, !gauge.getSubTitle().isEmpty()); // depends on control dependency: [if], data = [none]
Helper.enableNode(lowerCenterText, gauge.isOldValueVisible()); // depends on control dependency: [if], data = [none]
Helper.enableNode(average, gauge.isAverageVisible()); // depends on control dependency: [if], data = [none]
Helper.enableNode(threshold, gauge.isThresholdVisible()); // depends on control dependency: [if], data = [none]
resize(); // depends on control dependency: [if], data = [none]
redraw(); // depends on control dependency: [if], data = [none]
} else if ("SECTION".equals(EVENT_TYPE)) {
sections = gauge.getSections(); // depends on control dependency: [if], data = [none]
updateSectionColors(); // depends on control dependency: [if], data = [none]
resize(); // depends on control dependency: [if], data = [none]
redraw(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void bubbleUpNodeTable(DAGraph<DataT, NodeT> from, LinkedList<String> path) {
if (path.contains(from.rootNode.key())) {
path.push(from.rootNode.key()); // For better error message
throw new IllegalStateException("Detected circular dependency: " + StringUtils.join(path, " -> "));
}
path.push(from.rootNode.key());
for (DAGraph<DataT, NodeT> to : from.parentDAGs) {
this.merge(from.nodeTable, to.nodeTable);
this.bubbleUpNodeTable(to, path);
}
path.pop();
} } | public class class_name {
private void bubbleUpNodeTable(DAGraph<DataT, NodeT> from, LinkedList<String> path) {
if (path.contains(from.rootNode.key())) {
path.push(from.rootNode.key()); // For better error message // depends on control dependency: [if], data = [none]
throw new IllegalStateException("Detected circular dependency: " + StringUtils.join(path, " -> "));
}
path.push(from.rootNode.key());
for (DAGraph<DataT, NodeT> to : from.parentDAGs) {
this.merge(from.nodeTable, to.nodeTable); // depends on control dependency: [for], data = [to]
this.bubbleUpNodeTable(to, path); // depends on control dependency: [for], data = [to]
}
path.pop();
} } |
public class class_name {
public static <T> List<T> subList(final List<T> list, int pageSize, int page) {
if (pageSize <= 0 || page == 0) {
return Collections.EMPTY_LIST;
}
int size = list.size();
int fromIndex = page > 0
? (page - 1) * pageSize
: size + (page * pageSize);
int toIndex = fromIndex + pageSize;
int finalFromIndex = Math.max(0, fromIndex);
int finalToIndex = Math.min(size, Math.max(0, toIndex));
// prevent fromIndex to be greater than toIndex
if (finalFromIndex > finalToIndex) {
finalFromIndex = finalToIndex;
}
try {
return list.subList(finalFromIndex, finalToIndex);
} catch (Throwable t) {
logger.warn("Invalid range for sublist in paging, pageSize {}, page {}: {}", new Object[] {
pageSize,
page,
t.getMessage()
});
}
return Collections.EMPTY_LIST;
} } | public class class_name {
public static <T> List<T> subList(final List<T> list, int pageSize, int page) {
if (pageSize <= 0 || page == 0) {
return Collections.EMPTY_LIST; // depends on control dependency: [if], data = [none]
}
int size = list.size();
int fromIndex = page > 0
? (page - 1) * pageSize
: size + (page * pageSize);
int toIndex = fromIndex + pageSize;
int finalFromIndex = Math.max(0, fromIndex);
int finalToIndex = Math.min(size, Math.max(0, toIndex));
// prevent fromIndex to be greater than toIndex
if (finalFromIndex > finalToIndex) {
finalFromIndex = finalToIndex; // depends on control dependency: [if], data = [none]
}
try {
return list.subList(finalFromIndex, finalToIndex); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
logger.warn("Invalid range for sublist in paging, pageSize {}, page {}: {}", new Object[] {
pageSize,
page,
t.getMessage()
});
} // depends on control dependency: [catch], data = [none]
return Collections.EMPTY_LIST;
} } |
public class class_name {
protected boolean is4YearTerm(Term<?> term) {
if(!(term instanceof DateTerm.YearTerm)) {
return false;
}
DateTerm.YearTerm yearTerm = (DateTerm.YearTerm)term;
if(yearTerm.getFormat().length() == 4) {
return true;
}
return false;
} } | public class class_name {
protected boolean is4YearTerm(Term<?> term) {
if(!(term instanceof DateTerm.YearTerm)) {
return false;
// depends on control dependency: [if], data = [none]
}
DateTerm.YearTerm yearTerm = (DateTerm.YearTerm)term;
if(yearTerm.getFormat().length() == 4) {
return true;
// depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
public void start(Xid xid, int flags) throws XAException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.entry(this, tc, "start", new Object[]
{
ivManagedConnection,
AdapterUtil.toString(xid),
AdapterUtil.getXAResourceStartFlagString(flags)
});
// if the MC marked Stale, it means the user requested a purge pool with an immediate option
// so don't allow any work to continue. In this case, we throw XAER_RMFAIL xa error
// which indicates that the resource manager is not available
if (ivManagedConnection._mcStale) {
Tr.error(tc, "INVALID_CONNECTION");
XAException x = new XAException(XAException.XAER_RMFAIL);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start", new Object[] { "MC is stale throwing XAER_RMFAIL", ivManagedConnection });
throw x;
}
if (dsConfig.get().enableMultithreadedAccessDetection)
ivManagedConnection.detectMultithreadedAccess();
this.ivXid = xid;
try {
// TODO if we add dsConfig.transactionBranchesLooselyCoupled, then for Oracle, do
// flags |= 0x10000; // value of oracle.jdbc.xa.OracleXAResource.ORATRANSLOOSE
ivXaRes.start(xid, flags);
ivStateManager.setState(WSStateManager.XA_START);
} catch (TransactionException te) {
//Exception means setState failed because it was invalid to set the state in this case
FFDCFilter.processException(te, "com.ibm.ws.rsadapter.spi.WSRdbXaResourceImpl.start", "615", this);
Tr.error(tc, "INVALID_TX_STATE", new Object[] { "XAResource.start()", ivManagedConnection.getTransactionStateAsString() });
try {
ivXaRes.end(xid, XAResource.TMNOFLAGS);
ivXaRes.rollback(xid);
} catch (XAException eatXA) {
FFDCFilter.processException(eatXA, "com.ibm.ws.rsadapter.spi.WSRdbXaResourceImpl.start", "624", this);
traceXAException(eatXA, currClass);
//eat this exception because in the next line we will throw one
}
XAException xae = AdapterUtil.createXAException(
"INVALID_TX_STATE",
new Object[] { "XAResource.start", ivManagedConnection.getTransactionStateAsString() },
XAException.XA_RBPROTO);
traceXAException(xae, currClass);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start", "Exception");
throw xae;
} catch (XAException xae) {
FFDCFilter.processException(xae, "com.ibm.ws.rsadapter.spi.WSRdbXaResourceImpl.start", "639", this);
traceXAException(xae, currClass);
checkXAException(xae);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start", "Exception");
throw xae;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
{
String cId = null;
try {
cId = ivManagedConnection.mcf.getCorrelator(ivManagedConnection);
} catch (SQLException x) {
// will just log the exception here and ignore it since its in trace
Tr.debug(this, tc, "got an exception trying to get the correlator in rollback, exception is: ", x);
}
if (cId != null) {
StringBuffer stbuf = new StringBuffer(200);
stbuf.append("Correlator: DB2, ID: ");
stbuf.append(cId);
if (xid != null) {
stbuf.append("Transaction ID : ");
stbuf.append(xid);
}
stbuf.append(" BEGIN");
Tr.debug(this, tc, stbuf.toString());
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start");
} } | public class class_name {
public void start(Xid xid, int flags) throws XAException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.entry(this, tc, "start", new Object[]
{
ivManagedConnection,
AdapterUtil.toString(xid),
AdapterUtil.getXAResourceStartFlagString(flags)
});
// if the MC marked Stale, it means the user requested a purge pool with an immediate option
// so don't allow any work to continue. In this case, we throw XAER_RMFAIL xa error
// which indicates that the resource manager is not available
if (ivManagedConnection._mcStale) {
Tr.error(tc, "INVALID_CONNECTION");
XAException x = new XAException(XAException.XAER_RMFAIL);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start", new Object[] { "MC is stale throwing XAER_RMFAIL", ivManagedConnection });
throw x;
}
if (dsConfig.get().enableMultithreadedAccessDetection)
ivManagedConnection.detectMultithreadedAccess();
this.ivXid = xid;
try {
// TODO if we add dsConfig.transactionBranchesLooselyCoupled, then for Oracle, do
// flags |= 0x10000; // value of oracle.jdbc.xa.OracleXAResource.ORATRANSLOOSE
ivXaRes.start(xid, flags);
ivStateManager.setState(WSStateManager.XA_START);
} catch (TransactionException te) {
//Exception means setState failed because it was invalid to set the state in this case
FFDCFilter.processException(te, "com.ibm.ws.rsadapter.spi.WSRdbXaResourceImpl.start", "615", this);
Tr.error(tc, "INVALID_TX_STATE", new Object[] { "XAResource.start()", ivManagedConnection.getTransactionStateAsString() });
try {
ivXaRes.end(xid, XAResource.TMNOFLAGS); // depends on control dependency: [try], data = [none]
ivXaRes.rollback(xid); // depends on control dependency: [try], data = [none]
} catch (XAException eatXA) {
FFDCFilter.processException(eatXA, "com.ibm.ws.rsadapter.spi.WSRdbXaResourceImpl.start", "624", this);
traceXAException(eatXA, currClass);
//eat this exception because in the next line we will throw one
} // depends on control dependency: [catch], data = [none]
XAException xae = AdapterUtil.createXAException(
"INVALID_TX_STATE",
new Object[] { "XAResource.start", ivManagedConnection.getTransactionStateAsString() },
XAException.XA_RBPROTO);
traceXAException(xae, currClass);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start", "Exception");
throw xae;
} catch (XAException xae) {
FFDCFilter.processException(xae, "com.ibm.ws.rsadapter.spi.WSRdbXaResourceImpl.start", "639", this);
traceXAException(xae, currClass);
checkXAException(xae);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start", "Exception");
throw xae;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
{
String cId = null;
try {
cId = ivManagedConnection.mcf.getCorrelator(ivManagedConnection); // depends on control dependency: [try], data = [none]
} catch (SQLException x) {
// will just log the exception here and ignore it since its in trace
Tr.debug(this, tc, "got an exception trying to get the correlator in rollback, exception is: ", x);
} // depends on control dependency: [catch], data = [none]
if (cId != null) {
StringBuffer stbuf = new StringBuffer(200);
stbuf.append("Correlator: DB2, ID: "); // depends on control dependency: [if], data = [none]
stbuf.append(cId); // depends on control dependency: [if], data = [(cId]
if (xid != null) {
stbuf.append("Transaction ID : "); // depends on control dependency: [if], data = [none]
stbuf.append(xid); // depends on control dependency: [if], data = [(xid]
}
stbuf.append(" BEGIN"); // depends on control dependency: [if], data = [none]
Tr.debug(this, tc, stbuf.toString()); // depends on control dependency: [if], data = [none]
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(this, tc, "start");
} } |
public class class_name {
@Override
public void index(ByteBuffer key, ColumnFamily columnFamily) {
Log.debug("Indexing row %s in index %s ", key, logName);
lock.readLock().lock();
try {
if (rowService != null) {
long timestamp = System.currentTimeMillis();
rowService.index(key, columnFamily, timestamp);
}
} catch (RuntimeException e) {
Log.error("Error while indexing row %s", key);
throw e;
} finally {
lock.readLock().unlock();
}
} } | public class class_name {
@Override
public void index(ByteBuffer key, ColumnFamily columnFamily) {
Log.debug("Indexing row %s in index %s ", key, logName);
lock.readLock().lock();
try {
if (rowService != null) {
long timestamp = System.currentTimeMillis();
rowService.index(key, columnFamily, timestamp); // depends on control dependency: [if], data = [none]
}
} catch (RuntimeException e) {
Log.error("Error while indexing row %s", key);
throw e;
} finally { // depends on control dependency: [catch], data = [none]
lock.readLock().unlock();
}
} } |
public class class_name {
int guessFluffedDataSize() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "guessFluffedDataSize");
int total = 0;
// If we have a fluffed up in memory map ...
if (bodyMap != null) {
// Add the overhead for the map itself.
total += FLUFFED_MAP_OVERHEAD;
// Also add on a constant guess for each map entry.
total += bodyMap.size() * FLUFFED_MAP_ENTRY_SIZE;
}
// Figure out a guesstimate without fluffing up anything unnecessarily
else {
// Add the estimate for the fluffed payload size
// If the body's JMF message is already fluffed up & cached, ask it for the size.
// Do NOT hold on to this JSMsgPart, as it could lose validity at any time.
JsMsgPart part = getPayloadIfFluffed();
if (part != null) {
// .... get the estimate for the fluffed names (which is a list)
// .... and add double that to cater for the values
total += part.estimateFieldValueSize(JmsMapBodyAccess.BODY_DATA_ENTRY_NAME) * 2;
}
// If the JMF message hasn't been fluffed up, find the total assembled length of
// the payload message if possible.
else {
// If we have a valid length, remove a bit & assume the rest is the encoded map.
int payloadSize = jmo.getPayloadPart().getAssembledLengthIfKnown();
if (payloadSize != -1) {
int flatMapSize = payloadSize-FLATTENED_PAYLOAD_PART;
// It is a number of entries each of which will consist of a name+value.
// They are probably mostly Strings. Let's say 15 chars for each name or value.
int numEnts = flatMapSize / 30;
if (numEnts > 0) {
// Now we've decided how many entries we tink we might have,
// calculate the likely fluffed size.
total += FLUFFED_MAP_OVERHEAD;
total += numEnts * FLUFFED_MAP_ENTRY_SIZE;
}
}
// If the payloadSize == -1, then the body message must have been fluffed up
// but not yet cached, so we'll locate & cache it now.
else {
// Add the overhead for a fluffed up map
total += FLUFFED_MAP_OVERHEAD;
// .... get the estimate for the fluffed names (which is a list)
// .... and add double that to cater for the values
total += getPayload().estimateFieldValueSize(JmsMapBodyAccess.BODY_DATA_ENTRY_NAME) * 2;
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "guessFluffedDataSize", total);
return total;
} } | public class class_name {
int guessFluffedDataSize() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "guessFluffedDataSize");
int total = 0;
// If we have a fluffed up in memory map ...
if (bodyMap != null) {
// Add the overhead for the map itself.
total += FLUFFED_MAP_OVERHEAD; // depends on control dependency: [if], data = [none]
// Also add on a constant guess for each map entry.
total += bodyMap.size() * FLUFFED_MAP_ENTRY_SIZE; // depends on control dependency: [if], data = [none]
}
// Figure out a guesstimate without fluffing up anything unnecessarily
else {
// Add the estimate for the fluffed payload size
// If the body's JMF message is already fluffed up & cached, ask it for the size.
// Do NOT hold on to this JSMsgPart, as it could lose validity at any time.
JsMsgPart part = getPayloadIfFluffed();
if (part != null) {
// .... get the estimate for the fluffed names (which is a list)
// .... and add double that to cater for the values
total += part.estimateFieldValueSize(JmsMapBodyAccess.BODY_DATA_ENTRY_NAME) * 2; // depends on control dependency: [if], data = [none]
}
// If the JMF message hasn't been fluffed up, find the total assembled length of
// the payload message if possible.
else {
// If we have a valid length, remove a bit & assume the rest is the encoded map.
int payloadSize = jmo.getPayloadPart().getAssembledLengthIfKnown();
if (payloadSize != -1) {
int flatMapSize = payloadSize-FLATTENED_PAYLOAD_PART;
// It is a number of entries each of which will consist of a name+value.
// They are probably mostly Strings. Let's say 15 chars for each name or value.
int numEnts = flatMapSize / 30;
if (numEnts > 0) {
// Now we've decided how many entries we tink we might have,
// calculate the likely fluffed size.
total += FLUFFED_MAP_OVERHEAD; // depends on control dependency: [if], data = [none]
total += numEnts * FLUFFED_MAP_ENTRY_SIZE; // depends on control dependency: [if], data = [none]
}
}
// If the payloadSize == -1, then the body message must have been fluffed up
// but not yet cached, so we'll locate & cache it now.
else {
// Add the overhead for a fluffed up map
total += FLUFFED_MAP_OVERHEAD; // depends on control dependency: [if], data = [none]
// .... get the estimate for the fluffed names (which is a list)
// .... and add double that to cater for the values
total += getPayload().estimateFieldValueSize(JmsMapBodyAccess.BODY_DATA_ENTRY_NAME) * 2; // depends on control dependency: [if], data = [none]
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "guessFluffedDataSize", total);
return total;
} } |
public class class_name {
private void search(double[] q, Node node, Neighbor<double[], E> neighbor) {
if (node.isLeaf()) {
// look at all the instances in this leaf
for (int idx = node.index; idx < node.index + node.count; idx++) {
if (q == keys[index[idx]] && identicalExcluded) {
continue;
}
double distance = Math.squaredDistance(q, keys[index[idx]]);
if (distance < neighbor.distance) {
neighbor.key = keys[index[idx]];
neighbor.value = data[index[idx]];
neighbor.index = index[idx];
neighbor.distance = distance;
}
}
} else {
Node nearer, further;
double diff = q[node.split] - node.cutoff;
if (diff < 0) {
nearer = node.lower;
further = node.upper;
} else {
nearer = node.upper;
further = node.lower;
}
search(q, nearer, neighbor);
// now look in further half
if (neighbor.distance >= diff * diff) {
search(q, further, neighbor);
}
}
} } | public class class_name {
private void search(double[] q, Node node, Neighbor<double[], E> neighbor) {
if (node.isLeaf()) {
// look at all the instances in this leaf
for (int idx = node.index; idx < node.index + node.count; idx++) {
if (q == keys[index[idx]] && identicalExcluded) {
continue;
}
double distance = Math.squaredDistance(q, keys[index[idx]]);
if (distance < neighbor.distance) {
neighbor.key = keys[index[idx]]; // depends on control dependency: [if], data = [none]
neighbor.value = data[index[idx]]; // depends on control dependency: [if], data = [none]
neighbor.index = index[idx]; // depends on control dependency: [if], data = [none]
neighbor.distance = distance; // depends on control dependency: [if], data = [none]
}
}
} else {
Node nearer, further;
double diff = q[node.split] - node.cutoff;
if (diff < 0) {
nearer = node.lower; // depends on control dependency: [if], data = [none]
further = node.upper; // depends on control dependency: [if], data = [none]
} else {
nearer = node.upper; // depends on control dependency: [if], data = [none]
further = node.lower; // depends on control dependency: [if], data = [none]
}
search(q, nearer, neighbor); // depends on control dependency: [if], data = [none]
// now look in further half
if (neighbor.distance >= diff * diff) {
search(q, further, neighbor); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private static List<String> resolveValues(final Iterable<? extends String> values) {
final List<String> valuesList = new ArrayList<String>();
for(final String value : values) {
Validate.notNull(value, "One of the specified values was null");
valuesList.add(value);
}
return valuesList;
} } | public class class_name {
private static List<String> resolveValues(final Iterable<? extends String> values) {
final List<String> valuesList = new ArrayList<String>();
for(final String value : values) {
Validate.notNull(value, "One of the specified values was null"); // depends on control dependency: [for], data = [value]
valuesList.add(value); // depends on control dependency: [for], data = [value]
}
return valuesList;
} } |
public class class_name {
public void marshall(GetRelationalDatabaseRequest getRelationalDatabaseRequest, ProtocolMarshaller protocolMarshaller) {
if (getRelationalDatabaseRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getRelationalDatabaseRequest.getRelationalDatabaseName(), RELATIONALDATABASENAME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(GetRelationalDatabaseRequest getRelationalDatabaseRequest, ProtocolMarshaller protocolMarshaller) {
if (getRelationalDatabaseRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getRelationalDatabaseRequest.getRelationalDatabaseName(), RELATIONALDATABASENAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void addMessage(String key, String value) {
if (m_bundleMap != null) {
m_bundleMap.put(key, value);
}
} } | public class class_name {
public void addMessage(String key, String value) {
if (m_bundleMap != null) {
m_bundleMap.put(key, value); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private OMMapBufferEntry[] acquire(final OFileMMap iFile, final long iBeginOffset, final int iSize, final boolean iForce,
final OMMapManager.OPERATION_TYPE iOperationType, final OMMapManager.ALLOC_STRATEGY iStrategy) {
if (iStrategy == OMMapManager.ALLOC_STRATEGY.MMAP_NEVER)
return null;
lock.writeLock().lock();
try {
lastStrategy = iStrategy;
OMMapBufferEntry entry = searchBetweenLastBlocks(iFile, iBeginOffset, iSize);
try {
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
// SEARCH THE REQUESTED RANGE IN THE CACHED BUFFERS
List<OMMapBufferEntry> fileEntries = bufferPoolPerFile.get(iFile);
if (fileEntries == null) {
fileEntries = new ArrayList<OMMapBufferEntry>();
bufferPoolPerFile.put(iFile, fileEntries);
}
int position = searchEntry(fileEntries, iBeginOffset, iSize);
if (position > -1) {
// FOUND !!!
entry = fileEntries.get(position);
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
}
int p = (position + 2) * -1;
// CHECK IF THERE IS A BUFFER THAT OVERLAPS
if (!allocIfOverlaps(iBeginOffset, iSize, fileEntries, p)) {
OProfiler.getInstance().updateCounter("system.file.mmap.usedChannel", 1);
return null;
}
int bufferSize = computeBestEntrySize(iFile, iBeginOffset, iSize, iForce, fileEntries, p);
if (totalMemory + bufferSize > maxMemory
&& (iStrategy == OMMapManager.ALLOC_STRATEGY.MMAP_ONLY_AVAIL_POOL || iOperationType == OMMapManager.OPERATION_TYPE.READ
&& iStrategy == OMMapManager.ALLOC_STRATEGY.MMAP_WRITE_ALWAYS_READ_IF_AVAIL_POOL)) {
OProfiler.getInstance().updateCounter("system.file.mmap.usedChannel", 1);
return null;
}
entry = null;
// FREE LESS-USED BUFFERS UNTIL THE FREE-MEMORY IS DOWN THE CONFIGURED MAX LIMIT
do {
if (totalMemory + bufferSize > maxMemory)
freeResources();
// RECOMPUTE THE POSITION AFTER REMOVING
fileEntries = bufferPoolPerFile.get(iFile);
position = searchEntry(fileEntries, iBeginOffset, iSize);
if (position > -1) {
// FOUND: THIS IS PRETTY STRANGE SINCE IT WASN'T FOUND!
entry = fileEntries.get(position);
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
}
// LOAD THE PAGE
try {
entry = mapBuffer(iFile, iBeginOffset, bufferSize);
} catch (IllegalArgumentException e) {
throw e;
} catch (Exception e) {
// REDUCE MAX MEMORY TO FORCE EMPTY BUFFERS
maxMemory = maxMemory * 90 / 100;
OLogManager.instance().warn(OMMapManagerOld.class, "Memory mapping error, try to reduce max memory to %d and retry...",
e, maxMemory);
}
} while (entry == null && maxMemory > MIN_MEMORY);
if (entry == null || !entry.isValid())
throw new OIOException("You cannot access to the file portion " + iBeginOffset + "-" + iBeginOffset + iSize + " bytes");
totalMemory += bufferSize;
bufferPoolLRU.add(entry);
p = (position + 2) * -1;
if (p < 0)
p = 0;
if (fileEntries == null) {
// IN CASE THE CLEAN HAS REMOVED THE LIST
fileEntries = new ArrayList<OMMapBufferEntry>();
bufferPoolPerFile.put(iFile, fileEntries);
}
fileEntries.add(p, entry);
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
} finally {
if (entry != null) {
entry.acquireWriteLock();
if (iOperationType == OMMapManager.OPERATION_TYPE.WRITE)
entry.setDirty();
}
}
return null;
} finally {
lock.writeLock().unlock();
}
} } | public class class_name {
private OMMapBufferEntry[] acquire(final OFileMMap iFile, final long iBeginOffset, final int iSize, final boolean iForce,
final OMMapManager.OPERATION_TYPE iOperationType, final OMMapManager.ALLOC_STRATEGY iStrategy) {
if (iStrategy == OMMapManager.ALLOC_STRATEGY.MMAP_NEVER)
return null;
lock.writeLock().lock();
try {
lastStrategy = iStrategy;
// depends on control dependency: [try], data = [none]
OMMapBufferEntry entry = searchBetweenLastBlocks(iFile, iBeginOffset, iSize);
try {
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
// SEARCH THE REQUESTED RANGE IN THE CACHED BUFFERS
List<OMMapBufferEntry> fileEntries = bufferPoolPerFile.get(iFile);
if (fileEntries == null) {
fileEntries = new ArrayList<OMMapBufferEntry>();
// depends on control dependency: [if], data = [none]
bufferPoolPerFile.put(iFile, fileEntries);
// depends on control dependency: [if], data = [none]
}
int position = searchEntry(fileEntries, iBeginOffset, iSize);
if (position > -1) {
// FOUND !!!
entry = fileEntries.get(position);
// depends on control dependency: [if], data = [(position]
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
}
int p = (position + 2) * -1;
// CHECK IF THERE IS A BUFFER THAT OVERLAPS
if (!allocIfOverlaps(iBeginOffset, iSize, fileEntries, p)) {
OProfiler.getInstance().updateCounter("system.file.mmap.usedChannel", 1);
// depends on control dependency: [if], data = [none]
return null;
// depends on control dependency: [if], data = [none]
}
int bufferSize = computeBestEntrySize(iFile, iBeginOffset, iSize, iForce, fileEntries, p);
if (totalMemory + bufferSize > maxMemory
&& (iStrategy == OMMapManager.ALLOC_STRATEGY.MMAP_ONLY_AVAIL_POOL || iOperationType == OMMapManager.OPERATION_TYPE.READ
&& iStrategy == OMMapManager.ALLOC_STRATEGY.MMAP_WRITE_ALWAYS_READ_IF_AVAIL_POOL)) {
OProfiler.getInstance().updateCounter("system.file.mmap.usedChannel", 1);
// depends on control dependency: [if], data = [none]
return null;
// depends on control dependency: [if], data = [none]
}
entry = null;
// depends on control dependency: [try], data = [none]
// FREE LESS-USED BUFFERS UNTIL THE FREE-MEMORY IS DOWN THE CONFIGURED MAX LIMIT
do {
if (totalMemory + bufferSize > maxMemory)
freeResources();
// RECOMPUTE THE POSITION AFTER REMOVING
fileEntries = bufferPoolPerFile.get(iFile);
position = searchEntry(fileEntries, iBeginOffset, iSize);
if (position > -1) {
// FOUND: THIS IS PRETTY STRANGE SINCE IT WASN'T FOUND!
entry = fileEntries.get(position);
// depends on control dependency: [if], data = [(position]
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
}
// LOAD THE PAGE
try {
entry = mapBuffer(iFile, iBeginOffset, bufferSize);
// depends on control dependency: [try], data = [none]
} catch (IllegalArgumentException e) {
throw e;
} catch (Exception e) {
// depends on control dependency: [catch], data = [none]
// REDUCE MAX MEMORY TO FORCE EMPTY BUFFERS
maxMemory = maxMemory * 90 / 100;
OLogManager.instance().warn(OMMapManagerOld.class, "Memory mapping error, try to reduce max memory to %d and retry...",
e, maxMemory);
}
// depends on control dependency: [catch], data = [none]
} while (entry == null && maxMemory > MIN_MEMORY);
if (entry == null || !entry.isValid())
throw new OIOException("You cannot access to the file portion " + iBeginOffset + "-" + iBeginOffset + iSize + " bytes");
totalMemory += bufferSize;
// depends on control dependency: [try], data = [none]
bufferPoolLRU.add(entry);
// depends on control dependency: [try], data = [none]
p = (position + 2) * -1;
// depends on control dependency: [try], data = [none]
if (p < 0)
p = 0;
if (fileEntries == null) {
// IN CASE THE CLEAN HAS REMOVED THE LIST
fileEntries = new ArrayList<OMMapBufferEntry>();
// depends on control dependency: [if], data = [none]
bufferPoolPerFile.put(iFile, fileEntries);
// depends on control dependency: [if], data = [none]
}
fileEntries.add(p, entry);
// depends on control dependency: [try], data = [none]
if (entry != null && entry.buffer != null)
return new OMMapBufferEntry[] { entry };
} finally {
if (entry != null) {
entry.acquireWriteLock();
// depends on control dependency: [if], data = [none]
if (iOperationType == OMMapManager.OPERATION_TYPE.WRITE)
entry.setDirty();
}
}
return null;
// depends on control dependency: [try], data = [none]
} finally {
lock.writeLock().unlock();
}
} } |
public class class_name {
@Reference(authors = "T. Ooura", //
title = "Gamma / Error Functions", booktitle = "", //
url = "http://www.kurims.kyoto-u.ac.jp/~ooura/gamerf.html", //
bibkey = "web/Ooura96")
public static double erfc(double x) {
if(Double.isNaN(x)) {
return Double.NaN;
}
if(Double.isInfinite(x)) {
return (x < 0.0) ? 2 : 0;
}
final double t = 3.97886080735226 / (Math.abs(x) + 3.97886080735226);
final double u = t - 0.5;
double y = (((//
((((((0.00127109764952614092 * u //
+ 1.19314022838340944e-4) * u //
- 0.003963850973605135) * u //
- 8.70779635317295828e-4) * u //
+ 0.00773672528313526668) * u //
+ 0.00383335126264887303) * u //
- 0.0127223813782122755) * u //
- 0.0133823644533460069) * u //
+ 0.0161315329733252248) * u //
+ 0.0390976845588484035) * u //
+ 0.00249367200053503304;
y = ((((((((((((y * u //
- 0.0838864557023001992) * u //
- 0.119463959964325415) * u //
+ 0.0166207924969367356) * u //
+ 0.357524274449531043) * u //
+ 0.805276408752910567) * u //
+ 1.18902982909273333) * u //
+ 1.37040217682338167) * u //
+ 1.31314653831023098) * u //
+ 1.07925515155856677) * u //
+ 0.774368199119538609) * u //
+ 0.490165080585318424) * u //
+ 0.275374741597376782) //
* t * FastMath.exp(-x * x);
return x < 0 ? 2 - y : y;
} } | public class class_name {
@Reference(authors = "T. Ooura", //
title = "Gamma / Error Functions", booktitle = "", //
url = "http://www.kurims.kyoto-u.ac.jp/~ooura/gamerf.html", //
bibkey = "web/Ooura96")
public static double erfc(double x) {
if(Double.isNaN(x)) {
return Double.NaN; // depends on control dependency: [if], data = [none]
}
if(Double.isInfinite(x)) {
return (x < 0.0) ? 2 : 0; // depends on control dependency: [if], data = [none]
}
final double t = 3.97886080735226 / (Math.abs(x) + 3.97886080735226);
final double u = t - 0.5;
double y = (((//
((((((0.00127109764952614092 * u //
+ 1.19314022838340944e-4) * u //
- 0.003963850973605135) * u //
- 8.70779635317295828e-4) * u //
+ 0.00773672528313526668) * u //
+ 0.00383335126264887303) * u //
- 0.0127223813782122755) * u //
- 0.0133823644533460069) * u //
+ 0.0161315329733252248) * u //
+ 0.0390976845588484035) * u //
+ 0.00249367200053503304;
y = ((((((((((((y * u //
- 0.0838864557023001992) * u //
- 0.119463959964325415) * u //
+ 0.0166207924969367356) * u //
+ 0.357524274449531043) * u //
+ 0.805276408752910567) * u //
+ 1.18902982909273333) * u //
+ 1.37040217682338167) * u //
+ 1.31314653831023098) * u //
+ 1.07925515155856677) * u //
+ 0.774368199119538609) * u //
+ 0.490165080585318424) * u //
+ 0.275374741597376782) //
* t * FastMath.exp(-x * x);
return x < 0 ? 2 - y : y;
} } |
public class class_name {
public BinarySparseDataset parse(String name, InputStream stream) throws IOException, ParseException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
BinarySparseDataset sparse = new BinarySparseDataset(name);
String line = reader.readLine();
if (line == null) {
throw new IOException("Empty data source.");
}
Set<Integer> items = new HashSet<>();
do {
line = line.trim();
if (line.isEmpty()) {
continue;
}
String[] s = line.split("\\s+");
items.clear();
for (int i = 0; i < s.length; i++) {
items.add(Integer.parseInt(s[i]));
}
int j = 0;
int[] point = new int[items.size()];
for (int i : items) {
point[j++] = i;
}
Arrays.sort(point);
sparse.add(point);
line = reader.readLine();
} while (line != null);
return sparse;
}
} } | public class class_name {
public BinarySparseDataset parse(String name, InputStream stream) throws IOException, ParseException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
BinarySparseDataset sparse = new BinarySparseDataset(name);
String line = reader.readLine();
if (line == null) {
throw new IOException("Empty data source.");
}
Set<Integer> items = new HashSet<>();
do {
line = line.trim();
if (line.isEmpty()) {
continue;
}
String[] s = line.split("\\s+");
items.clear();
for (int i = 0; i < s.length; i++) {
items.add(Integer.parseInt(s[i])); // depends on control dependency: [for], data = [i]
}
int j = 0;
int[] point = new int[items.size()];
for (int i : items) {
point[j++] = i; // depends on control dependency: [for], data = [i]
}
Arrays.sort(point);
sparse.add(point);
line = reader.readLine();
} while (line != null);
return sparse;
}
} } |
public class class_name {
public AccessLogOption prepareAccessLogOption(BootLogger logger, Properties props, List<String> readConfigList) { // null allowed
if (props == null) {
return null;
}
final String enabled = props.getProperty("tomcat.accesslog.enabled");
if (enabled == null || !isStringBooleanTrue(enabled)) {
return null;
}
logger.info("...Preparing tomcat access log: enabled=" + enabled + ", config=" + readConfigList);
final AccessLogOption option = new AccessLogOption();
doPrepareAccessLogOption(logger, props, "logDir", value -> option.logDir(value));
doPrepareAccessLogOption(logger, props, "filePrefix", value -> option.filePrefix(value));
doPrepareAccessLogOption(logger, props, "fileSuffix", value -> option.fileSuffix(value));
doPrepareAccessLogOption(logger, props, "fileDateFormat", value -> option.fileDateFormat(value));
doPrepareAccessLogOption(logger, props, "fileEncoding", value -> option.fileEncoding(value));
doPrepareAccessLogOption(logger, props, "formatPattern", value -> option.formatPattern(value));
doPrepareAccessLogOption(logger, props, "conditionIf", value -> option.conditionIf(value));
doPrepareAccessLogOption(logger, props, "conditionUnless", value -> option.conditionUnless(value));
return option;
} } | public class class_name {
public AccessLogOption prepareAccessLogOption(BootLogger logger, Properties props, List<String> readConfigList) { // null allowed
if (props == null) {
return null; // depends on control dependency: [if], data = [none]
}
final String enabled = props.getProperty("tomcat.accesslog.enabled");
if (enabled == null || !isStringBooleanTrue(enabled)) {
return null; // depends on control dependency: [if], data = [none]
}
logger.info("...Preparing tomcat access log: enabled=" + enabled + ", config=" + readConfigList);
final AccessLogOption option = new AccessLogOption();
doPrepareAccessLogOption(logger, props, "logDir", value -> option.logDir(value));
doPrepareAccessLogOption(logger, props, "filePrefix", value -> option.filePrefix(value));
doPrepareAccessLogOption(logger, props, "fileSuffix", value -> option.fileSuffix(value));
doPrepareAccessLogOption(logger, props, "fileDateFormat", value -> option.fileDateFormat(value));
doPrepareAccessLogOption(logger, props, "fileEncoding", value -> option.fileEncoding(value));
doPrepareAccessLogOption(logger, props, "formatPattern", value -> option.formatPattern(value));
doPrepareAccessLogOption(logger, props, "conditionIf", value -> option.conditionIf(value));
doPrepareAccessLogOption(logger, props, "conditionUnless", value -> option.conditionUnless(value));
return option;
} } |
public class class_name {
public static Point2d get2DCenter(Iterator<IAtom> atoms) {
IAtom atom;
double xsum = 0;
double ysum = 0;
int length = 0;
while (atoms.hasNext()) {
atom = (IAtom) atoms.next();
if (atom.getPoint2d() != null) {
xsum += atom.getPoint2d().x;
ysum += atom.getPoint2d().y;
}
++length;
}
return new Point2d(xsum / (double) length, ysum / (double) length);
} } | public class class_name {
public static Point2d get2DCenter(Iterator<IAtom> atoms) {
IAtom atom;
double xsum = 0;
double ysum = 0;
int length = 0;
while (atoms.hasNext()) {
atom = (IAtom) atoms.next(); // depends on control dependency: [while], data = [none]
if (atom.getPoint2d() != null) {
xsum += atom.getPoint2d().x; // depends on control dependency: [if], data = [none]
ysum += atom.getPoint2d().y; // depends on control dependency: [if], data = [none]
}
++length; // depends on control dependency: [while], data = [none]
}
return new Point2d(xsum / (double) length, ysum / (double) length);
} } |
public class class_name {
public void marshall(SubModule subModule, ProtocolMarshaller protocolMarshaller) {
if (subModule == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(subModule.getCommitId(), COMMITID_BINDING);
protocolMarshaller.marshall(subModule.getAbsolutePath(), ABSOLUTEPATH_BINDING);
protocolMarshaller.marshall(subModule.getRelativePath(), RELATIVEPATH_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(SubModule subModule, ProtocolMarshaller protocolMarshaller) {
if (subModule == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(subModule.getCommitId(), COMMITID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(subModule.getAbsolutePath(), ABSOLUTEPATH_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(subModule.getRelativePath(), RELATIVEPATH_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void marshall(UpdateFunctionConfigurationRequest updateFunctionConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
if (updateFunctionConfigurationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getFunctionName(), FUNCTIONNAME_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getRole(), ROLE_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getHandler(), HANDLER_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getDescription(), DESCRIPTION_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getTimeout(), TIMEOUT_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getMemorySize(), MEMORYSIZE_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getVpcConfig(), VPCCONFIG_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getEnvironment(), ENVIRONMENT_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getRuntime(), RUNTIME_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getDeadLetterConfig(), DEADLETTERCONFIG_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getKMSKeyArn(), KMSKEYARN_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getTracingConfig(), TRACINGCONFIG_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getRevisionId(), REVISIONID_BINDING);
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getLayers(), LAYERS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(UpdateFunctionConfigurationRequest updateFunctionConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
if (updateFunctionConfigurationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getFunctionName(), FUNCTIONNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getRole(), ROLE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getHandler(), HANDLER_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getTimeout(), TIMEOUT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getMemorySize(), MEMORYSIZE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getVpcConfig(), VPCCONFIG_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getEnvironment(), ENVIRONMENT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getRuntime(), RUNTIME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getDeadLetterConfig(), DEADLETTERCONFIG_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getKMSKeyArn(), KMSKEYARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getTracingConfig(), TRACINGCONFIG_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getRevisionId(), REVISIONID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateFunctionConfigurationRequest.getLayers(), LAYERS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void createOrUpdateColumnFamily(TableInfo tableInfo, KsDef ksDef) throws Exception
{
MetaDataHandler handler = new MetaDataHandler();
if (containsCompositeKey(tableInfo))
{
validateCompoundKey(tableInfo);
createOrUpdateUsingCQL3(tableInfo, ksDef);
// After successful schema operation, perform index creation.
createIndexUsingCql(tableInfo);
}
else if (containsCollectionColumns(tableInfo) || isCql3Enabled(tableInfo))
{
createOrUpdateUsingCQL3(tableInfo, ksDef);
createIndexUsingCql(tableInfo);
}
else
{
CfDef cf_def = handler.getTableMetadata(tableInfo);
try
{
cassandra_client.system_add_column_family(cf_def);
}
catch (InvalidRequestException irex)
{
updateExistingColumnFamily(tableInfo, ksDef, irex);
}
}
} } | public class class_name {
private void createOrUpdateColumnFamily(TableInfo tableInfo, KsDef ksDef) throws Exception
{
MetaDataHandler handler = new MetaDataHandler();
if (containsCompositeKey(tableInfo))
{
validateCompoundKey(tableInfo);
createOrUpdateUsingCQL3(tableInfo, ksDef);
// After successful schema operation, perform index creation.
createIndexUsingCql(tableInfo);
}
else if (containsCollectionColumns(tableInfo) || isCql3Enabled(tableInfo))
{
createOrUpdateUsingCQL3(tableInfo, ksDef);
createIndexUsingCql(tableInfo);
}
else
{
CfDef cf_def = handler.getTableMetadata(tableInfo);
try
{
cassandra_client.system_add_column_family(cf_def); // depends on control dependency: [try], data = [none]
}
catch (InvalidRequestException irex)
{
updateExistingColumnFamily(tableInfo, ksDef, irex);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
@Override
public synchronized void rollback(boolean chain) {
// tempActionHistory.add("rollback " + actionTimestamp);
if (isClosed) {
return;
}
if (!isTransaction) {
isReadOnly = isReadOnlyDefault;
isolationMode = isolationModeDefault;
return;
}
try {
database.logger.writeToLog(this, Tokens.T_ROLLBACK);
} catch (HsqlException e) {}
database.txManager.rollback(this);
endTransaction(false);
} } | public class class_name {
@Override
public synchronized void rollback(boolean chain) {
// tempActionHistory.add("rollback " + actionTimestamp);
if (isClosed) {
return; // depends on control dependency: [if], data = [none]
}
if (!isTransaction) {
isReadOnly = isReadOnlyDefault; // depends on control dependency: [if], data = [none]
isolationMode = isolationModeDefault; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
try {
database.logger.writeToLog(this, Tokens.T_ROLLBACK); // depends on control dependency: [try], data = [none]
} catch (HsqlException e) {} // depends on control dependency: [catch], data = [none]
database.txManager.rollback(this);
endTransaction(false);
} } |
public class class_name {
public static <C extends Comparable> boolean isLessThan(final Range<C> range, final C value) {
checkNotNull(range);
checkNotNull(value);
if (!range.hasUpperBound()) {
return false;
}
if (range.upperBoundType() == BoundType.OPEN && range.upperEndpoint().equals(value)) {
return true;
}
return range.upperEndpoint().compareTo(value) < 0;
} } | public class class_name {
public static <C extends Comparable> boolean isLessThan(final Range<C> range, final C value) {
checkNotNull(range);
checkNotNull(value);
if (!range.hasUpperBound()) {
return false; // depends on control dependency: [if], data = [none]
}
if (range.upperBoundType() == BoundType.OPEN && range.upperEndpoint().equals(value)) {
return true; // depends on control dependency: [if], data = [none]
}
return range.upperEndpoint().compareTo(value) < 0;
} } |
public class class_name {
public static String getExtensionFromMimeType(String mimeType) {
if (mimeType == null || mimeType.length() == 0) {
return null;
}
return mimeTypeToExtensionMap.get(mimeType);
} } | public class class_name {
public static String getExtensionFromMimeType(String mimeType) {
if (mimeType == null || mimeType.length() == 0) {
return null; // depends on control dependency: [if], data = [none]
}
return mimeTypeToExtensionMap.get(mimeType);
} } |
public class class_name {
public static String breakTextAtWords(String text, String insert, int lineSize) {
StringBuilder buff = new StringBuilder();
StringTokenizer stoker = new StringTokenizer(text);
int lineCount = 0;
while (stoker.hasMoreTokens()) {
String tok = stoker.nextToken();
if (tok.length() + lineCount >= lineSize) {
buff.append(insert);
lineCount = 0;
}
buff.append(tok);
buff.append(" ");
lineCount += tok.length() + 1;
}
return buff.toString();
} } | public class class_name {
public static String breakTextAtWords(String text, String insert, int lineSize) {
StringBuilder buff = new StringBuilder();
StringTokenizer stoker = new StringTokenizer(text);
int lineCount = 0;
while (stoker.hasMoreTokens()) {
String tok = stoker.nextToken();
if (tok.length() + lineCount >= lineSize) {
buff.append(insert);
// depends on control dependency: [if], data = [none]
lineCount = 0;
// depends on control dependency: [if], data = [none]
}
buff.append(tok);
// depends on control dependency: [while], data = [none]
buff.append(" ");
// depends on control dependency: [while], data = [none]
lineCount += tok.length() + 1;
// depends on control dependency: [while], data = [none]
}
return buff.toString();
} } |
public class class_name {
private Producer<EncodedImage> newEncodedCacheMultiplexToTranscodeSequence(
Producer<EncodedImage> inputProducer) {
if (WebpSupportStatus.sIsWebpSupportRequired &&
(!mWebpSupportEnabled || WebpSupportStatus.sWebpBitmapFactory == null)) {
inputProducer = mProducerFactory.newWebpTranscodeProducer(inputProducer);
}
if (mDiskCacheEnabled) {
inputProducer = newDiskCacheSequence(inputProducer);
}
EncodedMemoryCacheProducer encodedMemoryCacheProducer =
mProducerFactory.newEncodedMemoryCacheProducer(inputProducer);
return mProducerFactory.newEncodedCacheKeyMultiplexProducer(encodedMemoryCacheProducer);
} } | public class class_name {
private Producer<EncodedImage> newEncodedCacheMultiplexToTranscodeSequence(
Producer<EncodedImage> inputProducer) {
if (WebpSupportStatus.sIsWebpSupportRequired &&
(!mWebpSupportEnabled || WebpSupportStatus.sWebpBitmapFactory == null)) {
inputProducer = mProducerFactory.newWebpTranscodeProducer(inputProducer); // depends on control dependency: [if], data = [none]
}
if (mDiskCacheEnabled) {
inputProducer = newDiskCacheSequence(inputProducer); // depends on control dependency: [if], data = [none]
}
EncodedMemoryCacheProducer encodedMemoryCacheProducer =
mProducerFactory.newEncodedMemoryCacheProducer(inputProducer);
return mProducerFactory.newEncodedCacheKeyMultiplexProducer(encodedMemoryCacheProducer);
} } |
public class class_name {
final public void And() throws ParseException {
Equality();
label_8:
while (true) {
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case AND0:
case AND1:
;
break;
default:
jj_la1[12] = jj_gen;
break label_8;
}
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case AND0:
jj_consume_token(AND0);
break;
case AND1:
jj_consume_token(AND1);
break;
default:
jj_la1[13] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
AstAnd jjtn001 = new AstAnd(JJTAND);
boolean jjtc001 = true;
jjtree.openNodeScope(jjtn001);
try {
Equality();
} catch (Throwable jjte001) {
if (jjtc001) {
jjtree.clearNodeScope(jjtn001);
jjtc001 = false;
} else {
jjtree.popNode();
}
if (jjte001 instanceof RuntimeException) {
{if (true) throw (RuntimeException)jjte001;}
}
if (jjte001 instanceof ParseException) {
{if (true) throw (ParseException)jjte001;}
}
{if (true) throw (Error)jjte001;}
} finally {
if (jjtc001) {
jjtree.closeNodeScope(jjtn001, 2);
}
}
}
} } | public class class_name {
final public void And() throws ParseException {
Equality();
label_8:
while (true) {
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case AND0:
case AND1:
;
break;
default:
jj_la1[12] = jj_gen;
break label_8;
}
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case AND0:
jj_consume_token(AND0);
break;
case AND1:
jj_consume_token(AND1);
break;
default:
jj_la1[13] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
AstAnd jjtn001 = new AstAnd(JJTAND);
boolean jjtc001 = true;
jjtree.openNodeScope(jjtn001);
try {
Equality();
} catch (Throwable jjte001) {
if (jjtc001) {
jjtree.clearNodeScope(jjtn001); // depends on control dependency: [if], data = [none]
jjtc001 = false; // depends on control dependency: [if], data = [none]
} else {
jjtree.popNode(); // depends on control dependency: [if], data = [none]
}
if (jjte001 instanceof RuntimeException) {
{if (true) throw (RuntimeException)jjte001;}
}
if (jjte001 instanceof ParseException) {
{if (true) throw (ParseException)jjte001;}
}
{if (true) throw (Error)jjte001;}
} finally {
if (jjtc001) {
jjtree.closeNodeScope(jjtn001, 2); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
protected static boolean fetchAnonymousAccessTokenIfNeeded(final String userId, final ResponseHandler onFetchedHandler) {
if (!WonderPush.isInitialized()) {
// Note: Could use WonderPush.safeDefer() here but as we require consent to proceed,
// let's use WonderPush.safeDeferWithConsent() to additionally passively wait for SDK initialization.
WonderPush.safeDeferWithConsent(new Runnable() {
@Override
public void run() {
if (!fetchAnonymousAccessTokenIfNeeded(userId, onFetchedHandler)) {
// Call the handler anyway
onFetchedHandler.onSuccess(null);
}
}
}, null);
return true; // true: the handler will be called
}
if (null == WonderPushConfiguration.getAccessToken()) {
fetchAnonymousAccessToken(userId, onFetchedHandler);
return true;
}
return false;
} } | public class class_name {
protected static boolean fetchAnonymousAccessTokenIfNeeded(final String userId, final ResponseHandler onFetchedHandler) {
if (!WonderPush.isInitialized()) {
// Note: Could use WonderPush.safeDefer() here but as we require consent to proceed,
// let's use WonderPush.safeDeferWithConsent() to additionally passively wait for SDK initialization.
WonderPush.safeDeferWithConsent(new Runnable() {
@Override
public void run() {
if (!fetchAnonymousAccessTokenIfNeeded(userId, onFetchedHandler)) {
// Call the handler anyway
onFetchedHandler.onSuccess(null); // depends on control dependency: [if], data = [none]
}
}
}, null); // depends on control dependency: [if], data = [none]
return true; // true: the handler will be called // depends on control dependency: [if], data = [none]
}
if (null == WonderPushConfiguration.getAccessToken()) {
fetchAnonymousAccessToken(userId, onFetchedHandler); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
public static Stream<Record> decode(final Stream<Statement> stream,
@Nullable final Iterable<? extends URI> types, @Nullable final Boolean chunked) {
Preconditions.checkNotNull(stream);
if (types != null) {
stream.setProperty("types", types);
}
if (chunked != null) {
stream.setProperty("chunked", chunked);
}
return stream.transform(null, new Function<Handler<Record>, Handler<Statement>>() {
@SuppressWarnings("unchecked")
@Override
public Handler<Statement> apply(final Handler<Record> handler) {
final Iterable<? extends URI> types = stream.getProperty("types", Iterable.class);
final Boolean chunked = stream.getProperty("chunked", Boolean.class);
return new Decoder(handler, types, chunked);
}
});
} } | public class class_name {
public static Stream<Record> decode(final Stream<Statement> stream,
@Nullable final Iterable<? extends URI> types, @Nullable final Boolean chunked) {
Preconditions.checkNotNull(stream);
if (types != null) {
stream.setProperty("types", types); // depends on control dependency: [if], data = [none]
}
if (chunked != null) {
stream.setProperty("chunked", chunked); // depends on control dependency: [if], data = [none]
}
return stream.transform(null, new Function<Handler<Record>, Handler<Statement>>() {
@SuppressWarnings("unchecked")
@Override
public Handler<Statement> apply(final Handler<Record> handler) {
final Iterable<? extends URI> types = stream.getProperty("types", Iterable.class);
final Boolean chunked = stream.getProperty("chunked", Boolean.class);
return new Decoder(handler, types, chunked);
}
});
} } |
public class class_name {
@Override
public void reset(boolean shuffle) {
this.position.set(0);
if (shuffle) {
logger.debug("Calling shuffle() on entries...");
// https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#The_modern_algorithm
for (int i = order.length - 1; i > 0; i--) {
int j = rng.nextInt(i + 1);
int temp = order[j];
order[j] = order[i];
order[i] = temp;
}
}
} } | public class class_name {
@Override
public void reset(boolean shuffle) {
this.position.set(0);
if (shuffle) {
logger.debug("Calling shuffle() on entries..."); // depends on control dependency: [if], data = [none]
// https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#The_modern_algorithm
for (int i = order.length - 1; i > 0; i--) {
int j = rng.nextInt(i + 1);
int temp = order[j];
order[j] = order[i]; // depends on control dependency: [for], data = [i]
order[i] = temp; // depends on control dependency: [for], data = [i]
}
}
} } |
public class class_name {
public static <S, I, O> LYResult<S, I, O> compute(final MealyMachine<S, I, ?, O> automaton,
final Alphabet<I> input) {
final SplitTreeResult<S, I, O> str = computeSplitTree(automaton, input);
if (str.isPresent()) {
final Set<S> states = new HashSet<>(automaton.getStates());
return new LYResult<>(extractADS(automaton,
str.get(),
states,
states.stream()
.collect(Collectors.toMap(Function.identity(), Function.identity())),
null));
}
return new LYResult<>(str.getIndistinguishableStates());
} } | public class class_name {
public static <S, I, O> LYResult<S, I, O> compute(final MealyMachine<S, I, ?, O> automaton,
final Alphabet<I> input) {
final SplitTreeResult<S, I, O> str = computeSplitTree(automaton, input);
if (str.isPresent()) {
final Set<S> states = new HashSet<>(automaton.getStates());
return new LYResult<>(extractADS(automaton,
str.get(),
states,
states.stream()
.collect(Collectors.toMap(Function.identity(), Function.identity())),
null)); // depends on control dependency: [if], data = [none]
}
return new LYResult<>(str.getIndistinguishableStates());
} } |
public class class_name {
static HSL toHSL( double color ) {
long argb = Double.doubleToRawLongBits( color );
double a = alpha( color );
double r = clamp( ((argb >> 32) & 0xFFFF) / (double)0xFF00 );
double g = clamp( ((argb >> 16) & 0xFFFF) / (double)0xFF00 );
double b = clamp( ((argb >> 0) & 0xFFFF) / (double)0xFF00 );
double max = Math.max( Math.max( r, g ), b );
double min = Math.min( Math.min( r, g ), b );
double h, s, l = (max + min) / 2, d = max - min;
if( max == min ) {
h = s = 0;
} else {
s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
if( max == r ) {
h = (g - b) / d + (g < b ? 6 : 0);
} else if( max == g ) {
h = (b - r) / d + 2;
} else {
h = (r - g) / d + 4;
}
h /= 6;
}
return new HSL( h * 360, s, l, a );
} } | public class class_name {
static HSL toHSL( double color ) {
long argb = Double.doubleToRawLongBits( color );
double a = alpha( color );
double r = clamp( ((argb >> 32) & 0xFFFF) / (double)0xFF00 );
double g = clamp( ((argb >> 16) & 0xFFFF) / (double)0xFF00 );
double b = clamp( ((argb >> 0) & 0xFFFF) / (double)0xFF00 );
double max = Math.max( Math.max( r, g ), b );
double min = Math.min( Math.min( r, g ), b );
double h, s, l = (max + min) / 2, d = max - min;
if( max == min ) {
h = s = 0; // depends on control dependency: [if], data = [none]
} else {
s = l > 0.5 ? d / (2 - max - min) : d / (max + min); // depends on control dependency: [if], data = [none]
if( max == r ) {
h = (g - b) / d + (g < b ? 6 : 0); // depends on control dependency: [if], data = [none]
} else if( max == g ) {
h = (b - r) / d + 2; // depends on control dependency: [if], data = [none]
} else {
h = (r - g) / d + 4; // depends on control dependency: [if], data = [none]
}
h /= 6; // depends on control dependency: [if], data = [none]
}
return new HSL( h * 360, s, l, a );
} } |
public class class_name {
public double computeAnomalySupervised(RuleClassification rl, int ruleIndex, Instance inst) { //Not supervised
ArrayList<Integer> caseAnomalyTemp = new ArrayList<Integer>();
ArrayList<ArrayList<Double>> AttribAnomalyStatisticTemp2 = new ArrayList<ArrayList<Double>>();
double D = 0.0;
double N = 0.0;
if (rl.instancesSeen > this.anomalyNumInstThresholdOption.getValue() && this.anomalyDetectionOption.isSet()) {
for (int x = 0; x < inst.numAttributes() - 1; x++) {
if(!inst.isMissing(x)){
ArrayList<Double> AttribAnomalyStatisticTemp = new ArrayList<Double>();
if (inst.attribute(x).isNumeric()) { //Numeric Attributes
if((rl.instancesSeen - rl.attributeMissingValues.getValue(x)) > 30){
double mean = computeMean((double)rl.attributeStatisticsSupervised.get(x).get((int)inst.classValue()), (int)rl.obserClassDistrib.getValue((int)inst.classValue()));
double sd = computeSD((double)rl.squaredAttributeStatisticsSupervised.get(x).get((int)inst.classValue()), (double)rl.attributeStatisticsSupervised.get(x).get((int)inst.classValue()), (int)rl.obserClassDistrib.getValue((int)inst.classValue()));
double probability = computeProbability(mean, sd, inst.value(x));
if(probability!=0.0) {
D = D + Math.log(probability);
if(probability < this.probabilityThresholdOption.getValue()){ //0.10
N = N + Math.log(probability);
AttribAnomalyStatisticTemp.add((double)x);
AttribAnomalyStatisticTemp.add(inst.value(x));
AttribAnomalyStatisticTemp.add(mean);
AttribAnomalyStatisticTemp.add(sd);
AttribAnomalyStatisticTemp.add(probability);
AttribAnomalyStatisticTemp2.add(AttribAnomalyStatisticTemp);
}
}
}
}else { //Nominal
double attribVal = inst.value(x); //Attribute value
double classVal = inst.classValue(); //Attribute value
double probability = rl.observers.get(x).probabilityOfAttributeValueGivenClass(attribVal, (int)classVal);
if(probability!=0.0) {
D = D + Math.log(probability);
if(probability < this.probabilityThresholdOption.getValue()){ //0.10
N = N + Math.log(probability);
AttribAnomalyStatisticTemp.add((double)x);
AttribAnomalyStatisticTemp.add(inst.value(x));
AttribAnomalyStatisticTemp.add(probability);
AttribAnomalyStatisticTemp2.add(AttribAnomalyStatisticTemp);
}
}
}
}
}
}
double anomaly=0.0;
if(D !=0){
anomaly = Math.abs(N/D);
}
if(anomaly >= this.anomalyProbabilityThresholdOption.getValue()){
caseAnomalyTemp.add(this.numInstance);
double val = anomaly * 100;
caseAnomalyTemp.add((int)val);
this.caseAnomalySupervised.add(caseAnomalyTemp);
RuleClassification y = new RuleClassification(this.ruleSet.get(ruleIndex));
this.ruleSetAnomaliesSupervised.add(y);
this.ruleAnomaliesIndexSupervised.add(ruleIndex + 1);
this.ruleAttribAnomalyStatisticsSupervised.add(AttribAnomalyStatisticTemp2);
}
return anomaly;
} } | public class class_name {
public double computeAnomalySupervised(RuleClassification rl, int ruleIndex, Instance inst) { //Not supervised
ArrayList<Integer> caseAnomalyTemp = new ArrayList<Integer>();
ArrayList<ArrayList<Double>> AttribAnomalyStatisticTemp2 = new ArrayList<ArrayList<Double>>();
double D = 0.0;
double N = 0.0;
if (rl.instancesSeen > this.anomalyNumInstThresholdOption.getValue() && this.anomalyDetectionOption.isSet()) {
for (int x = 0; x < inst.numAttributes() - 1; x++) {
if(!inst.isMissing(x)){
ArrayList<Double> AttribAnomalyStatisticTemp = new ArrayList<Double>();
if (inst.attribute(x).isNumeric()) { //Numeric Attributes
if((rl.instancesSeen - rl.attributeMissingValues.getValue(x)) > 30){
double mean = computeMean((double)rl.attributeStatisticsSupervised.get(x).get((int)inst.classValue()), (int)rl.obserClassDistrib.getValue((int)inst.classValue()));
double sd = computeSD((double)rl.squaredAttributeStatisticsSupervised.get(x).get((int)inst.classValue()), (double)rl.attributeStatisticsSupervised.get(x).get((int)inst.classValue()), (int)rl.obserClassDistrib.getValue((int)inst.classValue()));
double probability = computeProbability(mean, sd, inst.value(x));
if(probability!=0.0) {
D = D + Math.log(probability); // depends on control dependency: [if], data = [(probability]
if(probability < this.probabilityThresholdOption.getValue()){ //0.10
N = N + Math.log(probability); // depends on control dependency: [if], data = [(probability]
AttribAnomalyStatisticTemp.add((double)x); // depends on control dependency: [if], data = [none]
AttribAnomalyStatisticTemp.add(inst.value(x)); // depends on control dependency: [if], data = [none]
AttribAnomalyStatisticTemp.add(mean); // depends on control dependency: [if], data = [none]
AttribAnomalyStatisticTemp.add(sd); // depends on control dependency: [if], data = [none]
AttribAnomalyStatisticTemp.add(probability); // depends on control dependency: [if], data = [(probability]
AttribAnomalyStatisticTemp2.add(AttribAnomalyStatisticTemp); // depends on control dependency: [if], data = [none]
}
}
}
}else { //Nominal
double attribVal = inst.value(x); //Attribute value
double classVal = inst.classValue(); //Attribute value
double probability = rl.observers.get(x).probabilityOfAttributeValueGivenClass(attribVal, (int)classVal);
if(probability!=0.0) {
D = D + Math.log(probability); // depends on control dependency: [if], data = [(probability]
if(probability < this.probabilityThresholdOption.getValue()){ //0.10
N = N + Math.log(probability); // depends on control dependency: [if], data = [(probability]
AttribAnomalyStatisticTemp.add((double)x); // depends on control dependency: [if], data = [none]
AttribAnomalyStatisticTemp.add(inst.value(x)); // depends on control dependency: [if], data = [none]
AttribAnomalyStatisticTemp.add(probability); // depends on control dependency: [if], data = [(probability]
AttribAnomalyStatisticTemp2.add(AttribAnomalyStatisticTemp); // depends on control dependency: [if], data = [none]
}
}
}
}
}
}
double anomaly=0.0;
if(D !=0){
anomaly = Math.abs(N/D); // depends on control dependency: [if], data = [none]
}
if(anomaly >= this.anomalyProbabilityThresholdOption.getValue()){
caseAnomalyTemp.add(this.numInstance); // depends on control dependency: [if], data = [none]
double val = anomaly * 100;
caseAnomalyTemp.add((int)val); // depends on control dependency: [if], data = [none]
this.caseAnomalySupervised.add(caseAnomalyTemp); // depends on control dependency: [if], data = [none]
RuleClassification y = new RuleClassification(this.ruleSet.get(ruleIndex));
this.ruleSetAnomaliesSupervised.add(y); // depends on control dependency: [if], data = [none]
this.ruleAnomaliesIndexSupervised.add(ruleIndex + 1); // depends on control dependency: [if], data = [none]
this.ruleAttribAnomalyStatisticsSupervised.add(AttribAnomalyStatisticTemp2); // depends on control dependency: [if], data = [none]
}
return anomaly;
} } |
public class class_name {
public ServiceCall<ListCollectionFieldsResponse> listCollectionFields(
ListCollectionFieldsOptions listCollectionFieldsOptions) {
Validator.notNull(listCollectionFieldsOptions, "listCollectionFieldsOptions cannot be null");
String[] pathSegments = { "v1/environments", "collections", "fields" };
String[] pathParameters = { listCollectionFieldsOptions.environmentId(), listCollectionFieldsOptions
.collectionId() };
RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments,
pathParameters));
builder.query("version", versionDate);
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("discovery", "v1", "listCollectionFields");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue());
}
builder.header("Accept", "application/json");
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(ListCollectionFieldsResponse.class));
} } | public class class_name {
public ServiceCall<ListCollectionFieldsResponse> listCollectionFields(
ListCollectionFieldsOptions listCollectionFieldsOptions) {
Validator.notNull(listCollectionFieldsOptions, "listCollectionFieldsOptions cannot be null");
String[] pathSegments = { "v1/environments", "collections", "fields" };
String[] pathParameters = { listCollectionFieldsOptions.environmentId(), listCollectionFieldsOptions
.collectionId() };
RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments,
pathParameters));
builder.query("version", versionDate);
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("discovery", "v1", "listCollectionFields");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue()); // depends on control dependency: [for], data = [header]
}
builder.header("Accept", "application/json");
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(ListCollectionFieldsResponse.class));
} } |
public class class_name {
private static boolean isValidStandardOperator(final Element e) {
if (e.getKind() == ElementKind.FIELD) {
return e.getModifiers().containsAll(EnumSet.of(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL));
}
if (e.getKind() == ElementKind.METHOD) {
return e.getModifiers().containsAll(EnumSet.of(Modifier.PUBLIC, Modifier.STATIC))
&& ((ExecutableElement) e).getParameters().isEmpty();
}
if (e.getKind() == ElementKind.CLASS) {
if (e.getModifiers().contains(Modifier.ABSTRACT) || findDefaultConstructor((TypeElement) e) == null) {
return false;
}
Element current = e;
while (current.getKind() == ElementKind.CLASS) {
final TypeElement t = (TypeElement) current;
if (t.getNestingKind() == NestingKind.TOP_LEVEL) {
return true;
}
if (t.getNestingKind() == NestingKind.MEMBER && t.getModifiers().contains(Modifier.STATIC)) {
current = t.getEnclosingElement();
continue;
}
break;
}
}
return false;
} } | public class class_name {
private static boolean isValidStandardOperator(final Element e) {
if (e.getKind() == ElementKind.FIELD) {
return e.getModifiers().containsAll(EnumSet.of(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL));
// depends on control dependency: [if], data = [none]
}
if (e.getKind() == ElementKind.METHOD) {
return e.getModifiers().containsAll(EnumSet.of(Modifier.PUBLIC, Modifier.STATIC))
&& ((ExecutableElement) e).getParameters().isEmpty();
// depends on control dependency: [if], data = [none]
}
if (e.getKind() == ElementKind.CLASS) {
if (e.getModifiers().contains(Modifier.ABSTRACT) || findDefaultConstructor((TypeElement) e) == null) {
return false;
// depends on control dependency: [if], data = [none]
}
Element current = e;
while (current.getKind() == ElementKind.CLASS) {
final TypeElement t = (TypeElement) current;
if (t.getNestingKind() == NestingKind.TOP_LEVEL) {
return true;
// depends on control dependency: [if], data = [none]
}
if (t.getNestingKind() == NestingKind.MEMBER && t.getModifiers().contains(Modifier.STATIC)) {
current = t.getEnclosingElement();
// depends on control dependency: [if], data = [none]
continue;
}
break;
}
}
return false;
} } |
public class class_name {
public String getPoolName() {
if (CmsStringUtil.isEmpty(m_poolName)) {
// use default pool as pool name
m_poolName = OpenCms.getSqlManager().getDefaultDbPoolName();
}
return m_poolName;
} } | public class class_name {
public String getPoolName() {
if (CmsStringUtil.isEmpty(m_poolName)) {
// use default pool as pool name
m_poolName = OpenCms.getSqlManager().getDefaultDbPoolName(); // depends on control dependency: [if], data = [none]
}
return m_poolName;
} } |
public class class_name {
public void marshall(ListClustersRequest listClustersRequest, ProtocolMarshaller protocolMarshaller) {
if (listClustersRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listClustersRequest.getClusterNameFilter(), CLUSTERNAMEFILTER_BINDING);
protocolMarshaller.marshall(listClustersRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(listClustersRequest.getNextToken(), NEXTTOKEN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(ListClustersRequest listClustersRequest, ProtocolMarshaller protocolMarshaller) {
if (listClustersRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listClustersRequest.getClusterNameFilter(), CLUSTERNAMEFILTER_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listClustersRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listClustersRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
Map<Archive, Set<Archive>> dependences() {
Map<Archive, Set<Archive>> map = new HashMap<>();
parsedArchives.values().stream()
.flatMap(Deque::stream)
.filter(a -> !a.isEmpty())
.forEach(source -> {
Set<Archive> deps = getDependences(source).collect(toSet());
if (!deps.isEmpty()) {
map.put(source, deps);
}
});
return map;
} } | public class class_name {
Map<Archive, Set<Archive>> dependences() {
Map<Archive, Set<Archive>> map = new HashMap<>();
parsedArchives.values().stream()
.flatMap(Deque::stream)
.filter(a -> !a.isEmpty())
.forEach(source -> {
Set<Archive> deps = getDependences(source).collect(toSet());
if (!deps.isEmpty()) {
map.put(source, deps); // depends on control dependency: [if], data = [none]
}
});
return map;
} } |
public class class_name {
private void createTasksForTable(Table table,
List<DataTargetInfo> dataTargets,
AtomicInteger numTables,
SnapshotRegistry.Snapshot snapshotRecord,
List<Long> hsids)
{
// srcHSId -> tasks
Multimap<Long, SnapshotTableTask> tasks = ArrayListMultimap.create();
for (DataTargetInfo targetInfo : dataTargets) {
if (table.getIsreplicated() && !targetInfo.dataTarget.isReplicatedTableTarget()) {
// For replicated tables only the lowest site's dataTarget actually does any work.
// The other dataTargets just need to be tracked so we send EOF when all streams have finished.
m_targets.add(targetInfo.dataTarget);
continue;
}
final SnapshotTableTask task = createSingleTableTask(table, targetInfo, numTables, snapshotRecord);
SNAP_LOG.debug("ADDING TASK for streamSnapshot: " + task);
tasks.put(targetInfo.srcHSId, task);
}
placeTasksForTable(table, tasks, hsids);
} } | public class class_name {
private void createTasksForTable(Table table,
List<DataTargetInfo> dataTargets,
AtomicInteger numTables,
SnapshotRegistry.Snapshot snapshotRecord,
List<Long> hsids)
{
// srcHSId -> tasks
Multimap<Long, SnapshotTableTask> tasks = ArrayListMultimap.create();
for (DataTargetInfo targetInfo : dataTargets) {
if (table.getIsreplicated() && !targetInfo.dataTarget.isReplicatedTableTarget()) {
// For replicated tables only the lowest site's dataTarget actually does any work.
// The other dataTargets just need to be tracked so we send EOF when all streams have finished.
m_targets.add(targetInfo.dataTarget); // depends on control dependency: [if], data = [none]
continue;
}
final SnapshotTableTask task = createSingleTableTask(table, targetInfo, numTables, snapshotRecord);
SNAP_LOG.debug("ADDING TASK for streamSnapshot: " + task); // depends on control dependency: [for], data = [none]
tasks.put(targetInfo.srcHSId, task); // depends on control dependency: [for], data = [targetInfo]
}
placeTasksForTable(table, tasks, hsids);
} } |
public class class_name {
private static Platform determinePlatformReflectively() {
if (System.getProperty("com.google.appengine.runtime.environment") == null) {
return Platform.STANDARD;
}
// GAE_LONG_APP_ID is only set in the GAE Flexible Environment, where we want standard threading
if (System.getenv("GAE_LONG_APP_ID") != null) {
return Platform.APPENGINE_FLEXIBLE;
}
try {
// If the current environment is null, we're not inside AppEngine.
boolean isInsideAppengine = Class.forName("com.google.apphosting.api.ApiProxy")
.getMethod("getCurrentEnvironment")
.invoke(null) != null;
return isInsideAppengine ? Platform.APPENGINE_STANDARD : Platform.STANDARD;
} catch (ClassNotFoundException e) {
// If ApiProxy doesn't exist, we're not on AppEngine at all.
return Platform.STANDARD;
} catch (InvocationTargetException e) {
// If ApiProxy throws an exception, we're not in a proper AppEngine environment.
return Platform.STANDARD;
} catch (IllegalAccessException e) {
// If the method isn't accessible, we're not on a supported version of AppEngine;
return Platform.STANDARD;
} catch (NoSuchMethodException e) {
// If the method doesn't exist, we're not on a supported version of AppEngine;
return Platform.STANDARD;
}
} } | public class class_name {
private static Platform determinePlatformReflectively() {
if (System.getProperty("com.google.appengine.runtime.environment") == null) {
return Platform.STANDARD; // depends on control dependency: [if], data = [none]
}
// GAE_LONG_APP_ID is only set in the GAE Flexible Environment, where we want standard threading
if (System.getenv("GAE_LONG_APP_ID") != null) {
return Platform.APPENGINE_FLEXIBLE; // depends on control dependency: [if], data = [none]
}
try {
// If the current environment is null, we're not inside AppEngine.
boolean isInsideAppengine = Class.forName("com.google.apphosting.api.ApiProxy")
.getMethod("getCurrentEnvironment")
.invoke(null) != null;
return isInsideAppengine ? Platform.APPENGINE_STANDARD : Platform.STANDARD; // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
// If ApiProxy doesn't exist, we're not on AppEngine at all.
return Platform.STANDARD;
} catch (InvocationTargetException e) { // depends on control dependency: [catch], data = [none]
// If ApiProxy throws an exception, we're not in a proper AppEngine environment.
return Platform.STANDARD;
} catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
// If the method isn't accessible, we're not on a supported version of AppEngine;
return Platform.STANDARD;
} catch (NoSuchMethodException e) { // depends on control dependency: [catch], data = [none]
// If the method doesn't exist, we're not on a supported version of AppEngine;
return Platform.STANDARD;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
protected Object getOptionValue(Queue<String> argq) {
String name = argq.remove();
File file = new File(name);
if (mustExist && !file.isFile()) {
System.err.println("File does not exist: '" + name + "'");
System.exit(1);
}
return file;
} } | public class class_name {
@Override
protected Object getOptionValue(Queue<String> argq) {
String name = argq.remove();
File file = new File(name);
if (mustExist && !file.isFile()) {
System.err.println("File does not exist: '" + name + "'"); // depends on control dependency: [if], data = [none]
System.exit(1); // depends on control dependency: [if], data = [none]
}
return file;
} } |
public class class_name {
@SuppressWarnings("UnusedDeclaration")
public void init() throws Exception {
initBuilderSpecific();
resetFields();
if (!UserPluginInfo.NO_PLUGIN_KEY.equals(getSelectedStagingPluginName())) {
PluginSettings selectedStagingPluginSettings = getSelectedStagingPlugin();
try {
stagingStrategy = getArtifactoryServer().getStagingStrategy(selectedStagingPluginSettings, Util.rawEncode(project.getName()), project);
} catch (Exception e) {
log.log(Level.WARNING, "Failed to obtain staging strategy: " + e.getMessage(), e);
strategyRequestFailed = true;
strategyRequestErrorMessage = "Failed to obtain staging strategy '" +
selectedStagingPluginSettings.getPluginName() + "': " + e.getMessage() +
".\nPlease review the log for further information.";
stagingStrategy = null;
}
strategyPluginExists = (stagingStrategy != null) && !stagingStrategy.isEmpty();
}
prepareDefaultVersioning();
prepareDefaultGlobalModule();
prepareDefaultModules();
prepareDefaultVcsSettings();
prepareDefaultPromotionConfig();
} } | public class class_name {
@SuppressWarnings("UnusedDeclaration")
public void init() throws Exception {
initBuilderSpecific();
resetFields();
if (!UserPluginInfo.NO_PLUGIN_KEY.equals(getSelectedStagingPluginName())) {
PluginSettings selectedStagingPluginSettings = getSelectedStagingPlugin();
try {
stagingStrategy = getArtifactoryServer().getStagingStrategy(selectedStagingPluginSettings, Util.rawEncode(project.getName()), project); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
log.log(Level.WARNING, "Failed to obtain staging strategy: " + e.getMessage(), e);
strategyRequestFailed = true;
strategyRequestErrorMessage = "Failed to obtain staging strategy '" +
selectedStagingPluginSettings.getPluginName() + "': " + e.getMessage() +
".\nPlease review the log for further information.";
stagingStrategy = null;
} // depends on control dependency: [catch], data = [none]
strategyPluginExists = (stagingStrategy != null) && !stagingStrategy.isEmpty();
}
prepareDefaultVersioning();
prepareDefaultGlobalModule();
prepareDefaultModules();
prepareDefaultVcsSettings();
prepareDefaultPromotionConfig();
} } |
public class class_name {
public void mergeDuplicate() {
for(int i = 0; i < labels.size(); i++)
for(int j = i + 1; j < labels.size(); j++){
T tagi = labels.get(i);
T tagj = labels.get(j);
if(tagi.equals(tagj)){
scores.set(i, scores.get(i) + scores.get(j));
labels.remove(j);
scores.remove(j);
j--;
}
}
} } | public class class_name {
public void mergeDuplicate() {
for(int i = 0; i < labels.size(); i++)
for(int j = i + 1; j < labels.size(); j++){
T tagi = labels.get(i);
T tagj = labels.get(j);
if(tagi.equals(tagj)){
scores.set(i, scores.get(i) + scores.get(j));
// depends on control dependency: [if], data = [none]
labels.remove(j);
// depends on control dependency: [if], data = [none]
scores.remove(j);
// depends on control dependency: [if], data = [none]
j--;
// depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public Date getStart()
{
Date result = (Date) getCachedValue(AssignmentField.START);
if (result == null)
{
result = getTask().getStart();
}
return result;
} } | public class class_name {
public Date getStart()
{
Date result = (Date) getCachedValue(AssignmentField.START);
if (result == null)
{
result = getTask().getStart(); // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
@Override
public void ready(VirtualConnection inVC) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.entry(tc, "ready, vc=" + getVCHash());
}
// Double check for error condition where close already happened. Protective measure.
if (!closed && FrameworkState.isValid()) {
try {
// Outbound connections took care of sslContext and sslEngine creation already.
// If inbound, discrimination may have already created the engine and context
if (isInbound) {
// See if discrimination ran already. Get the state map from the VC.
Map<Object, Object> stateMap = inVC.getStateMap();
// Extract and remove result of discrimination, if it happened.
discState = (SSLDiscriminatorState) stateMap.remove(SSLChannel.SSL_DISCRIMINATOR_STATE);
if (discState != null) {
// Discrimination has happened. Save already existing sslEngine.
sslEngine = discState.getEngine();
sslContext = discState.getSSLContext();
setLinkConfig((SSLLinkConfig) stateMap.get(SSLConnectionLink.LINKCONFIG));
} else if (sslContext == null || getSSLEngine() == null) {
// Create a new SSL context based on the current properties in the ssl config.
sslContext = getChannel().getSSLContextForInboundLink(this, inVC);
// Discrimination has not happened yet. Create new SSL engine.
sslEngine = SSLUtils.getSSLEngine(sslContext,
sslChannel.getConfig().getFlowType(),
getLinkConfig(),
this);
}
} else {
// Outbound connect is ready. Ensure we have an sslContext and sslEngine.
if (sslContext == null || getSSLEngine() == null) {
// Create a new SSL context based on the current properties in the ssl config.
sslContext = getChannel().getSSLContextForOutboundLink(this, inVC, targetAddress);
// PK46069 - use engine that allows session id re-use
sslEngine = SSLUtils.getOutboundSSLEngine(
sslContext, getLinkConfig(),
targetAddress.getRemoteAddress().getHostName(),
targetAddress.getRemoteAddress().getPort(),
this);
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "SSL engine hc=" + getSSLEngine().hashCode() + " associated with vc=" + getVCHash());
}
// Flag that connection has been established.
// Need to set this to true for inbound and outbound so close will work right.
connected = true;
// Determine if this is an inbound or outbound connection.
if (isInbound) {
readyInbound(inVC);
} else {
readyOutbound(inVC, true);
}
} catch (Exception e) {
if (FrameworkState.isStopping()) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Ignoring exception during server shutdown: " + e);
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Caught exception during ready, " + e, e);
}
FFDCFilter.processException(e, getClass().getName(), "238", this);
}
close(inVC, e);
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "ready called after close so do nothing");
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.exit(tc, "ready");
}
} } | public class class_name {
@Override
public void ready(VirtualConnection inVC) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.entry(tc, "ready, vc=" + getVCHash()); // depends on control dependency: [if], data = [none]
}
// Double check for error condition where close already happened. Protective measure.
if (!closed && FrameworkState.isValid()) {
try {
// Outbound connections took care of sslContext and sslEngine creation already.
// If inbound, discrimination may have already created the engine and context
if (isInbound) {
// See if discrimination ran already. Get the state map from the VC.
Map<Object, Object> stateMap = inVC.getStateMap();
// Extract and remove result of discrimination, if it happened.
discState = (SSLDiscriminatorState) stateMap.remove(SSLChannel.SSL_DISCRIMINATOR_STATE); // depends on control dependency: [if], data = [none]
if (discState != null) {
// Discrimination has happened. Save already existing sslEngine.
sslEngine = discState.getEngine(); // depends on control dependency: [if], data = [none]
sslContext = discState.getSSLContext(); // depends on control dependency: [if], data = [none]
setLinkConfig((SSLLinkConfig) stateMap.get(SSLConnectionLink.LINKCONFIG)); // depends on control dependency: [if], data = [none]
} else if (sslContext == null || getSSLEngine() == null) {
// Create a new SSL context based on the current properties in the ssl config.
sslContext = getChannel().getSSLContextForInboundLink(this, inVC); // depends on control dependency: [if], data = [none]
// Discrimination has not happened yet. Create new SSL engine.
sslEngine = SSLUtils.getSSLEngine(sslContext,
sslChannel.getConfig().getFlowType(),
getLinkConfig(),
this); // depends on control dependency: [if], data = [none]
}
} else {
// Outbound connect is ready. Ensure we have an sslContext and sslEngine.
if (sslContext == null || getSSLEngine() == null) {
// Create a new SSL context based on the current properties in the ssl config.
sslContext = getChannel().getSSLContextForOutboundLink(this, inVC, targetAddress); // depends on control dependency: [if], data = [none]
// PK46069 - use engine that allows session id re-use
sslEngine = SSLUtils.getOutboundSSLEngine(
sslContext, getLinkConfig(),
targetAddress.getRemoteAddress().getHostName(),
targetAddress.getRemoteAddress().getPort(),
this); // depends on control dependency: [if], data = [none]
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "SSL engine hc=" + getSSLEngine().hashCode() + " associated with vc=" + getVCHash()); // depends on control dependency: [if], data = [none]
}
// Flag that connection has been established.
// Need to set this to true for inbound and outbound so close will work right.
connected = true; // depends on control dependency: [try], data = [none]
// Determine if this is an inbound or outbound connection.
if (isInbound) {
readyInbound(inVC); // depends on control dependency: [if], data = [none]
} else {
readyOutbound(inVC, true); // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
if (FrameworkState.isStopping()) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Ignoring exception during server shutdown: " + e); // depends on control dependency: [if], data = [none]
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Caught exception during ready, " + e, e); // depends on control dependency: [if], data = [none]
}
FFDCFilter.processException(e, getClass().getName(), "238", this); // depends on control dependency: [if], data = [none]
}
close(inVC, e);
} // depends on control dependency: [catch], data = [none]
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "ready called after close so do nothing"); // depends on control dependency: [if], data = [none]
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.exit(tc, "ready"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void marshall(StartSelector startSelector, ProtocolMarshaller protocolMarshaller) {
if (startSelector == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(startSelector.getStartSelectorType(), STARTSELECTORTYPE_BINDING);
protocolMarshaller.marshall(startSelector.getAfterFragmentNumber(), AFTERFRAGMENTNUMBER_BINDING);
protocolMarshaller.marshall(startSelector.getStartTimestamp(), STARTTIMESTAMP_BINDING);
protocolMarshaller.marshall(startSelector.getContinuationToken(), CONTINUATIONTOKEN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(StartSelector startSelector, ProtocolMarshaller protocolMarshaller) {
if (startSelector == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(startSelector.getStartSelectorType(), STARTSELECTORTYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(startSelector.getAfterFragmentNumber(), AFTERFRAGMENTNUMBER_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(startSelector.getStartTimestamp(), STARTTIMESTAMP_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(startSelector.getContinuationToken(), CONTINUATIONTOKEN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public TimelineEvent merge(TimelineEvent event, Collection<TimelineEvent> events, TimelineUpdater timelineUpdater) {
if (event == null) {
// nothing to merge
return null;
}
if (events == null || events.isEmpty()) {
// nothing to merge
return event;
}
// check whether all events within the same group
String group = event.getGroup();
for (TimelineEvent e : events) {
if ((group == null && e.getGroup() != null) || (group != null && !group.equals(e.getGroup()))) {
throw new IllegalStateException("Events to be merged may be only belong to one and the same group!");
}
}
// order events according to their start / end dates
TreeSet<TimelineEvent> orderedEvents = new TreeSet<TimelineEvent>(new TimelineEventComparator());
orderedEvents.add(event);
orderedEvents.addAll(events);
// find the largest end date
Date endDate = null;
for (TimelineEvent e : orderedEvents) {
if (endDate == null && e.getEndDate() != null) {
endDate = e.getEndDate();
}
else if (endDate != null && e.getEndDate() != null && endDate.before(e.getEndDate())) {
endDate = e.getEndDate();
}
}
TimelineEvent mergedEvent
= new TimelineEvent(event.getData(), orderedEvents.first().getStartDate(), endDate, event.isEditable(),
event.getGroup(), event.getStyleClass());
// merge...
deleteAll(events, timelineUpdater);
update(mergedEvent, timelineUpdater);
return mergedEvent;
} } | public class class_name {
public TimelineEvent merge(TimelineEvent event, Collection<TimelineEvent> events, TimelineUpdater timelineUpdater) {
if (event == null) {
// nothing to merge
return null; // depends on control dependency: [if], data = [none]
}
if (events == null || events.isEmpty()) {
// nothing to merge
return event; // depends on control dependency: [if], data = [none]
}
// check whether all events within the same group
String group = event.getGroup();
for (TimelineEvent e : events) {
if ((group == null && e.getGroup() != null) || (group != null && !group.equals(e.getGroup()))) {
throw new IllegalStateException("Events to be merged may be only belong to one and the same group!");
}
}
// order events according to their start / end dates
TreeSet<TimelineEvent> orderedEvents = new TreeSet<TimelineEvent>(new TimelineEventComparator());
orderedEvents.add(event);
orderedEvents.addAll(events);
// find the largest end date
Date endDate = null;
for (TimelineEvent e : orderedEvents) {
if (endDate == null && e.getEndDate() != null) {
endDate = e.getEndDate(); // depends on control dependency: [if], data = [none]
}
else if (endDate != null && e.getEndDate() != null && endDate.before(e.getEndDate())) {
endDate = e.getEndDate(); // depends on control dependency: [if], data = [none]
}
}
TimelineEvent mergedEvent
= new TimelineEvent(event.getData(), orderedEvents.first().getStartDate(), endDate, event.isEditable(),
event.getGroup(), event.getStyleClass());
// merge...
deleteAll(events, timelineUpdater);
update(mergedEvent, timelineUpdater);
return mergedEvent;
} } |
public class class_name {
public Node getLastChild()
{
ElemTemplateElement lastChild = null;
for (ElemTemplateElement node = m_firstChild; node != null;
node = node.m_nextSibling)
{
lastChild = node;
}
return lastChild;
} } | public class class_name {
public Node getLastChild()
{
ElemTemplateElement lastChild = null;
for (ElemTemplateElement node = m_firstChild; node != null;
node = node.m_nextSibling)
{
lastChild = node; // depends on control dependency: [for], data = [node]
}
return lastChild;
} } |
public class class_name {
public static void javaMethodParam2WhereConditions(MethodSpec.Builder methodBuilder, SQLiteModelMethod method,
String methodParamName, String paramName, TypeName paramType) {
if (method.hasAdapterForParam(methodParamName)) {
checkTypeAdapterForParam(method, methodParamName, BindSqlParam.class);
methodBuilder.addCode(
AbstractSQLTransform.PRE_TYPE_ADAPTER_TO_STRING + "$L" + AbstractSQLTransform.POST_TYPE_ADAPTER,
SQLTypeAdapterUtils.class, method.getAdapterForParam(methodParamName), paramName);
} else {
SQLTransform transform = lookup(paramType);
AssertKripton.assertTrueOrUnsupportedFieldTypeException(transform != null, paramType);
transform.generateWriteParam2WhereCondition(methodBuilder, method, paramName, paramType);
}
} } | public class class_name {
public static void javaMethodParam2WhereConditions(MethodSpec.Builder methodBuilder, SQLiteModelMethod method,
String methodParamName, String paramName, TypeName paramType) {
if (method.hasAdapterForParam(methodParamName)) {
checkTypeAdapterForParam(method, methodParamName, BindSqlParam.class); // depends on control dependency: [if], data = [none]
methodBuilder.addCode(
AbstractSQLTransform.PRE_TYPE_ADAPTER_TO_STRING + "$L" + AbstractSQLTransform.POST_TYPE_ADAPTER,
SQLTypeAdapterUtils.class, method.getAdapterForParam(methodParamName), paramName); // depends on control dependency: [if], data = [none]
} else {
SQLTransform transform = lookup(paramType);
AssertKripton.assertTrueOrUnsupportedFieldTypeException(transform != null, paramType); // depends on control dependency: [if], data = [none]
transform.generateWriteParam2WhereCondition(methodBuilder, method, paramName, paramType); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static Collection<?> createEmptyCollectionForType(Class<?> fieldType, int initialSize) {
rejectUnsupportedTypes(fieldType);
Collection<?> collection;
try {
collection = (Collection<?>) fieldType.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
if (fieldType.equals(ArrayBlockingQueue.class)) {
collection = new ArrayBlockingQueue<>(initialSize);
} else {
collection = (Collection<?>) new ObjenesisStd().newInstance(fieldType);
}
}
return collection;
} } | public class class_name {
public static Collection<?> createEmptyCollectionForType(Class<?> fieldType, int initialSize) {
rejectUnsupportedTypes(fieldType);
Collection<?> collection;
try {
collection = (Collection<?>) fieldType.newInstance(); // depends on control dependency: [try], data = [none]
} catch (InstantiationException | IllegalAccessException e) {
if (fieldType.equals(ArrayBlockingQueue.class)) {
collection = new ArrayBlockingQueue<>(initialSize); // depends on control dependency: [if], data = [none]
} else {
collection = (Collection<?>) new ObjenesisStd().newInstance(fieldType); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
return collection;
} } |
public class class_name {
@Override
public CPOption fetchByGroupId_Last(long groupId,
OrderByComparator<CPOption> orderByComparator) {
int count = countByGroupId(groupId);
if (count == 0) {
return null;
}
List<CPOption> list = findByGroupId(groupId, count - 1, count,
orderByComparator);
if (!list.isEmpty()) {
return list.get(0);
}
return null;
} } | public class class_name {
@Override
public CPOption fetchByGroupId_Last(long groupId,
OrderByComparator<CPOption> orderByComparator) {
int count = countByGroupId(groupId);
if (count == 0) {
return null; // depends on control dependency: [if], data = [none]
}
List<CPOption> list = findByGroupId(groupId, count - 1, count,
orderByComparator);
if (!list.isEmpty()) {
return list.get(0); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
static boolean isWifi(Context context) {
if (context == null) {
return false;
}
ConnectivityManager manager =
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (manager == null) {
return false;
}
NetworkInfo info = manager.getActiveNetworkInfo();
return info != null && (info.getType() == ConnectivityManager.TYPE_WIFI);
} } | public class class_name {
static boolean isWifi(Context context) {
if (context == null) {
return false; // depends on control dependency: [if], data = [none]
}
ConnectivityManager manager =
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (manager == null) {
return false; // depends on control dependency: [if], data = [none]
}
NetworkInfo info = manager.getActiveNetworkInfo();
return info != null && (info.getType() == ConnectivityManager.TYPE_WIFI);
} } |
public class class_name {
public void marshall(SubscribeToDatasetRequest subscribeToDatasetRequest, ProtocolMarshaller protocolMarshaller) {
if (subscribeToDatasetRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(subscribeToDatasetRequest.getIdentityPoolId(), IDENTITYPOOLID_BINDING);
protocolMarshaller.marshall(subscribeToDatasetRequest.getIdentityId(), IDENTITYID_BINDING);
protocolMarshaller.marshall(subscribeToDatasetRequest.getDatasetName(), DATASETNAME_BINDING);
protocolMarshaller.marshall(subscribeToDatasetRequest.getDeviceId(), DEVICEID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(SubscribeToDatasetRequest subscribeToDatasetRequest, ProtocolMarshaller protocolMarshaller) {
if (subscribeToDatasetRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(subscribeToDatasetRequest.getIdentityPoolId(), IDENTITYPOOLID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(subscribeToDatasetRequest.getIdentityId(), IDENTITYID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(subscribeToDatasetRequest.getDatasetName(), DATASETNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(subscribeToDatasetRequest.getDeviceId(), DEVICEID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
void logInfo(final String message) {
if(logger != null) {
try {
StringBuilder buf = new StringBuilder(name);
buf.append(": ");
buf.append(message);
logger.info(buf.toString());
} catch(Throwable t) {
//Ignore - logging should not kill anything
}
}
} } | public class class_name {
void logInfo(final String message) {
if(logger != null) {
try {
StringBuilder buf = new StringBuilder(name);
buf.append(": "); // depends on control dependency: [try], data = [none]
buf.append(message); // depends on control dependency: [try], data = [none]
logger.info(buf.toString()); // depends on control dependency: [try], data = [none]
} catch(Throwable t) {
//Ignore - logging should not kill anything
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public S transform(TypeDescription instrumentedType, S target) {
for (Transformer<S> transformer : transformers) {
target = transformer.transform(instrumentedType, target);
}
return target;
} } | public class class_name {
public S transform(TypeDescription instrumentedType, S target) {
for (Transformer<S> transformer : transformers) {
target = transformer.transform(instrumentedType, target); // depends on control dependency: [for], data = [transformer]
}
return target;
} } |
public class class_name {
private MessageItem getMessageItem()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "getMessageItem");
MessageItem msg = null;
try
{
msg = (MessageItem) getReferredItem();
} catch (MessageStoreException e)
{
// FFDC
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.store.items.MessageItemReference.getMessageItem",
"1:857:1.147",
this);
SibTr.exception(tc, e);
// TODO : For now we throw a runtime here but we need to look at the stack to see
// what this means.
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "getMessageItem", e);
throw new SIErrorException(e);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "getMessageItem", msg);
return msg;
} } | public class class_name {
private MessageItem getMessageItem()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "getMessageItem");
MessageItem msg = null;
try
{
msg = (MessageItem) getReferredItem(); // depends on control dependency: [try], data = [none]
} catch (MessageStoreException e)
{
// FFDC
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.store.items.MessageItemReference.getMessageItem",
"1:857:1.147",
this);
SibTr.exception(tc, e);
// TODO : For now we throw a runtime here but we need to look at the stack to see
// what this means.
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "getMessageItem", e);
throw new SIErrorException(e);
} // depends on control dependency: [catch], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "getMessageItem", msg);
return msg;
} } |
public class class_name {
public boolean sendJoinRequest(Address toAddress, boolean withCredentials) {
if (toAddress == null) {
toAddress = clusterService.getMasterAddress();
}
JoinRequestOp joinRequest = new JoinRequestOp(node.createJoinRequest(withCredentials));
return nodeEngine.getOperationService().send(joinRequest, toAddress);
} } | public class class_name {
public boolean sendJoinRequest(Address toAddress, boolean withCredentials) {
if (toAddress == null) {
toAddress = clusterService.getMasterAddress(); // depends on control dependency: [if], data = [none]
}
JoinRequestOp joinRequest = new JoinRequestOp(node.createJoinRequest(withCredentials));
return nodeEngine.getOperationService().send(joinRequest, toAddress);
} } |
public class class_name {
public static SimpleTransaction beginTransaction(javax.sql.DataSource dataSource, IsolationLevel isolationLevel) throws UncheckedSQLException {
N.checkArgNotNull(dataSource);
N.checkArgNotNull(isolationLevel);
final String ttid = SimpleTransaction.getTransactionThreadId(dataSource);
SimpleTransaction tran = SimpleTransaction.threadTransacionMap.get(ttid);
if (tran == null) {
Connection conn = null;
boolean isOk = false;
try {
conn = dataSource.getConnection();
tran = new SimpleTransaction(ttid, conn, isolationLevel, true);
tran.incrementAndGet(isolationLevel);
isOk = true;
} catch (SQLException e) {
throw new UncheckedSQLException(e);
} finally {
if (isOk == false) {
closeQuietly(conn);
}
}
logger.info("Create a new transaction(id={})", tran.id());
SimpleTransaction.threadTransacionMap.put(ttid, tran);
} else {
logger.info("Reusing the existing transaction(id={})", tran.id());
tran.incrementAndGet(isolationLevel);
}
logger.debug("Current active transaction: {}", SimpleTransaction.threadTransacionMap.values());
return tran;
} } | public class class_name {
public static SimpleTransaction beginTransaction(javax.sql.DataSource dataSource, IsolationLevel isolationLevel) throws UncheckedSQLException {
N.checkArgNotNull(dataSource);
N.checkArgNotNull(isolationLevel);
final String ttid = SimpleTransaction.getTransactionThreadId(dataSource);
SimpleTransaction tran = SimpleTransaction.threadTransacionMap.get(ttid);
if (tran == null) {
Connection conn = null;
boolean isOk = false;
try {
conn = dataSource.getConnection();
tran = new SimpleTransaction(ttid, conn, isolationLevel, true);
tran.incrementAndGet(isolationLevel);
isOk = true;
} catch (SQLException e) {
throw new UncheckedSQLException(e);
} finally {
if (isOk == false) {
closeQuietly(conn);
// depends on control dependency: [if], data = [none]
}
}
logger.info("Create a new transaction(id={})", tran.id());
SimpleTransaction.threadTransacionMap.put(ttid, tran);
} else {
logger.info("Reusing the existing transaction(id={})", tran.id());
tran.incrementAndGet(isolationLevel);
}
logger.debug("Current active transaction: {}", SimpleTransaction.threadTransacionMap.values());
return tran;
} } |
public class class_name {
public static int compareVersions(String v1, String v2) {
String[] components1 = split(v1);
String[] components2 = split(v2);
int diff;
int length = Math.min(components1.length, components2.length);
for (int i = 0; i < length; i++) {
String s1 = components1[i];
String s2 = components2[i];
Integer i1 = tryParseInteger(s1);
Integer i2 = tryParseInteger(s2);
if (i1 != null && i2 != null) {
diff = i1.compareTo(i2);
} else {
// lets assume strings instead
diff = s1.compareTo(s2);
}
if (diff != 0) {
return diff;
}
}
diff = Integer.compare(components1.length, components2.length);
if (diff == 0) {
if (v1 == v2) {
return 0;
}
/* if v1 == null then v2 can't be null here (see 'if' above).
So for v1 == null its always smaller than v2 */;
return v1 != null ? v1.compareTo(v2) : -1;
}
return diff;
} } | public class class_name {
public static int compareVersions(String v1, String v2) {
String[] components1 = split(v1);
String[] components2 = split(v2);
int diff;
int length = Math.min(components1.length, components2.length);
for (int i = 0; i < length; i++) {
String s1 = components1[i];
String s2 = components2[i];
Integer i1 = tryParseInteger(s1);
Integer i2 = tryParseInteger(s2);
if (i1 != null && i2 != null) {
diff = i1.compareTo(i2); // depends on control dependency: [if], data = [none]
} else {
// lets assume strings instead
diff = s1.compareTo(s2); // depends on control dependency: [if], data = [none]
}
if (diff != 0) {
return diff; // depends on control dependency: [if], data = [none]
}
}
diff = Integer.compare(components1.length, components2.length);
if (diff == 0) {
if (v1 == v2) {
return 0; // depends on control dependency: [if], data = [none]
}
/* if v1 == null then v2 can't be null here (see 'if' above).
So for v1 == null its always smaller than v2 */;
return v1 != null ? v1.compareTo(v2) : -1; // depends on control dependency: [if], data = [none]
}
return diff;
} } |
public class class_name {
protected URI doPostCreateMultipart(String path, InputStream inputStream, MultivaluedMap<String, String> headers) throws ClientException {
this.readLock.lock();
try {
WebResource.Builder requestBuilder = getResourceWrapper().rewritten(path, HttpMethod.POST).getRequestBuilder();
requestBuilder = ensurePostCreateMultipartHeaders(headers, requestBuilder);
ClientResponse response = requestBuilder.post(ClientResponse.class, inputStream);
errorIfStatusNotEqualTo(response, ClientResponse.Status.OK, ClientResponse.Status.CREATED);
try {
return response.getLocation();
} finally {
response.close();
}
} catch (ClientHandlerException ex) {
throw new ClientException(ClientResponse.Status.INTERNAL_SERVER_ERROR, ex.getMessage());
} finally {
this.readLock.unlock();
}
} } | public class class_name {
protected URI doPostCreateMultipart(String path, InputStream inputStream, MultivaluedMap<String, String> headers) throws ClientException {
this.readLock.lock();
try {
WebResource.Builder requestBuilder = getResourceWrapper().rewritten(path, HttpMethod.POST).getRequestBuilder();
requestBuilder = ensurePostCreateMultipartHeaders(headers, requestBuilder);
ClientResponse response = requestBuilder.post(ClientResponse.class, inputStream);
errorIfStatusNotEqualTo(response, ClientResponse.Status.OK, ClientResponse.Status.CREATED);
try {
return response.getLocation(); // depends on control dependency: [try], data = [none]
} finally {
response.close();
}
} catch (ClientHandlerException ex) {
throw new ClientException(ClientResponse.Status.INTERNAL_SERVER_ERROR, ex.getMessage());
} finally {
this.readLock.unlock();
}
} } |
public class class_name {
public boolean hasMoreElements()
{
try {
return hasMoreElementsR();
} catch (NoMoreElementsException e) {
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "131", this);
return false;
} catch (EnumeratorException e) {
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "134", this);
throw new RuntimeException(e.toString());
} catch (NoSuchObjectException e) {
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "137", this);
throw new IllegalStateException("Cannot access finder result outside transaction");
} catch (RemoteException e) {
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "140", this);
throw new RuntimeException(e.toString());
}
} } | public class class_name {
public boolean hasMoreElements()
{
try {
return hasMoreElementsR(); // depends on control dependency: [try], data = [none]
} catch (NoMoreElementsException e) {
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "131", this);
return false;
} catch (EnumeratorException e) { // depends on control dependency: [catch], data = [none]
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "134", this);
throw new RuntimeException(e.toString());
} catch (NoSuchObjectException e) { // depends on control dependency: [catch], data = [none]
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "137", this);
throw new IllegalStateException("Cannot access finder result outside transaction");
} catch (RemoteException e) { // depends on control dependency: [catch], data = [none]
// FFDCFilter.processException(e, CLASS_NAME + ".hasMoreElements", "140", this);
throw new RuntimeException(e.toString());
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
void feedback(Object from, String from_out, Object to, String to_in) {
// add them to the set of commands
if (from == to) {
throw new ComponentException("src == dest.");
}
if (to_in == null || from_out == null) {
throw new ComponentException("Some field arguments are null");
}
ComponentAccess ca_from = lookup(from);
ComponentAccess ca_to = lookup(to);
Access from_access = ca_from.output(from_out);
checkFA(from_access, from, from_out);
Access to_access = ca_to.input(to_in);
checkFA(to_access, to, to_in);
if (!canConnect(from_access, to_access)) {
throw new ComponentException("Type/Access mismatch, Cannot connect: " + from + '.' + to_in + " -> " + to + '.' + from_out);
}
// src data object
FieldContent data = from_access.getData();
data.tagIn();
data.tagOut();
// dataSet.add(data);
to_access.setData(data); // connect the two
ca_from.setOutput(from_out, new AsyncFieldAccess(from_access));
ca_to.setInput(to_in, new AsyncFieldAccess(to_access));
if (checkCircular) {
// val.addConnection(from, to);
// val.checkCircular();
}
if (log.isLoggable(Level.CONFIG)) {
log.config(String.format("feedback @Out(%s) -> @In(%s)", from_access.toString() , to_access.toString()));
}
// ens.fireConnect(from_access, to_access);
} } | public class class_name {
void feedback(Object from, String from_out, Object to, String to_in) {
// add them to the set of commands
if (from == to) {
throw new ComponentException("src == dest.");
}
if (to_in == null || from_out == null) {
throw new ComponentException("Some field arguments are null");
}
ComponentAccess ca_from = lookup(from);
ComponentAccess ca_to = lookup(to);
Access from_access = ca_from.output(from_out);
checkFA(from_access, from, from_out);
Access to_access = ca_to.input(to_in);
checkFA(to_access, to, to_in);
if (!canConnect(from_access, to_access)) {
throw new ComponentException("Type/Access mismatch, Cannot connect: " + from + '.' + to_in + " -> " + to + '.' + from_out);
}
// src data object
FieldContent data = from_access.getData();
data.tagIn();
data.tagOut();
// dataSet.add(data);
to_access.setData(data); // connect the two
ca_from.setOutput(from_out, new AsyncFieldAccess(from_access));
ca_to.setInput(to_in, new AsyncFieldAccess(to_access));
if (checkCircular) {
// val.addConnection(from, to);
// val.checkCircular();
}
if (log.isLoggable(Level.CONFIG)) {
log.config(String.format("feedback @Out(%s) -> @In(%s)", from_access.toString() , to_access.toString())); // depends on control dependency: [if], data = [none]
}
// ens.fireConnect(from_access, to_access);
} } |
public class class_name {
public void loadPictureInThread( URL imageUrl, int priority, double rotation ) {
if ( pictureStatusCode == LOADING ) {
stopLoadingExcept( imageUrl );
}
this.imageUrl = imageUrl;
this.rotation = rotation;
LoadThread t = new LoadThread( this );
t.setPriority( priority );
t.start();
} } | public class class_name {
public void loadPictureInThread( URL imageUrl, int priority, double rotation ) {
if ( pictureStatusCode == LOADING ) {
stopLoadingExcept( imageUrl ); // depends on control dependency: [if], data = [none]
}
this.imageUrl = imageUrl;
this.rotation = rotation;
LoadThread t = new LoadThread( this );
t.setPriority( priority );
t.start();
} } |
public class class_name {
private String unexternalizeImageRef(String ref) {
String unexternalizedRef = ref;
if (StringUtils.isNotEmpty(unexternalizedRef)) {
// decode if required
unexternalizedRef = decodeIfEncoded(unexternalizedRef);
// TODO: implementation has to be aligned with MediaSource implementations!
// remove default servlet extension that is needed for inline images in RTE
unexternalizedRef = StringUtils.removeEnd(unexternalizedRef, "/" + JcrConstants.JCR_CONTENT + ".default");
unexternalizedRef = StringUtils.removeEnd(unexternalizedRef, "/_jcr_content.default");
}
return unexternalizedRef;
} } | public class class_name {
private String unexternalizeImageRef(String ref) {
String unexternalizedRef = ref;
if (StringUtils.isNotEmpty(unexternalizedRef)) {
// decode if required
unexternalizedRef = decodeIfEncoded(unexternalizedRef); // depends on control dependency: [if], data = [none]
// TODO: implementation has to be aligned with MediaSource implementations!
// remove default servlet extension that is needed for inline images in RTE
unexternalizedRef = StringUtils.removeEnd(unexternalizedRef, "/" + JcrConstants.JCR_CONTENT + ".default"); // depends on control dependency: [if], data = [none]
unexternalizedRef = StringUtils.removeEnd(unexternalizedRef, "/_jcr_content.default"); // depends on control dependency: [if], data = [none]
}
return unexternalizedRef;
} } |
public class class_name {
public static String getAt(String list, String delimiter, int position, boolean ignoreEmpty, String defaultValue) {
if (delimiter.length() == 1) return getAt(list, delimiter.charAt(0), position, ignoreEmpty, defaultValue);
int len = list.length();
if (len == 0) return defaultValue;
int last = -1;
int count = -1;
char[] del = delimiter.toCharArray();
char c;
for (int i = 0; i < len; i++) {
c = list.charAt(i);
for (int y = 0; y < del.length; y++) {
if (c == del[y]) {
if (ignoreEmpty && (last + 1) == i) {
last = i;
break;
}
count++;
if (count == position) {
return list.substring(last + 1, i);
}
last = i;
break;
}
}
}
if (position == count + 1) {
if (!ignoreEmpty || last + 1 < len) return list.substring(last + 1);
}
return defaultValue;
} } | public class class_name {
/**
 * Returns the element at {@code position} (0-based) of {@code list} when it is
 * split on any character of {@code delimiter}; {@code ignoreEmpty} skips
 * zero-length elements, and {@code defaultValue} is returned when the position
 * does not exist. Trailing "// depends on ..." comments are control/data
 * dependency annotations and are kept verbatim.
 */
public static String getAt(String list, String delimiter, int position, boolean ignoreEmpty, String defaultValue) {
// Single-char delimiter: delegate to the char-based overload.
if (delimiter.length() == 1) return getAt(list, delimiter.charAt(0), position, ignoreEmpty, defaultValue);
int len = list.length();
if (len == 0) return defaultValue;
// last = index of the previous delimiter; count = index of the last element seen.
int last = -1;
int count = -1;
char[] del = delimiter.toCharArray();
char c;
for (int i = 0; i < len; i++) {
c = list.charAt(i); // depends on control dependency: [for], data = [i]
for (int y = 0; y < del.length; y++) {
if (c == del[y]) {
// Empty element between adjacent delimiters: skip without counting.
if (ignoreEmpty && (last + 1) == i) {
last = i; // depends on control dependency: [if], data = [none]
break;
}
count++; // depends on control dependency: [if], data = [none]
if (count == position) {
return list.substring(last + 1, i); // depends on control dependency: [if], data = [none]
}
last = i; // depends on control dependency: [if], data = [none]
break;
}
}
}
// The trailing element (after the last delimiter) may still match.
if (position == count + 1) {
if (!ignoreEmpty || last + 1 < len) return list.substring(last + 1);
}
return defaultValue;
} }
public class class_name {
public static List<URL> getResources(String resourceName, Class<?> callingClass) {
Enumeration<URL> em = null;
try {
em = Thread.currentThread().getContextClassLoader().getResources(resourceName);
if (!em.hasMoreElements()) {
em = ClassLoaders.class.getClassLoader().getResources(resourceName);
if (!em.hasMoreElements()) {
ClassLoader cl = callingClass.getClassLoader();
if (cl != null) em = cl.getResources(resourceName);
}
}
} catch (IOException e) {
e.printStackTrace();
}
List<URL> urls = CollectUtils.newArrayList();
while (null != em && em.hasMoreElements()) {
urls.add(em.nextElement());
}
return urls;
} } | public class class_name {
/**
 * Looks up all resources named {@code resourceName}, trying in order the
 * thread context class loader, this class's own loader, and the calling
 * class's loader (each fallback only when the previous one found nothing).
 * An IOException during lookup is printed and an empty/partial list returned.
 * Trailing "// depends on ..." comments are dependency annotations, kept as-is.
 */
public static List<URL> getResources(String resourceName, Class<?> callingClass) {
Enumeration<URL> em = null;
try {
em = Thread.currentThread().getContextClassLoader().getResources(resourceName); // depends on control dependency: [try], data = [none]
if (!em.hasMoreElements()) {
em = ClassLoaders.class.getClassLoader().getResources(resourceName); // depends on control dependency: [if], data = [none]
if (!em.hasMoreElements()) {
ClassLoader cl = callingClass.getClassLoader();
// Bootstrap-loaded classes have a null loader; nothing more to try then.
if (cl != null) em = cl.getResources(resourceName);
}
}
} catch (IOException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
List<URL> urls = CollectUtils.newArrayList();
while (null != em && em.hasMoreElements()) {
urls.add(em.nextElement()); // depends on control dependency: [while], data = [none]
}
return urls;
} }
public class class_name {
public void marshall(FaultRootCause faultRootCause, ProtocolMarshaller protocolMarshaller) {
if (faultRootCause == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(faultRootCause.getServices(), SERVICES_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
/**
 * Marshals a FaultRootCause into the request payload. Rejects a null model
 * and wraps any marshalling failure, preserving the original cause.
 * Trailing "// depends on ..." comments are dependency annotations, kept as-is.
 *
 * @throws SdkClientException on a null argument or marshalling error
 */
public void marshall(FaultRootCause faultRootCause, ProtocolMarshaller protocolMarshaller) {
if (faultRootCause == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(faultRootCause.getServices(), SERVICES_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
public class class_name {
public static DMatrixRMaj extractColumn(DMatrixRMaj a , int column , DMatrixRMaj out ) {
if( out == null)
out = new DMatrixRMaj(a.numRows,1);
else if( !MatrixFeatures_DDRM.isVector(out) || out.getNumElements() != a.numRows )
throw new MatrixDimensionException("Output must be a vector of length "+a.numRows);
int index = column;
for (int i = 0; i < a.numRows; i++, index += a.numCols ) {
out.data[i] = a.data[index];
}
return out;
} } | public class class_name {
/**
 * Copies column {@code column} of matrix {@code a} into {@code out}
 * (a column vector of length a.numRows, allocated when null).
 * Trailing "// depends on ..." comments are dependency annotations, kept as-is.
 *
 * @throws MatrixDimensionException when out is not a vector of that length
 */
public static DMatrixRMaj extractColumn(DMatrixRMaj a , int column , DMatrixRMaj out ) {
if( out == null)
out = new DMatrixRMaj(a.numRows,1);
else if( !MatrixFeatures_DDRM.isVector(out) || out.getNumElements() != a.numRows )
throw new MatrixDimensionException("Output must be a vector of length "+a.numRows);
// Row-major layout: successive rows of the same column are numCols apart.
int index = column;
for (int i = 0; i < a.numRows; i++, index += a.numCols ) {
out.data[i] = a.data[index]; // depends on control dependency: [for], data = [i]
}
return out;
} }
public class class_name {
private Criteria buildCriteria(QueryModel queryModel) {
Criteria criteria = getCurrentSession().createCriteria(persistentClass);
if (queryModel.getConditions() != null) {
for (Condition condition : queryModel.getConditions()) {
criteria.add((Criterion) condition.getConstraint());
}
}
for (Map.Entry<String, List<Condition>> associationCriteriaEntry : queryModel.getAssociationConditions().entrySet()) {
Criteria associationCriteria = criteria.createCriteria(associationCriteriaEntry.getKey());
criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
for (Condition condition : associationCriteriaEntry.getValue()) {
associationCriteria.add((Criterion) condition.getConstraint());
}
}
if (queryModel.getProjection() != null) {
ProjectionList projectionList = Projections.projectionList();
projectionList.add((org.hibernate.criterion.Projection) queryModel.getProjection().getDetails());
criteria.setProjection(projectionList);
}
return criteria;
} } | public class class_name {
/**
 * Translates a QueryModel into a Hibernate Criteria for the persistent class:
 * root conditions, association sub-criteria (with DISTINCT_ROOT_ENTITY to
 * de-duplicate joined rows), and an optional projection.
 * Trailing "// depends on ..." comments are dependency annotations, kept as-is.
 */
private Criteria buildCriteria(QueryModel queryModel) {
Criteria criteria = getCurrentSession().createCriteria(persistentClass);
if (queryModel.getConditions() != null) {
for (Condition condition : queryModel.getConditions()) {
criteria.add((Criterion) condition.getConstraint()); // depends on control dependency: [for], data = [condition]
}
}
for (Map.Entry<String, List<Condition>> associationCriteriaEntry : queryModel.getAssociationConditions().entrySet()) {
Criteria associationCriteria = criteria.createCriteria(associationCriteriaEntry.getKey());
// Set on every iteration; redundant after the first but harmless.
criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); // depends on control dependency: [for], data = [none]
for (Condition condition : associationCriteriaEntry.getValue()) {
associationCriteria.add((Criterion) condition.getConstraint()); // depends on control dependency: [for], data = [condition]
}
}
if (queryModel.getProjection() != null) {
ProjectionList projectionList = Projections.projectionList();
projectionList.add((org.hibernate.criterion.Projection) queryModel.getProjection().getDetails()); // depends on control dependency: [if], data = [none]
criteria.setProjection(projectionList); // depends on control dependency: [if], data = [none]
}
return criteria;
} }
public class class_name {
/**
 * Orders the given fields so that a field whose declared type is a supertype
 * of another field's type appears AFTER that field. Fields are first sorted
 * by name (compareFieldsByName); supertype-typed fields are then moved back.
 *
 * @param unsortedFields fields to order; the input collection is not modified
 * @return a new list in name order with supertypes shifted after their subtypes
 */
public static List<Field> sortSuperTypesLast(Collection<? extends Field> unsortedFields) {
List<Field> fields = new ArrayList<Field>(unsortedFields);
Collections.sort(fields, compareFieldsByName);
int i = 0;
while (i < fields.size() - 1) {
Field f = fields.get(i);
Class<?> ft = f.getType();
// Find the LAST later field whose type is a strict subtype of ft.
int newPos = i;
for (int j = i + 1; j < fields.size(); j++) {
Class<?> t = fields.get(j).getType();
if (ft != t && ft.isAssignableFrom(t)) {
newPos = j;
}
}
if (newPos == i) {
// Nothing to move past: advance to the next field.
i++;
} else {
// Move f after that subtype. After remove(i) the target element sits at
// newPos - 1, so add(newPos, f) places f directly behind it. i is NOT
// advanced: the element that slid into position i is examined next.
fields.remove(i);
fields.add(newPos, f);
}
}
return fields;
} }
/**
 * Orders fields by name, then moves any field whose declared type is a
 * supertype of a later field's type to a position after that field.
 * Trailing "// depends on ..." comments are dependency annotations, kept as-is.
 */
public static List<Field> sortSuperTypesLast(Collection<? extends Field> unsortedFields) {
List<Field> fields = new ArrayList<Field>(unsortedFields);
Collections.sort(fields, compareFieldsByName);
int i = 0;
while (i < fields.size() - 1) {
Field f = fields.get(i);
Class<?> ft = f.getType();
// newPos tracks the last later field whose type is a strict subtype of ft.
int newPos = i;
for (int j = i + 1; j < fields.size(); j++) {
Class<?> t = fields.get(j).getType();
if (ft != t && ft.isAssignableFrom(t)) {
newPos = j; // depends on control dependency: [if], data = [none]
}
}
if (newPos == i) {
i++; // depends on control dependency: [if], data = [none]
} else {
// Move f after that subtype; i stays put to re-examine the shifted element.
fields.remove(i); // depends on control dependency: [if], data = [i)]
fields.add(newPos, f); // depends on control dependency: [if], data = [(newPos]
}
}
return fields;
} }
public class class_name {
@Override
public final void setItem(final ServiceToSale pService) {
this.item = pService;
if (this.itsId == null) {
this.itsId = new ServicePlaceId();
}
this.itsId.setItem(this.item);
} } | public class class_name {
/**
 * Stores the service on this entity and mirrors it into the composite ID,
 * lazily creating the ServicePlaceId when it does not exist yet.
 * Trailing "// depends on ..." comments are dependency annotations, kept as-is.
 *
 * @param pService the service to associate
 */
@Override
public final void setItem(final ServiceToSale pService) {
this.item = pService;
if (this.itsId == null) {
this.itsId = new ServicePlaceId(); // depends on control dependency: [if], data = [none]
}
this.itsId.setItem(this.item);
} }
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.