code: string, lengths 130 to 281k
code_dependency: string, lengths 182 to 306k
public class class_name { protected Message createControlMessage(TestContext context, String messageType) { if (dataDictionary != null) { messageBuilder.setDataDictionary(dataDictionary); } return messageBuilder.buildMessageContent(context, messageType, MessageDirection.INBOUND); } }
public class class_name { protected Message createControlMessage(TestContext context, String messageType) { if (dataDictionary != null) { messageBuilder.setDataDictionary(dataDictionary); // depends on control dependency: [if], data = [(dataDictionary] } return messageBuilder.buildMessageContent(context, messageType, MessageDirection.INBOUND); } }
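Each row pairs the same Java method twice: the code column holds the plain source, and the code_dependency column appends a trailing comment of the form "// depends on control dependency: [construct], data = [...]" to statements whose execution is guarded by an enclosing if, else, for, while, try, or catch, naming the guarding construct and, where a shared variable between the condition and the statement is detectable, that value (data = [none] otherwise). The following is a minimal sketch of that convention under this reading of the annotations; the class, field, and method names are illustrative and do not come from the dataset.

import java.util.HashMap;
import java.util.Map;

// Illustrative names only; the trailing comments mirror the code_dependency column above.
public class ControlDependencyExample {
    private final Map<String, String> cache = new HashMap<>();
    private boolean dirty;

    // Both statements inside the if-block run only when the null check passes,
    // so the annotated variant marks them as control-dependent on that [if];
    // the first statement also uses the tested variable, hence data = [(value].
    public void put(String key, String value) {
        if (value != null) {
            cache.put(key, value); // depends on control dependency: [if], data = [(value]
            dirty = true; // depends on control dependency: [if], data = [none]
        }
    }
}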
public class class_name { public Revision next() throws Exception { // Checks whether the next revision has already been reconstructed. Revision revision; if (chronoStorage.isTop(revisionIndex)) { // If this is the case the revision will removed from the storage return chronoStorage.remove(); } // Otherwise the chronological order counter will be mapped to the // revsision counter int revCount = revisionIndex; if (mappingStorage.containsKey(revisionIndex)) { revCount = mappingStorage.get(revisionIndex); } // Retrieve the related full revision block ChronoFullRevision cfr = fullRevStorage.get(revCount); int queryPK, limit, previousRevisionCounter; String previousRevision; // Determine the nearest revision that could be used to construct // the specified revision revision = cfr.getNearest(revCount); if (revision == null) { // Create query bounds (all revisions from the full revision till // now) queryPK = articlePK + cfr.getStartRC() - 1; limit = revCount - cfr.getStartRC() + 1; previousRevision = null; previousRevisionCounter = -1; } else { // Create query bounds (only new revisions, last known + 1 till now) queryPK = revision.getPrimaryKey() + 1; limit = revCount - revision.getRevisionCounter(); previousRevision = revision.getRevisionText(); previousRevisionCounter = revision.getRevisionCounter(); } Statement statement = null; ResultSet result = null; revision = null; try { statement = this.connection.createStatement(); // Retrieve encoded revisions result = statement .executeQuery("SELECT Revision, PrimaryKey, RevisionCounter, RevisionID, ArticleID, Timestamp " + "FROM revisions " + "WHERE PrimaryKey >= " + queryPK + " LIMIT " + limit); String currentRevision = null; Diff diff; RevisionDecoder decoder; boolean binaryData = result.getMetaData().getColumnType(1) == Types.LONGVARBINARY; while (result.next()) { decoder = new RevisionDecoder(config.getCharacterSet()); // binary or base64 encoded if (binaryData) { decoder.setInput(result.getBinaryStream(1), true); } else { decoder.setInput(result.getString(1)); } // Decode and rebuild diff = decoder.decode(); if (previousRevisionCounter != -1) { if (previousRevisionCounter + 1 != result.getInt(3)) { System.err.println("Reconstruction data invalid - " + "\r\n\t" + "Expected " + (previousRevisionCounter + 1) + " instead of " + result.getInt(3)); return null; } } else { if (cfr.getStartRC() != result.getInt(3)) { System.err.println("Reconstruction data invalid - " + "\r\n\t" + "Expected " + (cfr.getStartRC()) + " instead of " + result.getInt(3)); return null; } } try { currentRevision = diff.buildRevision(previousRevision); revision = new Revision(result.getInt(3)); revision.setRevisionText(currentRevision); revision.setPrimaryKey(result.getInt(2)); revision.setRevisionID(result.getInt(4)); revision.setArticleID(result.getInt(5)); revision.setTimeStamp(new Timestamp(result.getLong(6))); previousRevision = currentRevision; previousRevisionCounter = revision.getRevisionCounter(); } catch (Exception e) { System.err.println("Reconstruction failed while retrieving" + " data to reconstruct <" + revisionIndex + ">" + "\r\n\t" + "[ArticleId " + result.getInt(5) + ", RevisionId " + result.getInt(4) + ", RevisionCounter " + result.getInt(3) + "]"); previousRevision = null; revision = null; return null; } // Add the reconstructed revision to the storage if (revision != null) { chronoStorage.add(revision); } } // Ensure that the correct revision is on top of the storage if (chronoStorage.isTop(revisionIndex)) { chronoStorage.remove(); return revision; } else 
{ return null; } } finally { if (statement != null) { statement.close(); } if (result != null) { result.close(); } } } }
public class class_name { public Revision next() throws Exception { // Checks whether the next revision has already been reconstructed. Revision revision; if (chronoStorage.isTop(revisionIndex)) { // If this is the case the revision will removed from the storage return chronoStorage.remove(); } // Otherwise the chronological order counter will be mapped to the // revsision counter int revCount = revisionIndex; if (mappingStorage.containsKey(revisionIndex)) { revCount = mappingStorage.get(revisionIndex); } // Retrieve the related full revision block ChronoFullRevision cfr = fullRevStorage.get(revCount); int queryPK, limit, previousRevisionCounter; String previousRevision; // Determine the nearest revision that could be used to construct // the specified revision revision = cfr.getNearest(revCount); if (revision == null) { // Create query bounds (all revisions from the full revision till // now) queryPK = articlePK + cfr.getStartRC() - 1; limit = revCount - cfr.getStartRC() + 1; previousRevision = null; previousRevisionCounter = -1; } else { // Create query bounds (only new revisions, last known + 1 till now) queryPK = revision.getPrimaryKey() + 1; limit = revCount - revision.getRevisionCounter(); previousRevision = revision.getRevisionText(); previousRevisionCounter = revision.getRevisionCounter(); } Statement statement = null; ResultSet result = null; revision = null; try { statement = this.connection.createStatement(); // Retrieve encoded revisions result = statement .executeQuery("SELECT Revision, PrimaryKey, RevisionCounter, RevisionID, ArticleID, Timestamp " + "FROM revisions " + "WHERE PrimaryKey >= " + queryPK + " LIMIT " + limit); String currentRevision = null; Diff diff; RevisionDecoder decoder; boolean binaryData = result.getMetaData().getColumnType(1) == Types.LONGVARBINARY; while (result.next()) { decoder = new RevisionDecoder(config.getCharacterSet()); // depends on control dependency: [while], data = [none] // binary or base64 encoded if (binaryData) { decoder.setInput(result.getBinaryStream(1), true); // depends on control dependency: [if], data = [none] } else { decoder.setInput(result.getString(1)); // depends on control dependency: [if], data = [none] } // Decode and rebuild diff = decoder.decode(); // depends on control dependency: [while], data = [none] if (previousRevisionCounter != -1) { if (previousRevisionCounter + 1 != result.getInt(3)) { System.err.println("Reconstruction data invalid - " + "\r\n\t" + "Expected " + (previousRevisionCounter + 1) + " instead of " + result.getInt(3)); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } } else { if (cfr.getStartRC() != result.getInt(3)) { System.err.println("Reconstruction data invalid - " + "\r\n\t" + "Expected " + (cfr.getStartRC()) + " instead of " + result.getInt(3)); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } } try { currentRevision = diff.buildRevision(previousRevision); // depends on control dependency: [try], data = [none] revision = new Revision(result.getInt(3)); // depends on control dependency: [try], data = [none] revision.setRevisionText(currentRevision); // depends on control dependency: [try], data = [none] revision.setPrimaryKey(result.getInt(2)); // depends on control dependency: [try], data = [none] revision.setRevisionID(result.getInt(4)); // depends on control dependency: [try], data = [none] revision.setArticleID(result.getInt(5)); // depends on 
control dependency: [try], data = [none] revision.setTimeStamp(new Timestamp(result.getLong(6))); // depends on control dependency: [try], data = [none] previousRevision = currentRevision; // depends on control dependency: [try], data = [none] previousRevisionCounter = revision.getRevisionCounter(); // depends on control dependency: [try], data = [none] } catch (Exception e) { System.err.println("Reconstruction failed while retrieving" + " data to reconstruct <" + revisionIndex + ">" + "\r\n\t" + "[ArticleId " + result.getInt(5) + ", RevisionId " + result.getInt(4) + ", RevisionCounter " + result.getInt(3) + "]"); previousRevision = null; revision = null; return null; } // depends on control dependency: [catch], data = [none] // Add the reconstructed revision to the storage if (revision != null) { chronoStorage.add(revision); // depends on control dependency: [if], data = [(revision] } } // Ensure that the correct revision is on top of the storage if (chronoStorage.isTop(revisionIndex)) { chronoStorage.remove(); // depends on control dependency: [if], data = [none] return revision; // depends on control dependency: [if], data = [none] } else { return null; // depends on control dependency: [if], data = [none] } } finally { if (statement != null) { statement.close(); // depends on control dependency: [if], data = [none] } if (result != null) { result.close(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static void sortBySpecificityAndQuality(List<MediaType> mediaTypes) { checkNotNull(mediaTypes, "'mediaTypes' must not be null"); if (mediaTypes.size() > 1) { Collections.sort(mediaTypes, Ordering.compound( ImmutableList.of(MediaType.SPECIFICITY_COMPARATOR, MediaType.QUALITY_VALUE_COMPARATOR))); } } }
public class class_name { public static void sortBySpecificityAndQuality(List<MediaType> mediaTypes) { checkNotNull(mediaTypes, "'mediaTypes' must not be null"); if (mediaTypes.size() > 1) { Collections.sort(mediaTypes, Ordering.compound( ImmutableList.of(MediaType.SPECIFICITY_COMPARATOR, MediaType.QUALITY_VALUE_COMPARATOR))); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void setFacility(String facilityStr) { if (facilityStr != null) { facilityStr = facilityStr.trim(); } this.facilityStr = facilityStr; } }
public class class_name { public void setFacility(String facilityStr) { if (facilityStr != null) { facilityStr = facilityStr.trim(); // depends on control dependency: [if], data = [none] } this.facilityStr = facilityStr; } }
public class class_name { public static int[] btopDecode(String alignment, Alphabet alphabet) { Matcher matcher = btopPattern.matcher(alignment); IntArrayList mutations = new IntArrayList(); int sPosition = 0; while (matcher.find()) { String g = matcher.group(); if (isPositiveInteger(g)) sPosition += Integer.parseInt(g); else if (g.charAt(0) == '-') { mutations.add(createDeletion(sPosition, alphabet.symbolToCodeWithException(g.charAt(1)))); ++sPosition; } else if (g.charAt(1) == '-') mutations.add(createInsertion(sPosition, alphabet.symbolToCodeWithException(g.charAt(0)))); else { mutations.add(createSubstitution(sPosition, alphabet.symbolToCodeWithException(g.charAt(1)), alphabet.symbolToCodeWithException(g.charAt(0)))); ++sPosition; } } return mutations.toArray(); } }
public class class_name { public static int[] btopDecode(String alignment, Alphabet alphabet) { Matcher matcher = btopPattern.matcher(alignment); IntArrayList mutations = new IntArrayList(); int sPosition = 0; while (matcher.find()) { String g = matcher.group(); if (isPositiveInteger(g)) sPosition += Integer.parseInt(g); else if (g.charAt(0) == '-') { mutations.add(createDeletion(sPosition, alphabet.symbolToCodeWithException(g.charAt(1)))); // depends on control dependency: [if], data = [none] ++sPosition; // depends on control dependency: [if], data = [none] } else if (g.charAt(1) == '-') mutations.add(createInsertion(sPosition, alphabet.symbolToCodeWithException(g.charAt(0)))); else { mutations.add(createSubstitution(sPosition, alphabet.symbolToCodeWithException(g.charAt(1)), alphabet.symbolToCodeWithException(g.charAt(0)))); // depends on control dependency: [if], data = [(g.charAt(1)] ++sPosition; // depends on control dependency: [if], data = [none] } } return mutations.toArray(); } }
public class class_name { protected void changeBreakpoint(final IDbgpSession session, IVdmBreakpoint breakpoint) throws DbgpException, CoreException { final IDbgpBreakpointCommands commands = session.getCoreCommands(); URI bpUri = null; // map the outgoing uri if we're a line breakpoint if (breakpoint instanceof IVdmLineBreakpoint) { IVdmLineBreakpoint bp = (IVdmLineBreakpoint) breakpoint; bpUri = bpPathMapper.map(bp.getResourceURI()); } if (breakpoint instanceof IVdmMethodEntryBreakpoint) { DbgpBreakpointConfig config = createBreakpointConfig(breakpoint); IVdmMethodEntryBreakpoint entryBreakpoint = (IVdmMethodEntryBreakpoint) breakpoint; String entryId = entryBreakpoint.getEntryBreakpointId(); if (entryBreakpoint.breakOnEntry()) { if (entryId == null) { // Create entry breakpoint entryId = commands.setCallBreakpoint(bpUri, entryBreakpoint.getMethodName(), config); entryBreakpoint.setEntryBreakpointId(entryId); } else { // Update entry breakpoint commands.updateBreakpoint(entryId, config); } } else { if (entryId != null) { // Remove existing entry breakpoint commands.removeBreakpoint(entryId); entryBreakpoint.setEntryBreakpointId(null); } } String exitId = entryBreakpoint.getExitBreakpointId(); if (entryBreakpoint.breakOnExit()) { if (exitId == null) { // Create exit breakpoint exitId = commands.setReturnBreakpoint(bpUri, entryBreakpoint.getMethodName(), config); entryBreakpoint.setExitBreakpointId(exitId); } else { // Update exit breakpoint commands.updateBreakpoint(exitId, config); } } else { if (exitId != null) { // Remove exit breakpoint commands.removeBreakpoint(exitId); entryBreakpoint.setExitBreakpointId(null); } } } else { // All other breakpoints final String id = breakpoint.getId(session); if (id != null) { final DbgpBreakpointConfig config = createBreakpointConfig(breakpoint); if (breakpoint instanceof IVdmWatchpoint) { config.setExpression(makeWatchpointExpression((IVdmWatchpoint) breakpoint)); } commands.updateBreakpoint(id, config); } } } }
public class class_name { protected void changeBreakpoint(final IDbgpSession session, IVdmBreakpoint breakpoint) throws DbgpException, CoreException { final IDbgpBreakpointCommands commands = session.getCoreCommands(); URI bpUri = null; // map the outgoing uri if we're a line breakpoint if (breakpoint instanceof IVdmLineBreakpoint) { IVdmLineBreakpoint bp = (IVdmLineBreakpoint) breakpoint; bpUri = bpPathMapper.map(bp.getResourceURI()); } if (breakpoint instanceof IVdmMethodEntryBreakpoint) { DbgpBreakpointConfig config = createBreakpointConfig(breakpoint); IVdmMethodEntryBreakpoint entryBreakpoint = (IVdmMethodEntryBreakpoint) breakpoint; String entryId = entryBreakpoint.getEntryBreakpointId(); if (entryBreakpoint.breakOnEntry()) { if (entryId == null) { // Create entry breakpoint entryId = commands.setCallBreakpoint(bpUri, entryBreakpoint.getMethodName(), config); // depends on control dependency: [if], data = [none] entryBreakpoint.setEntryBreakpointId(entryId); // depends on control dependency: [if], data = [(entryId] } else { // Update entry breakpoint commands.updateBreakpoint(entryId, config); // depends on control dependency: [if], data = [(entryId] } } else { if (entryId != null) { // Remove existing entry breakpoint commands.removeBreakpoint(entryId); // depends on control dependency: [if], data = [(entryId] entryBreakpoint.setEntryBreakpointId(null); // depends on control dependency: [if], data = [null)] } } String exitId = entryBreakpoint.getExitBreakpointId(); if (entryBreakpoint.breakOnExit()) { if (exitId == null) { // Create exit breakpoint exitId = commands.setReturnBreakpoint(bpUri, entryBreakpoint.getMethodName(), config); // depends on control dependency: [if], data = [none] entryBreakpoint.setExitBreakpointId(exitId); // depends on control dependency: [if], data = [(exitId] } else { // Update exit breakpoint commands.updateBreakpoint(exitId, config); // depends on control dependency: [if], data = [(exitId] } } else { if (exitId != null) { // Remove exit breakpoint commands.removeBreakpoint(exitId); // depends on control dependency: [if], data = [(exitId] entryBreakpoint.setExitBreakpointId(null); // depends on control dependency: [if], data = [null)] } } } else { // All other breakpoints final String id = breakpoint.getId(session); if (id != null) { final DbgpBreakpointConfig config = createBreakpointConfig(breakpoint); if (breakpoint instanceof IVdmWatchpoint) { config.setExpression(makeWatchpointExpression((IVdmWatchpoint) breakpoint)); // depends on control dependency: [if], data = [none] } commands.updateBreakpoint(id, config); // depends on control dependency: [if], data = [(id] } } } }
public class class_name { static <T extends FunctionMeta> Map<T, IDataModel> toMetaFunctions(Workbook book, Class<T> metaClass) { Map<T, IDataModel> map = new HashMap<>(); book.addToolPack(Functions.getUdfFinder()); final FormulaParsingWorkbook parsingBook = create((XSSFWorkbook) book); Sheet s = book.getSheetAt(0); /* TODO: only one sheet is supported */ for (Row r : s) { for (Cell c : r) { if (c == null || CELL_TYPE_FORMULA != c.getCellType()) { continue; } try { String formula = c.getCellFormula(); String keyword = metaClass.getAnnotation(FunctionMeta.MetaFunctionKeyword.class).value(); if (!formula.startsWith(keyword)) { continue; } IDataModel dataModel = createDataModelFromCell(s, parsingBook, fromRowColumn(c.getRowIndex(), c.getColumnIndex())); T meta = createAttributeFunctionMeta(metaClass, formula, dataModel); map.put(meta, dataModel); } catch (Exception e) { log.debug("Warning while parsing custom excel formula. It is OK.", e); } } } return map; } }
public class class_name { static <T extends FunctionMeta> Map<T, IDataModel> toMetaFunctions(Workbook book, Class<T> metaClass) { Map<T, IDataModel> map = new HashMap<>(); book.addToolPack(Functions.getUdfFinder()); final FormulaParsingWorkbook parsingBook = create((XSSFWorkbook) book); Sheet s = book.getSheetAt(0); /* TODO: only one sheet is supported */ for (Row r : s) { for (Cell c : r) { if (c == null || CELL_TYPE_FORMULA != c.getCellType()) { continue; } try { String formula = c.getCellFormula(); String keyword = metaClass.getAnnotation(FunctionMeta.MetaFunctionKeyword.class).value(); if (!formula.startsWith(keyword)) { continue; } IDataModel dataModel = createDataModelFromCell(s, parsingBook, fromRowColumn(c.getRowIndex(), c.getColumnIndex())); T meta = createAttributeFunctionMeta(metaClass, formula, dataModel); map.put(meta, dataModel); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.debug("Warning while parsing custom excel formula. It is OK.", e); } // depends on control dependency: [catch], data = [none] } } return map; } }
public class class_name { @SuppressWarnings("unchecked") public static <T> T invoke(Object obj, Method method, Object... args) throws UtilException { if (false == method.isAccessible()) { method.setAccessible(true); } try { return (T) method.invoke(ClassUtil.isStatic(method) ? null : obj, args); } catch (Exception e) { throw new UtilException(e); } } }
public class class_name { @SuppressWarnings("unchecked") public static <T> T invoke(Object obj, Method method, Object... args) throws UtilException { if (false == method.isAccessible()) { method.setAccessible(true); // depends on control dependency: [if], data = [none] } try { return (T) method.invoke(ClassUtil.isStatic(method) ? null : obj, args); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new UtilException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private boolean isStringTokenMatched(AnalyzedToken token) { String testToken = getTestToken(token); if (stringRegExp) { Matcher m = pattern.matcher(new InterruptibleCharSequence(testToken)); return m.matches(); } if (caseSensitive) { return stringToken.equals(testToken); } return stringToken.equalsIgnoreCase(testToken); } }
public class class_name { private boolean isStringTokenMatched(AnalyzedToken token) { String testToken = getTestToken(token); if (stringRegExp) { Matcher m = pattern.matcher(new InterruptibleCharSequence(testToken)); return m.matches(); // depends on control dependency: [if], data = [none] } if (caseSensitive) { return stringToken.equals(testToken); // depends on control dependency: [if], data = [none] } return stringToken.equalsIgnoreCase(testToken); } }
public class class_name { public List<CloudTrailSource> parseMessage(List<Message> sqsMessages) { List<CloudTrailSource> sources = new ArrayList<>(); for (Message sqsMessage : sqsMessages) { boolean parseMessageSuccess = false; ProgressStatus parseMessageStatus = new ProgressStatus(ProgressState.parseMessage, new BasicParseMessageInfo(sqsMessage, parseMessageSuccess)); final Object reportObject = progressReporter.reportStart(parseMessageStatus); CloudTrailSource ctSource = null; try { ctSource = sourceSerializer.getSource(sqsMessage); if (containsCloudTrailLogs(ctSource)) { sources.add(ctSource); parseMessageSuccess = true; } } catch (Exception e) { LibraryUtils.handleException(exceptionHandler, parseMessageStatus, e, "Failed to parse sqs message."); } finally { if (containsCloudTrailValidationMessage(ctSource) || shouldDeleteMessageUponFailure(parseMessageSuccess)) { deleteMessageFromQueue(sqsMessage, new ProgressStatus(ProgressState.deleteMessage, new BasicParseMessageInfo(sqsMessage, false))); } LibraryUtils.endToProcess(progressReporter, parseMessageSuccess, parseMessageStatus, reportObject); } } return sources; } }
public class class_name { public List<CloudTrailSource> parseMessage(List<Message> sqsMessages) { List<CloudTrailSource> sources = new ArrayList<>(); for (Message sqsMessage : sqsMessages) { boolean parseMessageSuccess = false; ProgressStatus parseMessageStatus = new ProgressStatus(ProgressState.parseMessage, new BasicParseMessageInfo(sqsMessage, parseMessageSuccess)); final Object reportObject = progressReporter.reportStart(parseMessageStatus); CloudTrailSource ctSource = null; try { ctSource = sourceSerializer.getSource(sqsMessage); // depends on control dependency: [try], data = [none] if (containsCloudTrailLogs(ctSource)) { sources.add(ctSource); // depends on control dependency: [if], data = [none] parseMessageSuccess = true; // depends on control dependency: [if], data = [none] } } catch (Exception e) { LibraryUtils.handleException(exceptionHandler, parseMessageStatus, e, "Failed to parse sqs message."); } finally { // depends on control dependency: [catch], data = [none] if (containsCloudTrailValidationMessage(ctSource) || shouldDeleteMessageUponFailure(parseMessageSuccess)) { deleteMessageFromQueue(sqsMessage, new ProgressStatus(ProgressState.deleteMessage, new BasicParseMessageInfo(sqsMessage, false))); // depends on control dependency: [if], data = [none] } LibraryUtils.endToProcess(progressReporter, parseMessageSuccess, parseMessageStatus, reportObject); } } return sources; } }
public class class_name { public Observable<ServiceResponse<SharedAccessSignatureAuthorizationRuleInner>> getKeysForKeyNameWithServiceResponseAsync(String resourceGroupName, String resourceName, String keyName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (resourceName == null) { throw new IllegalArgumentException("Parameter resourceName is required and cannot be null."); } if (keyName == null) { throw new IllegalArgumentException("Parameter keyName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.getKeysForKeyName(this.client.subscriptionId(), resourceGroupName, resourceName, keyName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<SharedAccessSignatureAuthorizationRuleInner>>>() { @Override public Observable<ServiceResponse<SharedAccessSignatureAuthorizationRuleInner>> call(Response<ResponseBody> response) { try { ServiceResponse<SharedAccessSignatureAuthorizationRuleInner> clientResponse = getKeysForKeyNameDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } }
public class class_name { public Observable<ServiceResponse<SharedAccessSignatureAuthorizationRuleInner>> getKeysForKeyNameWithServiceResponseAsync(String resourceGroupName, String resourceName, String keyName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (resourceName == null) { throw new IllegalArgumentException("Parameter resourceName is required and cannot be null."); } if (keyName == null) { throw new IllegalArgumentException("Parameter keyName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.getKeysForKeyName(this.client.subscriptionId(), resourceGroupName, resourceName, keyName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<SharedAccessSignatureAuthorizationRuleInner>>>() { @Override public Observable<ServiceResponse<SharedAccessSignatureAuthorizationRuleInner>> call(Response<ResponseBody> response) { try { ServiceResponse<SharedAccessSignatureAuthorizationRuleInner> clientResponse = getKeysForKeyNameDelegate(response); return Observable.just(clientResponse); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { public static int typeCompareTo(Writable one, Writable other) { PrimitiveObject noOne = hadoop2Primitive(one); PrimitiveObject noOther = hadoop2Primitive(other); if (noOne.getType() != noOther.getType()) { return -1; } return 0; } }
public class class_name { public static int typeCompareTo(Writable one, Writable other) { PrimitiveObject noOne = hadoop2Primitive(one); PrimitiveObject noOther = hadoop2Primitive(other); if (noOne.getType() != noOther.getType()) { return -1; // depends on control dependency: [if], data = [none] } return 0; } }
public class class_name { public boolean runOptimize() { boolean answer = false; for (int i = 0; i < this.highLowContainer.size(); i++) { Container c = this.highLowContainer.getContainerAtIndex(i).runOptimize(); if (c instanceof RunContainer) { answer = true; } this.highLowContainer.setContainerAtIndex(i, c); } return answer; } }
public class class_name { public boolean runOptimize() { boolean answer = false; for (int i = 0; i < this.highLowContainer.size(); i++) { Container c = this.highLowContainer.getContainerAtIndex(i).runOptimize(); if (c instanceof RunContainer) { answer = true; // depends on control dependency: [if], data = [none] } this.highLowContainer.setContainerAtIndex(i, c); // depends on control dependency: [for], data = [i] } return answer; } }
public class class_name { public void uninstall(String pluginKey, File uninstallDir) { Set<String> uninstallKeys = new HashSet<>(); uninstallKeys.add(pluginKey); appendDependentPluginKeys(pluginKey, uninstallKeys); for (String uninstallKey : uninstallKeys) { PluginInfo info = getPluginInfo(uninstallKey); try { if (!getPluginFile(info).exists()) { LOG.info("Plugin already uninstalled: {} [{}]", info.getName(), info.getKey()); continue; } LOG.info("Uninstalling plugin {} [{}]", info.getName(), info.getKey()); File masterFile = getPluginFile(info); moveFileToDirectory(masterFile, uninstallDir, true); } catch (IOException e) { throw new IllegalStateException(format("Fail to uninstall plugin %s [%s]", info.getName(), info.getKey()), e); } } } }
public class class_name { public void uninstall(String pluginKey, File uninstallDir) { Set<String> uninstallKeys = new HashSet<>(); uninstallKeys.add(pluginKey); appendDependentPluginKeys(pluginKey, uninstallKeys); for (String uninstallKey : uninstallKeys) { PluginInfo info = getPluginInfo(uninstallKey); try { if (!getPluginFile(info).exists()) { LOG.info("Plugin already uninstalled: {} [{}]", info.getName(), info.getKey()); // depends on control dependency: [if], data = [none] continue; } LOG.info("Uninstalling plugin {} [{}]", info.getName(), info.getKey()); // depends on control dependency: [try], data = [none] File masterFile = getPluginFile(info); moveFileToDirectory(masterFile, uninstallDir, true); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new IllegalStateException(format("Fail to uninstall plugin %s [%s]", info.getName(), info.getKey()), e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { PrintWriter out; String title = "Index of "; String dirName = (String)req.getAttribute("com.ibm.servlet.engine.webapp.dir.browsing.path"); String reqURI = (String)req.getAttribute("com.ibm.servlet.engine.webapp.dir.browsing.uri"); // PK81387 Start //I believe dirName is now the local dir, not the file system path File dir = null; URL dirURL = null; String dirNameFileSystemPath = req.getRealPath(dirName); //this is a servlet, we only have access to the ServletContextFacade ServletContext context = getServletConfig().getServletContext(); boolean fileSystem = false; //dirNameFileSystemPath!=null is not a good enough check to determine if you're in a container //if we have a container and there have been jsps compiled, it gives you the directory in the workarea if (dirNameFileSystemPath!=null) { fileSystem = true; dirName=dirNameFileSystemPath; dir = new File(dirNameFileSystemPath); // get path to war directory String contextRealPath = context.getRealPath("/"); int idx=dirName.lastIndexOf(contextRealPath); if (idx!=-1) { // subtract the war directory from teh reqiested directory String matchString=dirName.substring(idx+contextRealPath.length()); matchString=matchString.replace(File.separator,"/"); // Ensure matchString starts with "/" so WSUtil.resolveURI processes leading "."s if (!matchString.startsWith("/")) { matchString="/"+matchString; } // remove a trailing "/" so tthat uriCaseCheck does a vlaid check. if (matchString.endsWith("/")) { matchString=matchString.substring(0, matchString.length()-1); } // checkWEB-INF unless esposeWebInfoOnDispatch is set and we are in a dispatched request boolean checkWEBINF = !WCCustomProperties.EXPOSE_WEB_INF_ON_DISPATCH || (req.getAttribute(WebAppRequestDispatcher.DISPATCH_NESTED_ATTR)==null) ; try { if (!com.ibm.wsspi.webcontainer.util.FileSystem.uriCaseCheck(dir, matchString,checkWEBINF)) { resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } catch (java.lang.IllegalArgumentException exc) { // Must be traversing back directories resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } else { // Must be traversing back directories //dirNameFileSystemPath was normalized in Liberty, whereas in tWAS it contained the root path followed by whatever was entered resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } else { //container dirURL = context.getResource(dirName); if (dirURL == null) { resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } // PK81387 End if (!reqURI.endsWith("/")) reqURI += '/'; title += reqURI; // make sure we can access it if (fileSystem && !dir.canRead()) resp.sendError(404, nls.getString("File.not.found", "File not found")); // set the content type // set the content type as UTF-8 as filenames are encoded in UTF-8 resp.setContentType("text/html; charset=UTF-8"); // write the output out = resp.getWriter(); out.println("<HTML><HEAD><TITLE>"); out.println(title); out.println("</TITLE></HEAD><BODY>"); out.println("<H1 align=\"left\">" + title + "</H1>"); out.println("<HR size=\"3\"><TABLE cellpadding=\"2\"><TBODY><TR bgcolor=\"#d7ffff\">"); // output the table headers out.println("<TH width=\"250\" nowrap><P align=\"left\">Name</P></TH>"); out.println("<TH width=\"250\" nowrap><P align=\"left\">Last Modified</P></TH>"); out.println("<TH width=\"150\" nowrap><P 
align=\"left\">Size</P></TH>"); out.println("<TH width=\"300\" nowrap><P align=\"left\">Description</P></TH></TR>"); // output a row for each file in the directory if (fileSystem) { fillTableRows(dir, reqURI, out); } else { fillTableRows(context, dirName, reqURI, out); } // finish the page out.println("</TBODY></TABLE></BODY></HTML>"); // close it up out.close(); } }
public class class_name { public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { PrintWriter out; String title = "Index of "; String dirName = (String)req.getAttribute("com.ibm.servlet.engine.webapp.dir.browsing.path"); String reqURI = (String)req.getAttribute("com.ibm.servlet.engine.webapp.dir.browsing.uri"); // PK81387 Start //I believe dirName is now the local dir, not the file system path File dir = null; URL dirURL = null; String dirNameFileSystemPath = req.getRealPath(dirName); //this is a servlet, we only have access to the ServletContextFacade ServletContext context = getServletConfig().getServletContext(); boolean fileSystem = false; //dirNameFileSystemPath!=null is not a good enough check to determine if you're in a container //if we have a container and there have been jsps compiled, it gives you the directory in the workarea if (dirNameFileSystemPath!=null) { fileSystem = true; dirName=dirNameFileSystemPath; dir = new File(dirNameFileSystemPath); // get path to war directory String contextRealPath = context.getRealPath("/"); int idx=dirName.lastIndexOf(contextRealPath); if (idx!=-1) { // subtract the war directory from teh reqiested directory String matchString=dirName.substring(idx+contextRealPath.length()); matchString=matchString.replace(File.separator,"/"); // Ensure matchString starts with "/" so WSUtil.resolveURI processes leading "."s if (!matchString.startsWith("/")) { matchString="/"+matchString; } // remove a trailing "/" so tthat uriCaseCheck does a vlaid check. if (matchString.endsWith("/")) { matchString=matchString.substring(0, matchString.length()-1); } // checkWEB-INF unless esposeWebInfoOnDispatch is set and we are in a dispatched request boolean checkWEBINF = !WCCustomProperties.EXPOSE_WEB_INF_ON_DISPATCH || (req.getAttribute(WebAppRequestDispatcher.DISPATCH_NESTED_ATTR)==null) ; try { if (!com.ibm.wsspi.webcontainer.util.FileSystem.uriCaseCheck(dir, matchString,checkWEBINF)) { resp.sendError(404, nls.getString("File.not.found", "File not found")); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } catch (java.lang.IllegalArgumentException exc) { // Must be traversing back directories resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } else { // Must be traversing back directories //dirNameFileSystemPath was normalized in Liberty, whereas in tWAS it contained the root path followed by whatever was entered resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } else { //container dirURL = context.getResource(dirName); if (dirURL == null) { resp.sendError(404, nls.getString("File.not.found", "File not found")); return; } } // PK81387 End if (!reqURI.endsWith("/")) reqURI += '/'; title += reqURI; // make sure we can access it if (fileSystem && !dir.canRead()) resp.sendError(404, nls.getString("File.not.found", "File not found")); // set the content type // set the content type as UTF-8 as filenames are encoded in UTF-8 resp.setContentType("text/html; charset=UTF-8"); // write the output out = resp.getWriter(); out.println("<HTML><HEAD><TITLE>"); out.println(title); out.println("</TITLE></HEAD><BODY>"); out.println("<H1 align=\"left\">" + title + "</H1>"); out.println("<HR size=\"3\"><TABLE cellpadding=\"2\"><TBODY><TR bgcolor=\"#d7ffff\">"); // output the table headers out.println("<TH width=\"250\" nowrap><P align=\"left\">Name</P></TH>"); out.println("<TH width=\"250\" nowrap><P 
align=\"left\">Last Modified</P></TH>"); out.println("<TH width=\"150\" nowrap><P align=\"left\">Size</P></TH>"); out.println("<TH width=\"300\" nowrap><P align=\"left\">Description</P></TH></TR>"); // output a row for each file in the directory if (fileSystem) { fillTableRows(dir, reqURI, out); } else { fillTableRows(context, dirName, reqURI, out); } // finish the page out.println("</TBODY></TABLE></BODY></HTML>"); // close it up out.close(); } }
public class class_name { public static synchronized void removeBuffer(String installationID) { for (final Iterator i = buffers.iterator(); i.hasNext();) { final NetworkBuffer b = (NetworkBuffer) i.next(); if (b.inst.equals(installationID)) { i.remove(); while (!b.configs.isEmpty()) b.removeConfiguration((Configuration) b.configs .get(b.configs.size() - 1)); logger.info("removed network buffer \"" + installationID + "\""); return; } } } }
public class class_name { public static synchronized void removeBuffer(String installationID) { for (final Iterator i = buffers.iterator(); i.hasNext();) { final NetworkBuffer b = (NetworkBuffer) i.next(); if (b.inst.equals(installationID)) { i.remove(); // depends on control dependency: [if], data = [none] while (!b.configs.isEmpty()) b.removeConfiguration((Configuration) b.configs .get(b.configs.size() - 1)); logger.info("removed network buffer \"" + installationID + "\""); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } } }
public class class_name { int getTrailCCFromCompYesAndZeroCC(CharSequence s, int cpStart, int cpLimit) { int c; if(cpStart==(cpLimit-1)) { c=s.charAt(cpStart); } else { c=Character.codePointAt(s, cpStart); } int prevNorm16=getNorm16(c); if(prevNorm16<=minYesNo) { return 0; // yesYes and Hangul LV/LVT have ccc=tccc=0 } else { return extraData.charAt(prevNorm16)>>8; // tccc from yesNo } } }
public class class_name { int getTrailCCFromCompYesAndZeroCC(CharSequence s, int cpStart, int cpLimit) { int c; if(cpStart==(cpLimit-1)) { c=s.charAt(cpStart); // depends on control dependency: [if], data = [(cpStart] } else { c=Character.codePointAt(s, cpStart); // depends on control dependency: [if], data = [none] } int prevNorm16=getNorm16(c); if(prevNorm16<=minYesNo) { return 0; // yesYes and Hangul LV/LVT have ccc=tccc=0 // depends on control dependency: [if], data = [none] } else { return extraData.charAt(prevNorm16)>>8; // tccc from yesNo // depends on control dependency: [if], data = [(prevNorm16] } } }
public class class_name { private String getCategoryLabel(String categoryPath) { CmsObject cms = A_CmsUI.getCmsObject(); String result = ""; if (CmsStringUtil.isEmptyOrWhitespaceOnly(categoryPath)) { return result; } Locale locale = UI.getCurrent().getLocale(); CmsCategoryService catService = CmsCategoryService.getInstance(); try { if (m_useFullPathCategories) { //cut last slash categoryPath = categoryPath.substring(0, categoryPath.length() - 1); String currentPath = ""; boolean isFirst = true; for (String part : categoryPath.split("/")) { currentPath += part + "/"; CmsCategory cat = catService.localizeCategory( cms, catService.readCategory(cms, currentPath, "/"), locale); if (!isFirst) { result += " / "; } else { isFirst = false; } result += cat.getTitle(); } } else { CmsCategory cat = catService.localizeCategory( cms, catService.readCategory(cms, categoryPath, "/"), locale); result = cat.getTitle(); } } catch (Exception e) { LOG.error("Error reading category " + categoryPath + ".", e); } return result; } }
public class class_name { private String getCategoryLabel(String categoryPath) { CmsObject cms = A_CmsUI.getCmsObject(); String result = ""; if (CmsStringUtil.isEmptyOrWhitespaceOnly(categoryPath)) { return result; // depends on control dependency: [if], data = [none] } Locale locale = UI.getCurrent().getLocale(); CmsCategoryService catService = CmsCategoryService.getInstance(); try { if (m_useFullPathCategories) { //cut last slash categoryPath = categoryPath.substring(0, categoryPath.length() - 1); // depends on control dependency: [if], data = [none] String currentPath = ""; boolean isFirst = true; for (String part : categoryPath.split("/")) { currentPath += part + "/"; // depends on control dependency: [for], data = [part] CmsCategory cat = catService.localizeCategory( cms, catService.readCategory(cms, currentPath, "/"), locale); if (!isFirst) { result += " / "; // depends on control dependency: [if], data = [none] } else { isFirst = false; // depends on control dependency: [if], data = [none] } result += cat.getTitle(); // depends on control dependency: [for], data = [none] } } else { CmsCategory cat = catService.localizeCategory( cms, catService.readCategory(cms, categoryPath, "/"), locale); result = cat.getTitle(); // depends on control dependency: [if], data = [none] } } catch (Exception e) { LOG.error("Error reading category " + categoryPath + ".", e); } // depends on control dependency: [catch], data = [none] return result; } }
public class class_name { @Override public CommerceSubscriptionEntry fetchByPrimaryKey(Serializable primaryKey) { Serializable serializable = entityCache.getResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceSubscriptionEntryImpl.class, primaryKey); if (serializable == nullModel) { return null; } CommerceSubscriptionEntry commerceSubscriptionEntry = (CommerceSubscriptionEntry)serializable; if (commerceSubscriptionEntry == null) { Session session = null; try { session = openSession(); commerceSubscriptionEntry = (CommerceSubscriptionEntry)session.get(CommerceSubscriptionEntryImpl.class, primaryKey); if (commerceSubscriptionEntry != null) { cacheResult(commerceSubscriptionEntry); } else { entityCache.putResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceSubscriptionEntryImpl.class, primaryKey, nullModel); } } catch (Exception e) { entityCache.removeResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceSubscriptionEntryImpl.class, primaryKey); throw processException(e); } finally { closeSession(session); } } return commerceSubscriptionEntry; } }
public class class_name { @Override public CommerceSubscriptionEntry fetchByPrimaryKey(Serializable primaryKey) { Serializable serializable = entityCache.getResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceSubscriptionEntryImpl.class, primaryKey); if (serializable == nullModel) { return null; // depends on control dependency: [if], data = [none] } CommerceSubscriptionEntry commerceSubscriptionEntry = (CommerceSubscriptionEntry)serializable; if (commerceSubscriptionEntry == null) { Session session = null; try { session = openSession(); // depends on control dependency: [try], data = [none] commerceSubscriptionEntry = (CommerceSubscriptionEntry)session.get(CommerceSubscriptionEntryImpl.class, primaryKey); // depends on control dependency: [try], data = [none] if (commerceSubscriptionEntry != null) { cacheResult(commerceSubscriptionEntry); // depends on control dependency: [if], data = [(commerceSubscriptionEntry] } else { entityCache.putResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceSubscriptionEntryImpl.class, primaryKey, nullModel); // depends on control dependency: [if], data = [none] } } catch (Exception e) { entityCache.removeResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceSubscriptionEntryImpl.class, primaryKey); throw processException(e); } // depends on control dependency: [catch], data = [none] finally { closeSession(session); } } return commerceSubscriptionEntry; } }
public class class_name { public boolean removeRelation( TemporalExtendedPropositionDefinition lhsRule, TemporalExtendedPropositionDefinition rhsRule) { List<TemporalExtendedPropositionDefinition> key = Arrays.asList( lhsRule, rhsRule); if (defPairsMap.remove(key) != null) { return true; } else { return false; } } }
public class class_name { public boolean removeRelation( TemporalExtendedPropositionDefinition lhsRule, TemporalExtendedPropositionDefinition rhsRule) { List<TemporalExtendedPropositionDefinition> key = Arrays.asList( lhsRule, rhsRule); if (defPairsMap.remove(key) != null) { return true; // depends on control dependency: [if], data = [none] } else { return false; // depends on control dependency: [if], data = [none] } } }
public class class_name { public double getStorageSize(String name, String defaultValue, StorageUnit targetUnit) { Preconditions.checkState(isNotBlank(name), "Key cannot be blank."); String vString = get(name); if (isBlank(vString)) { vString = defaultValue; } // Please note: There is a bit of subtlety here. If the user specifies // the default unit as "1GB", but the requested unit is MB, we will return // the format in MB even thought the default string is specified in GB. // Converts a string like "1GB" to to unit specified in targetUnit. StorageSize measure = StorageSize.parse(vString); return convertStorageUnit(measure.getValue(), measure.getUnit(), targetUnit); } }
public class class_name { public double getStorageSize(String name, String defaultValue, StorageUnit targetUnit) { Preconditions.checkState(isNotBlank(name), "Key cannot be blank."); String vString = get(name); if (isBlank(vString)) { vString = defaultValue; // depends on control dependency: [if], data = [none] } // Please note: There is a bit of subtlety here. If the user specifies // the default unit as "1GB", but the requested unit is MB, we will return // the format in MB even thought the default string is specified in GB. // Converts a string like "1GB" to to unit specified in targetUnit. StorageSize measure = StorageSize.parse(vString); return convertStorageUnit(measure.getValue(), measure.getUnit(), targetUnit); } }
public class class_name { private void start() { if (independent) { logger.info("KAFKA 启动模式[independent]"); new Thread(new Runnable() { @Override public void run() { registerKafkaSubscriber(); } }).start(); } else { registerKafkaSubscriber(); } } }
public class class_name { private void start() { if (independent) { logger.info("KAFKA 启动模式[independent]"); // depends on control dependency: [if], data = [none] new Thread(new Runnable() { @Override public void run() { registerKafkaSubscriber(); } }).start(); // depends on control dependency: [if], data = [none] } else { registerKafkaSubscriber(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public Pattern getPattern() { if ((this.pattern == null) && (alias != null)) { this.pattern = Pattern.compile("\\s*" + ParserUtility.wildcardToRegex(alias)); } return pattern; } }
public class class_name { public Pattern getPattern() { if ((this.pattern == null) && (alias != null)) { this.pattern = Pattern.compile("\\s*" + ParserUtility.wildcardToRegex(alias)); // depends on control dependency: [if], data = [none] } return pattern; } }
public class class_name { public static void setClipboardText(final Context context, final String text) { if(text != null) { final ClipboardManager clipboard = (ClipboardManager) context.getSystemService(Context.CLIPBOARD_SERVICE); final ClipData clipData = ClipData.newPlainText(text, text); clipboard.setPrimaryClip(clipData); } } }
public class class_name { public static void setClipboardText(final Context context, final String text) { if(text != null) { final ClipboardManager clipboard = (ClipboardManager) context.getSystemService(Context.CLIPBOARD_SERVICE); final ClipData clipData = ClipData.newPlainText(text, text); clipboard.setPrimaryClip(clipData); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void fit(DataSet dataSet) { if (numInputArrays != 1 || numOutputArrays != 1) throw new UnsupportedOperationException("Cannot train ComputationGraph network with " + " multiple inputs or outputs using a DataSet"); boolean hasMaskArrays = dataSet.hasMaskArrays(); if (hasMaskArrays) { INDArray[] fMask = (dataSet.getFeaturesMaskArray() != null ? new INDArray[]{dataSet.getFeaturesMaskArray()} : null); INDArray[] lMask = (dataSet.getLabelsMaskArray() != null ? new INDArray[]{dataSet.getLabelsMaskArray()} : null); fit(new INDArray[]{dataSet.getFeatures()}, new INDArray[]{dataSet.getLabels()}, fMask, lMask); } else { fit(new INDArray[]{dataSet.getFeatures()}, new INDArray[]{dataSet.getLabels()}); } if (hasMaskArrays) clearLayerMaskArrays(); clearLayersStates(); } }
public class class_name { public void fit(DataSet dataSet) { if (numInputArrays != 1 || numOutputArrays != 1) throw new UnsupportedOperationException("Cannot train ComputationGraph network with " + " multiple inputs or outputs using a DataSet"); boolean hasMaskArrays = dataSet.hasMaskArrays(); if (hasMaskArrays) { INDArray[] fMask = (dataSet.getFeaturesMaskArray() != null ? new INDArray[]{dataSet.getFeaturesMaskArray()} : null); INDArray[] lMask = (dataSet.getLabelsMaskArray() != null ? new INDArray[]{dataSet.getLabelsMaskArray()} : null); fit(new INDArray[]{dataSet.getFeatures()}, new INDArray[]{dataSet.getLabels()}, fMask, lMask); // depends on control dependency: [if], data = [none] } else { fit(new INDArray[]{dataSet.getFeatures()}, new INDArray[]{dataSet.getLabels()}); // depends on control dependency: [if], data = [none] } if (hasMaskArrays) clearLayerMaskArrays(); clearLayersStates(); } }
public class class_name { public static FontUIResource getFont(PropertyOwner propertyOwner, Map<String,Object> properties, boolean returnDefaultIfNone) { String strFontName = ScreenUtil.getPropery(ScreenUtil.FONT_NAME, propertyOwner, properties, null); String strFontSize = ScreenUtil.getPropery(ScreenUtil.FONT_SIZE, propertyOwner, properties, null); String strFontStyle = ScreenUtil.getPropery(ScreenUtil.FONT_STYLE, propertyOwner, properties, null); if ((strFontName == null) || (strFontName.length() == 0)) { if (!returnDefaultIfNone) return null; strFontName = Font.DIALOG; // Default font } int iSize = 18; // Default size if ((strFontSize != null) && (strFontSize.length() > 0)) iSize = Integer.parseInt(strFontSize); int iStyle = Font.PLAIN; if ((strFontStyle != null) && (strFontStyle.length() > 0)) iStyle = Integer.parseInt(strFontStyle); return new FontUIResource(strFontName, iStyle, iSize); } }
public class class_name { public static FontUIResource getFont(PropertyOwner propertyOwner, Map<String,Object> properties, boolean returnDefaultIfNone) { String strFontName = ScreenUtil.getPropery(ScreenUtil.FONT_NAME, propertyOwner, properties, null); String strFontSize = ScreenUtil.getPropery(ScreenUtil.FONT_SIZE, propertyOwner, properties, null); String strFontStyle = ScreenUtil.getPropery(ScreenUtil.FONT_STYLE, propertyOwner, properties, null); if ((strFontName == null) || (strFontName.length() == 0)) { if (!returnDefaultIfNone) return null; strFontName = Font.DIALOG; // Default font // depends on control dependency: [if], data = [none] } int iSize = 18; // Default size if ((strFontSize != null) && (strFontSize.length() > 0)) iSize = Integer.parseInt(strFontSize); int iStyle = Font.PLAIN; if ((strFontStyle != null) && (strFontStyle.length() > 0)) iStyle = Integer.parseInt(strFontStyle); return new FontUIResource(strFontName, iStyle, iSize); } }
public class class_name { private int countInstances(HeaderElement root) { int count = 0; HeaderElement elem = root; while (null != elem) { if (!elem.wasRemoved()) { count++; } elem = elem.nextInstance; } return count; } }
public class class_name { private int countInstances(HeaderElement root) { int count = 0; HeaderElement elem = root; while (null != elem) { if (!elem.wasRemoved()) { count++; // depends on control dependency: [if], data = [none] } elem = elem.nextInstance; // depends on control dependency: [while], data = [none] } return count; } }
public class class_name { private String getCookieParam(String param, String cookieString) { if (param.equals(NAME)) { return cookieString.substring(0, cookieString.indexOf('=')); } if (param.equals(VALUE)) { if (cookieString.contains(";")) { return cookieString.substring(cookieString.indexOf('=') + 1, cookieString.indexOf(';')); } else { return cookieString.substring(cookieString.indexOf('=') + 1); } } if(containsFlag(SECURE, param, cookieString) || containsFlag(HTTP_ONLY, param, cookieString)) { return String.valueOf(true); } if (cookieString.contains(param + '=')) { final int endParam = cookieString.indexOf(';', cookieString.indexOf(param + '=')); final int beginIndex = cookieString.indexOf(param + '=') + param.length() + 1; if (endParam > 0) { return cookieString.substring(beginIndex, endParam); } else { return cookieString.substring(beginIndex); } } throw new CitrusRuntimeException(String.format( "Unable to get cookie argument '%s' from cookie String: %s", param, cookieString)); } }
public class class_name { private String getCookieParam(String param, String cookieString) { if (param.equals(NAME)) { return cookieString.substring(0, cookieString.indexOf('=')); // depends on control dependency: [if], data = [none] } if (param.equals(VALUE)) { if (cookieString.contains(";")) { return cookieString.substring(cookieString.indexOf('=') + 1, cookieString.indexOf(';')); // depends on control dependency: [if], data = [none] } else { return cookieString.substring(cookieString.indexOf('=') + 1); // depends on control dependency: [if], data = [none] } } if(containsFlag(SECURE, param, cookieString) || containsFlag(HTTP_ONLY, param, cookieString)) { return String.valueOf(true); // depends on control dependency: [if], data = [none] } if (cookieString.contains(param + '=')) { final int endParam = cookieString.indexOf(';', cookieString.indexOf(param + '=')); final int beginIndex = cookieString.indexOf(param + '=') + param.length() + 1; if (endParam > 0) { return cookieString.substring(beginIndex, endParam); // depends on control dependency: [if], data = [none] } else { return cookieString.substring(beginIndex); // depends on control dependency: [if], data = [none] } } throw new CitrusRuntimeException(String.format( "Unable to get cookie argument '%s' from cookie String: %s", param, cookieString)); } }
public class class_name { @Override public Filters.Filter adapt(FilterAdapterContext context, FilterList filter) throws IOException { try (ContextCloseable ignored = context.beginFilterList(filter)) { List<Filters.Filter> childFilters = collectChildFilters(context, filter); if (childFilters.isEmpty()) { return null; } else if (childFilters.size() == 1) { return childFilters.get(0); } else if (filter.getOperator() == Operator.MUST_PASS_ALL) { ChainFilter chain = FILTERS.chain(); for (Filters.Filter filterModel : childFilters) { chain.filter(filterModel); } return chain; } else { InterleaveFilter interleave = FILTERS.interleave(); for (Filters.Filter filterModel : childFilters) { interleave.filter(filterModel); } return interleave; } } } }
public class class_name { @Override public Filters.Filter adapt(FilterAdapterContext context, FilterList filter) throws IOException { try (ContextCloseable ignored = context.beginFilterList(filter)) { List<Filters.Filter> childFilters = collectChildFilters(context, filter); if (childFilters.isEmpty()) { return null; } else if (childFilters.size() == 1) { return childFilters.get(0); } else if (filter.getOperator() == Operator.MUST_PASS_ALL) { ChainFilter chain = FILTERS.chain(); for (Filters.Filter filterModel : childFilters) { chain.filter(filterModel); // depends on control dependency: [for], data = [filterModel] } return chain; } else { InterleaveFilter interleave = FILTERS.interleave(); for (Filters.Filter filterModel : childFilters) { interleave.filter(filterModel); // depends on control dependency: [for], data = [filterModel] } return interleave; } } } }
public class class_name { private Collection<Snapshot> loadSnapshots() { // Ensure log directories are created. storage.directory().mkdirs(); List<Snapshot> snapshots = new ArrayList<>(); // Iterate through all files in the log directory. for (File file : storage.directory().listFiles(File::isFile)) { // If the file looks like a segment file, attempt to load the segment. if (SnapshotFile.isSnapshotFile(file)) { SnapshotFile snapshotFile = new SnapshotFile(file); SnapshotDescriptor descriptor = new SnapshotDescriptor(FileBuffer.allocate(file, SnapshotDescriptor.BYTES)); // Valid segments will have been locked. Segments that resulting from failures during log cleaning will be // unlocked and should ultimately be deleted from disk. if (descriptor.isLocked()) { log.debug("Loaded disk snapshot: {} ({})", descriptor.index(), snapshotFile.file().getName()); snapshots.add(new FileSnapshot(snapshotFile, descriptor, this)); descriptor.close(); } // If the segment descriptor wasn't locked, close and delete the descriptor. else { log.debug("Deleting partial snapshot: {} ({})", descriptor.index(), snapshotFile.file().getName()); descriptor.close(); descriptor.delete(); } } } return snapshots; } }
public class class_name { private Collection<Snapshot> loadSnapshots() { // Ensure log directories are created. storage.directory().mkdirs(); List<Snapshot> snapshots = new ArrayList<>(); // Iterate through all files in the log directory. for (File file : storage.directory().listFiles(File::isFile)) { // If the file looks like a segment file, attempt to load the segment. if (SnapshotFile.isSnapshotFile(file)) { SnapshotFile snapshotFile = new SnapshotFile(file); SnapshotDescriptor descriptor = new SnapshotDescriptor(FileBuffer.allocate(file, SnapshotDescriptor.BYTES)); // Valid segments will have been locked. Segments that result from failures during log cleaning will be // unlocked and should ultimately be deleted from disk. if (descriptor.isLocked()) { log.debug("Loaded disk snapshot: {} ({})", descriptor.index(), snapshotFile.file().getName()); // depends on control dependency: [if], data = [none] snapshots.add(new FileSnapshot(snapshotFile, descriptor, this)); // depends on control dependency: [if], data = [none] descriptor.close(); // depends on control dependency: [if], data = [none] } // If the segment descriptor wasn't locked, close and delete the descriptor. else { log.debug("Deleting partial snapshot: {} ({})", descriptor.index(), snapshotFile.file().getName()); // depends on control dependency: [if], data = [none] descriptor.close(); // depends on control dependency: [if], data = [none] descriptor.delete(); // depends on control dependency: [if], data = [none] } } } return snapshots; } }
public class class_name { private void parsePreamble(ByteBuffer buffer) { if (_partialBoundary > 0) { int partial = _delimiterSearch.startsWith(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining(), _partialBoundary); if (partial > 0) { if (partial == _delimiterSearch.getLength()) { buffer.position(buffer.position() + partial - _partialBoundary); _partialBoundary = 0; setState(State.DELIMITER); return; } _partialBoundary = partial; BufferUtils.clear(buffer); return; } _partialBoundary = 0; } int delimiter = _delimiterSearch.match(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); if (delimiter >= 0) { buffer.position(delimiter - buffer.arrayOffset() + _delimiterSearch.getLength()); setState(State.DELIMITER); return; } _partialBoundary = _delimiterSearch.endsWith(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); BufferUtils.clear(buffer); } }
public class class_name { private void parsePreamble(ByteBuffer buffer) { if (_partialBoundary > 0) { int partial = _delimiterSearch.startsWith(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining(), _partialBoundary); if (partial > 0) { if (partial == _delimiterSearch.getLength()) { buffer.position(buffer.position() + partial - _partialBoundary); // depends on control dependency: [if], data = [none] _partialBoundary = 0; // depends on control dependency: [if], data = [none] setState(State.DELIMITER); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } _partialBoundary = partial; // depends on control dependency: [if], data = [none] BufferUtils.clear(buffer); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } _partialBoundary = 0; // depends on control dependency: [if], data = [none] } int delimiter = _delimiterSearch.match(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); if (delimiter >= 0) { buffer.position(delimiter - buffer.arrayOffset() + _delimiterSearch.getLength()); // depends on control dependency: [if], data = [(delimiter] setState(State.DELIMITER); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } _partialBoundary = _delimiterSearch.endsWith(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); BufferUtils.clear(buffer); } }
public class class_name { public static OutputMapping createOutputMapping( Element outputElement ){ if( outputElement == null ){ return null; } switch( outputElement.getType() ) { case VARIABLE: return new ValueMapping( outputElement.getString( 0 ) ); case VARIABLES: Map<String, String> mappings = new LinkedHashMap<>(); for( String mapping : outputElement.getStringArray( 0 ) ){ String[] pair = mapping.split( "=" ); mappings.put( pair[1], pair[0] ); } return new MapEntryMapping( mappings ); default : throw new IllegalStateException( "No strategy defined for creating an OutputMapping based on: " + outputElement ); } } }
public class class_name { public static OutputMapping createOutputMapping( Element outputElement ){ if( outputElement == null ){ return null; // depends on control dependency: [if], data = [none] } switch( outputElement.getType() ) { case VARIABLE: return new ValueMapping( outputElement.getString( 0 ) ); case VARIABLES: Map<String, String> mappings = new LinkedHashMap<>(); for( String mapping : outputElement.getStringArray( 0 ) ){ String[] pair = mapping.split( "=" ); mappings.put( pair[1], pair[0] ); // depends on control dependency: [for], data = [mapping] } return new MapEntryMapping( mappings ); default : throw new IllegalStateException( "No strategy defined for creating an OutputMapping based on: " + outputElement ); } } }
public class class_name { public static double dotProduct(double[] pointA) { // computes the dot product of pointA with itself (its squared Euclidean norm) double product = 0.0; for (int i = 0; i < pointA.length; i++) { product += pointA[i] * pointA[i]; } return product; } }
public class class_name { public static double dotProduct(double[] pointA) { // computes the dot product of pointA with itself (its squared Euclidean norm) double product = 0.0; for (int i = 0; i < pointA.length; i++) { product += pointA[i] * pointA[i]; // depends on control dependency: [for], data = [i] } return product; } }
public class class_name { public static String extractHtmlBody(String content) { Matcher startMatcher = BODY_START_PATTERN.matcher(content); Matcher endMatcher = BODY_END_PATTERN.matcher(content); int start = 0; int end = content.length(); if (startMatcher.find()) { start = startMatcher.end(); } if (endMatcher.find(start)) { end = endMatcher.start(); } return content.substring(start, end); } }
public class class_name { public static String extractHtmlBody(String content) { Matcher startMatcher = BODY_START_PATTERN.matcher(content); Matcher endMatcher = BODY_END_PATTERN.matcher(content); int start = 0; int end = content.length(); if (startMatcher.find()) { start = startMatcher.end(); // depends on control dependency: [if], data = [none] } if (endMatcher.find(start)) { end = endMatcher.start(); // depends on control dependency: [if], data = [none] } return content.substring(start, end); } }
public class class_name { protected final int applyAllCaseFoldWithMap(int mapSize, int[][]map, boolean essTsettFlag, int flag, ApplyAllCaseFoldFunction fun, Object arg) { asciiApplyAllCaseFold(flag, fun, arg); int[]code = new int[]{0}; for (int i=0; i<mapSize; i++) { code[0] = map[i][1]; fun.apply(map[i][0], code, 1, arg); code[0] = map[i][0]; fun.apply(map[i][1], code, 1, arg); } if (essTsettFlag) ssApplyAllCaseFold(flag, fun, arg); return 0; } }
public class class_name { protected final int applyAllCaseFoldWithMap(int mapSize, int[][]map, boolean essTsettFlag, int flag, ApplyAllCaseFoldFunction fun, Object arg) { asciiApplyAllCaseFold(flag, fun, arg); int[]code = new int[]{0}; for (int i=0; i<mapSize; i++) { code[0] = map[i][1]; // depends on control dependency: [for], data = [i] fun.apply(map[i][0], code, 1, arg); // depends on control dependency: [for], data = [i] code[0] = map[i][0]; // depends on control dependency: [for], data = [i] fun.apply(map[i][1], code, 1, arg); // depends on control dependency: [for], data = [i] } if (essTsettFlag) ssApplyAllCaseFold(flag, fun, arg); return 0; } }
public class class_name { public Observable<ServiceResponse<Page<ProjectTaskInner>>> listWithServiceResponseAsync(final String groupName, final String serviceName, final String projectName, final String taskType) { return listSinglePageAsync(groupName, serviceName, projectName, taskType) .concatMap(new Func1<ServiceResponse<Page<ProjectTaskInner>>, Observable<ServiceResponse<Page<ProjectTaskInner>>>>() { @Override public Observable<ServiceResponse<Page<ProjectTaskInner>>> call(ServiceResponse<Page<ProjectTaskInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public Observable<ServiceResponse<Page<ProjectTaskInner>>> listWithServiceResponseAsync(final String groupName, final String serviceName, final String projectName, final String taskType) { return listSinglePageAsync(groupName, serviceName, projectName, taskType) .concatMap(new Func1<ServiceResponse<Page<ProjectTaskInner>>, Observable<ServiceResponse<Page<ProjectTaskInner>>>>() { @Override public Observable<ServiceResponse<Page<ProjectTaskInner>>> call(ServiceResponse<Page<ProjectTaskInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); // depends on control dependency: [if], data = [none] } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { @SuppressWarnings("unchecked") public List<URI> getLocatorUrlList() { List<URI> locatorList = new ArrayList<URI>(); Map<String, ?> credentials = getCredentials(); List<String> locators = null; if (credentials != null) locators = (List<String>) credentials.get("locators"); try { if(locators == null || locators.isEmpty()) { //get for LOCATORS env String locatorsConfig = Config.getProperty(GeodeConfigConstants.LOCATORS_PROP,""); if(locatorsConfig.length() == 0) return null; String[] parsedLocators = locatorsConfig.split(","); if(parsedLocators == null || parsedLocators.length == 0) return null; locators = Arrays.asList(parsedLocators); } for (String locator : locators) { Matcher m = regExpPattern.matcher(locator); if (!m.matches()) { throw new IllegalStateException("Unexpected locator format. expected host[port], but got:" + locator); } locatorList.add(new URI("locator://" + m.group(1) + ":" + m.group(2))); } return locatorList; } catch (URISyntaxException e) { throw new ConfigException("One of the provided locators has an incorrect syntax:"+locatorList); } } }
public class class_name { @SuppressWarnings("unchecked") public List<URI> getLocatorUrlList() { List<URI> locatorList = new ArrayList<URI>(); Map<String, ?> credentials = getCredentials(); List<String> locators = null; if (credentials != null) locators = (List<String>) credentials.get("locators"); try { if(locators == null || locators.isEmpty()) { //get for LOCATORS env String locatorsConfig = Config.getProperty(GeodeConfigConstants.LOCATORS_PROP,""); if(locatorsConfig.length() == 0) return null; String[] parsedLocators = locatorsConfig.split(","); if(parsedLocators == null || parsedLocators.length == 0) return null; locators = Arrays.asList(parsedLocators); // depends on control dependency: [if], data = [none] } for (String locator : locators) { Matcher m = regExpPattern.matcher(locator); if (!m.matches()) { throw new IllegalStateException("Unexpected locator format. expected host[port], but got:" + locator); } locatorList.add(new URI("locator://" + m.group(1) + ":" + m.group(2))); } return locatorList; } catch (URISyntaxException e) { throw new ConfigException("One of the provided locators has an incorrect syntax:"+locatorList); } } }
public class class_name { public static JsonObject toJson( Project project) { JsonObjectBuilder builder = Json.createObjectBuilder(); Optional.ofNullable( project.getBaseLocation()) .ifPresent( uri -> builder.add( REFBASE_KEY, String.valueOf( uri))); if( project.getSystemInputLocation() != null) { builder.add( INPUTDEF_KEY, String.valueOf( project.getSystemInputLocation())); } else if( project.getSystemInput() != null) { builder.add( INPUTDEF_KEY, SystemInputJson.toJson( project.getSystemInput())); } if( project.getGeneratorsLocation() != null) { builder.add( GENERATORS_KEY, String.valueOf( project.getGeneratorsLocation())); } else if( project.getGenerators() != null) { builder.add( GENERATORS_KEY, GeneratorSetJson.toJson( project.getGenerators())); } if( project.getBaseTestsLocation() != null) { builder.add( BASETESTS_KEY, String.valueOf( project.getBaseTestsLocation())); } else if( project.getBaseTests() != null) { builder.add( BASETESTS_KEY, SystemTestJson.toJson( project.getBaseTests())); } return builder.build(); } }
public class class_name { public static JsonObject toJson( Project project) { JsonObjectBuilder builder = Json.createObjectBuilder(); Optional.ofNullable( project.getBaseLocation()) .ifPresent( uri -> builder.add( REFBASE_KEY, String.valueOf( uri))); if( project.getSystemInputLocation() != null) { builder.add( INPUTDEF_KEY, String.valueOf( project.getSystemInputLocation())); // depends on control dependency: [if], data = [( project.getSystemInputLocation()] } else if( project.getSystemInput() != null) { builder.add( INPUTDEF_KEY, SystemInputJson.toJson( project.getSystemInput())); // depends on control dependency: [if], data = [( project.getSystemInput()] } if( project.getGeneratorsLocation() != null) { builder.add( GENERATORS_KEY, String.valueOf( project.getGeneratorsLocation())); // depends on control dependency: [if], data = [( project.getGeneratorsLocation()] } else if( project.getGenerators() != null) { builder.add( GENERATORS_KEY, GeneratorSetJson.toJson( project.getGenerators())); // depends on control dependency: [if], data = [( project.getGenerators()] } if( project.getBaseTestsLocation() != null) { builder.add( BASETESTS_KEY, String.valueOf( project.getBaseTestsLocation())); // depends on control dependency: [if], data = [( project.getBaseTestsLocation()] } else if( project.getBaseTests() != null) { builder.add( BASETESTS_KEY, SystemTestJson.toJson( project.getBaseTests())); // depends on control dependency: [if], data = [( project.getBaseTests()] } return builder.build(); } }
public class class_name { public static Writer getWriterForProperty(final Class<?> beanClass, final String propertyName) { int splitPoint = propertyName.indexOf('.'); if (splitPoint > 0) { String firstPart = propertyName.substring(0, splitPoint); String secondPart = propertyName.substring(splitPoint + 1); return new NestedWriter(beanClass, firstPart, secondPart); } return new SimpleWriter(beanClass, propertyName); } }
public class class_name { public static Writer getWriterForProperty(final Class<?> beanClass, final String propertyName) { int splitPoint = propertyName.indexOf('.'); if (splitPoint > 0) { String firstPart = propertyName.substring(0, splitPoint); String secondPart = propertyName.substring(splitPoint + 1); return new NestedWriter(beanClass, firstPart, secondPart); // depends on control dependency: [if], data = [none] } return new SimpleWriter(beanClass, propertyName); } }
public class class_name { public static String getMtLvESS( int mtLv ) { // MtLvESS = Method Level External Shift String // if ( mtLv < 0 ) return "?"; // String Result = ""; // // String LvS = ". "; String LvS = "."; // for ( int K = 1; K <= mtLv; K ++ ) { // Result = Result + LvS; } // return Result; } }
public class class_name { public static String getMtLvESS( int mtLv ) { // MtLvESS = Method Level External Shift String // if ( mtLv < 0 ) return "?"; // String Result = ""; // // String LvS = ". "; String LvS = "."; // for ( int K = 1; K <= mtLv; K ++ ) { // Result = Result + LvS; // depends on control dependency: [for], data = [none] } // return Result; } }
public class class_name { <K, V> ConnectionFuture<StatefulRedisConnection<K, V>> connectToNodeAsync(RedisCodec<K, V> codec, String nodeId, RedisChannelWriter clusterWriter, Mono<SocketAddress> socketAddressSupplier) { assertNotNull(codec); assertNotEmpty(initialUris); LettuceAssert.notNull(socketAddressSupplier, "SocketAddressSupplier must not be null"); ClusterNodeEndpoint endpoint = new ClusterNodeEndpoint(clientOptions, getResources(), clusterWriter); RedisChannelWriter writer = endpoint; if (CommandExpiryWriter.isSupported(clientOptions)) { writer = new CommandExpiryWriter(writer, clientOptions, clientResources); } StatefulRedisConnectionImpl<K, V> connection = new StatefulRedisConnectionImpl<K, V>(writer, codec, timeout); ConnectionFuture<StatefulRedisConnection<K, V>> connectionFuture = connectStatefulAsync(connection, codec, endpoint, getFirstUri(), socketAddressSupplier, () -> new CommandHandler(clientOptions, clientResources, endpoint)); return connectionFuture.whenComplete((conn, throwable) -> { if (throwable != null) { connection.close(); } }); } }
public class class_name { <K, V> ConnectionFuture<StatefulRedisConnection<K, V>> connectToNodeAsync(RedisCodec<K, V> codec, String nodeId, RedisChannelWriter clusterWriter, Mono<SocketAddress> socketAddressSupplier) { assertNotNull(codec); assertNotEmpty(initialUris); LettuceAssert.notNull(socketAddressSupplier, "SocketAddressSupplier must not be null"); ClusterNodeEndpoint endpoint = new ClusterNodeEndpoint(clientOptions, getResources(), clusterWriter); RedisChannelWriter writer = endpoint; if (CommandExpiryWriter.isSupported(clientOptions)) { writer = new CommandExpiryWriter(writer, clientOptions, clientResources); // depends on control dependency: [if], data = [none] } StatefulRedisConnectionImpl<K, V> connection = new StatefulRedisConnectionImpl<K, V>(writer, codec, timeout); ConnectionFuture<StatefulRedisConnection<K, V>> connectionFuture = connectStatefulAsync(connection, codec, endpoint, getFirstUri(), socketAddressSupplier, () -> new CommandHandler(clientOptions, clientResources, endpoint)); return connectionFuture.whenComplete((conn, throwable) -> { if (throwable != null) { connection.close(); } }); } }
public class class_name { public String getGeneratedValueAnnotation() { if (useSequenceNameShortcut()) { return getGeneratedValueForSequenceNameByConfiguration(); } if (attribute.getColumnConfig().useConfigForIdGenerator()) { // TODO: explain why we do not directly check attribute.getColumnConfig().hasGeneratedValue() in the if statement above. return getGeneratedValueAnnotationByConfiguration(); } else if (attribute.getAutoIncrement() == TRUE && !attribute.isSimpleFk()) { // the jdbc driver supports IS_AUTOINCREMENT metadata, great! // if it is an fk, we do not want @GeneratedValue because we use instead @MapsId on the association... addImport("javax.persistence.GeneratedValue"); addImport("static javax.persistence.GenerationType.IDENTITY"); return "@GeneratedValue(strategy = IDENTITY)"; } else if (attribute.getAutoIncrement() == FALSE && /* 32 length string are special for us */!attribute.isString()) { // the jdbc driver supports IS_AUTOINCREMENT metadata, great! return ""; } else { // the jdbc driver does not support IS_AUTOINCREMENT // fall back to convention return getGeneratedValueAnnotationByConvention(); } } }
public class class_name { public String getGeneratedValueAnnotation() { if (useSequenceNameShortcut()) { return getGeneratedValueForSequenceNameByConfiguration(); // depends on control dependency: [if], data = [none] } if (attribute.getColumnConfig().useConfigForIdGenerator()) { // TODO: explain why we do not directly check attribute.getColumnConfig().hasGeneratedValue() in the if statement above. return getGeneratedValueAnnotationByConfiguration(); // depends on control dependency: [if], data = [none] } else if (attribute.getAutoIncrement() == TRUE && !attribute.isSimpleFk()) { // the jdbc driver supports IS_AUTOINCREMENT metadata, great! // if it is an fk, we do not want @GeneratedValue because we use instead @MapsId on the association... addImport("javax.persistence.GeneratedValue"); // depends on control dependency: [if], data = [none] addImport("static javax.persistence.GenerationType.IDENTITY"); return "@GeneratedValue(strategy = IDENTITY)"; } else if (attribute.getAutoIncrement() == FALSE && /* 32 length string are special for us */!attribute.isString()) { // the jdbc driver supports IS_AUTOINCREMENT metadata, great! return ""; // depends on control dependency: [if], data = [none] } else { // the jdbc driver does not support IS_AUTOINCREMENT // fall back to convention return getGeneratedValueAnnotationByConvention(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public String getDestinationName(Destination destination) { try { if (destination instanceof Queue) { return ((Queue) destination).getQueueName(); } else if (destination instanceof Topic) { return ((Topic) destination).getTopicName(); } else { return destination.toString(); } } catch (JMSException e) { log.error("Unable to resolve destination name", e); return ""; } } }
public class class_name { public String getDestinationName(Destination destination) { try { if (destination instanceof Queue) { return ((Queue) destination).getQueueName(); // depends on control dependency: [if], data = [none] } else if (destination instanceof Topic) { return ((Topic) destination).getTopicName(); // depends on control dependency: [if], data = [none] } else { return destination.toString(); // depends on control dependency: [if], data = [none] } } catch (JMSException e) { log.error("Unable to resolve destination name", e); return ""; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static Comparator universalComparator( final FieldAccess field, final boolean ascending, final boolean nullsFirst) { return new Comparator() { @Override public int compare( Object o1, Object o2 ) { Object value1 = null; Object value2 = null; if ( ascending ) { value1 = field.getValue( o1 ); value2 = field.getValue( o2 ); } else { value1 = field.getValue( o2 ); value2 = field.getValue( o1 ); } return Sorting.compare(value1, value2, nullsFirst); } }; } }
public class class_name { public static Comparator universalComparator( final FieldAccess field, final boolean ascending, final boolean nullsFirst) { return new Comparator() { @Override public int compare( Object o1, Object o2 ) { Object value1 = null; Object value2 = null; if ( ascending ) { value1 = field.getValue( o1 ); // depends on control dependency: [if], data = [none] value2 = field.getValue( o2 ); // depends on control dependency: [if], data = [none] } else { value1 = field.getValue( o2 ); // depends on control dependency: [if], data = [none] value2 = field.getValue( o1 ); // depends on control dependency: [if], data = [none] } return Sorting.compare(value1, value2, nullsFirst); } }; } }
public class class_name { @Override public void addDigestMetadata(StorageObjectMetadata meta, String digest) { if (!SnowflakeUtil.isBlank(digest)) { // Azure doesn't allow hyphens in the name of a metadata field. meta.addUserMetadata("sfcdigest", digest); } } }
public class class_name { @Override public void addDigestMetadata(StorageObjectMetadata meta, String digest) { if (!SnowflakeUtil.isBlank(digest)) { // Azure doesn't allow hyphens in the name of a metadata field. meta.addUserMetadata("sfcdigest", digest); // depends on control dependency: [if], data = [none] } } }
public class class_name { public int rank(double t) { int rank = n; for(int j = 0; j < n; j++) { if(Math.abs(Rdiag[j]) <= t) { --rank; } } return rank; } }
public class class_name { public int rank(double t) { int rank = n; for(int j = 0; j < n; j++) { if(Math.abs(Rdiag[j]) <= t) { --rank; // depends on control dependency: [if], data = [none] } } return rank; } }
public class class_name { public final EObject entryRuleGuarded() throws RecognitionException { EObject current = null; EObject iv_ruleGuarded = null; try { // InternalSimpleAntlr.g:877:2: (iv_ruleGuarded= ruleGuarded EOF ) // InternalSimpleAntlr.g:878:2: iv_ruleGuarded= ruleGuarded EOF { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getGuardedRule()); } pushFollow(FOLLOW_1); iv_ruleGuarded=ruleGuarded(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { current =iv_ruleGuarded; } match(input,EOF,FOLLOW_2); if (state.failed) return current; } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name { public final EObject entryRuleGuarded() throws RecognitionException { EObject current = null; EObject iv_ruleGuarded = null; try { // InternalSimpleAntlr.g:877:2: (iv_ruleGuarded= ruleGuarded EOF ) // InternalSimpleAntlr.g:878:2: iv_ruleGuarded= ruleGuarded EOF { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getGuardedRule()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_1); iv_ruleGuarded=ruleGuarded(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { current =iv_ruleGuarded; // depends on control dependency: [if], data = [none] } match(input,EOF,FOLLOW_2); if (state.failed) return current; } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name { public static String getFileNameOr(TaskResponse response, NameGenerator nameGenerator) { String name = getFileName(response); if (null == name) { name = nameGenerator.name(); } return name; } }
public class class_name { public static String getFileNameOr(TaskResponse response, NameGenerator nameGenerator) { String name = getFileName(response); if (null == name) { name = nameGenerator.name(); // depends on control dependency: [if], data = [none] } return name; } }
public class class_name { public FieldDescriptor[] getFieldDescriptor(boolean withInherited) { if(withInherited && getSuperClassDescriptor() != null) { /* arminw: only return no-PK fields, because all PK fields are declared in sub-class too. */ FieldDescriptor[] superFlds = getSuperClassDescriptor().getFieldDescriptorNonPk(true); if(m_FieldDescriptions == null) { m_FieldDescriptions = new FieldDescriptor[0]; } FieldDescriptor[] result = new FieldDescriptor[m_FieldDescriptions.length + superFlds.length]; System.arraycopy(m_FieldDescriptions, 0, result, 0, m_FieldDescriptions.length); System.arraycopy(superFlds, 0, result, m_FieldDescriptions.length, superFlds.length); // System.out.println("all fields: " + ArrayUtils.toString(result)); return result; } else { return m_FieldDescriptions; } } }
public class class_name { public FieldDescriptor[] getFieldDescriptor(boolean withInherited) { if(withInherited && getSuperClassDescriptor() != null) { /* arminw: only return no-PK fields, because all PK fields are declared in sub-class too. */ FieldDescriptor[] superFlds = getSuperClassDescriptor().getFieldDescriptorNonPk(true); if(m_FieldDescriptions == null) { m_FieldDescriptions = new FieldDescriptor[0]; // depends on control dependency: [if], data = [none] } FieldDescriptor[] result = new FieldDescriptor[m_FieldDescriptions.length + superFlds.length]; System.arraycopy(m_FieldDescriptions, 0, result, 0, m_FieldDescriptions.length); // depends on control dependency: [if], data = [none] System.arraycopy(superFlds, 0, result, m_FieldDescriptions.length, superFlds.length); // depends on control dependency: [if], data = [none] // System.out.println("all fields: " + ArrayUtils.toString(result)); return result; // depends on control dependency: [if], data = [none] } else { return m_FieldDescriptions; // depends on control dependency: [if], data = [none] } } }
public class class_name { protected void failure(final Throwable thrwbl, final Job job, final String curQueue) { try { PoolUtils.doWorkInPool(this.jedisPool, new PoolWork<Jedis, Void>() { /** * {@inheritDoc} */ @Override public Void doWork(final Jedis jedis) throws IOException { jedis.incr(key(STAT, FAILED)); jedis.incr(key(STAT, FAILED, name)); final FailQueueStrategy strategy = failQueueStrategyRef.get(); final String failQueueKey = strategy.getFailQueueKey(thrwbl, job, curQueue); if (failQueueKey != null) { final int failQueueMaxItems = strategy.getFailQueueMaxItems(curQueue); if (failQueueMaxItems > 0) { Long currentItems = jedis.llen(failQueueKey); if (currentItems >= failQueueMaxItems) { Transaction tx = jedis.multi(); tx.ltrim(failQueueKey, 1, -1); tx.rpush(failQueueKey, failMsg(thrwbl, curQueue, job)); tx.exec(); } else { jedis.rpush(failQueueKey, failMsg(thrwbl, curQueue, job)); } } else { jedis.rpush(failQueueKey, failMsg(thrwbl, curQueue, job)); } } return null; } }); } catch (JedisException je) { LOG.warn("Error updating failure stats for throwable=" + thrwbl + " job=" + job, je); } catch (IOException ioe) { LOG.warn("Error serializing failure payload for throwable=" + thrwbl + " job=" + job, ioe); } catch (Exception e) { throw new RuntimeException(e); } this.listenerDelegate.fireEvent(JOB_FAILURE, this, curQueue, job, null, null, thrwbl); } }
public class class_name { protected void failure(final Throwable thrwbl, final Job job, final String curQueue) { try { PoolUtils.doWorkInPool(this.jedisPool, new PoolWork<Jedis, Void>() { /** * {@inheritDoc} */ @Override public Void doWork(final Jedis jedis) throws IOException { jedis.incr(key(STAT, FAILED)); jedis.incr(key(STAT, FAILED, name)); final FailQueueStrategy strategy = failQueueStrategyRef.get(); final String failQueueKey = strategy.getFailQueueKey(thrwbl, job, curQueue); if (failQueueKey != null) { final int failQueueMaxItems = strategy.getFailQueueMaxItems(curQueue); if (failQueueMaxItems > 0) { Long currentItems = jedis.llen(failQueueKey); if (currentItems >= failQueueMaxItems) { Transaction tx = jedis.multi(); tx.ltrim(failQueueKey, 1, -1); // depends on control dependency: [if], data = [none] tx.rpush(failQueueKey, failMsg(thrwbl, curQueue, job)); // depends on control dependency: [if], data = [none] tx.exec(); // depends on control dependency: [if], data = [none] } else { jedis.rpush(failQueueKey, failMsg(thrwbl, curQueue, job)); // depends on control dependency: [if], data = [none] } } else { jedis.rpush(failQueueKey, failMsg(thrwbl, curQueue, job)); } } return null; } }); // depends on control dependency: [try], data = [none] } catch (JedisException je) { LOG.warn("Error updating failure stats for throwable=" + thrwbl + " job=" + job, je); } catch (IOException ioe) { // depends on control dependency: [catch], data = [none] LOG.warn("Error serializing failure payload for throwable=" + thrwbl + " job=" + job, ioe); } catch (Exception e) { // depends on control dependency: [catch], data = [none] throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] this.listenerDelegate.fireEvent(JOB_FAILURE, this, curQueue, job, null, null, thrwbl); } }
public class class_name { public int readVarIntFlag (boolean optimizePositive) { if (require(1) < 5) return readVarIntFlag_slow(optimizePositive); int b = buffer[position++]; int result = b & 0x3F; // Mask first 6 bits. if ((b & 0x40) != 0) { // Bit 7 means another byte, bit 8 means UTF8. byte[] buffer = this.buffer; int p = position; b = buffer[p++]; result |= (b & 0x7F) << 6; if ((b & 0x80) != 0) { b = buffer[p++]; result |= (b & 0x7F) << 13; if ((b & 0x80) != 0) { b = buffer[p++]; result |= (b & 0x7F) << 20; if ((b & 0x80) != 0) { b = buffer[p++]; result |= (b & 0x7F) << 27; } } } position = p; } return optimizePositive ? result : ((result >>> 1) ^ -(result & 1)); } }
public class class_name { public int readVarIntFlag (boolean optimizePositive) { if (require(1) < 5) return readVarIntFlag_slow(optimizePositive); int b = buffer[position++]; int result = b & 0x3F; // Mask first 6 bits. if ((b & 0x40) != 0) { // Bit 7 means another byte, bit 8 means UTF8. byte[] buffer = this.buffer; int p = position; b = buffer[p++]; // depends on control dependency: [if], data = [none] result |= (b & 0x7F) << 6; // depends on control dependency: [if], data = [none] if ((b & 0x80) != 0) { b = buffer[p++]; // depends on control dependency: [if], data = [none] result |= (b & 0x7F) << 13; // depends on control dependency: [if], data = [none] if ((b & 0x80) != 0) { b = buffer[p++]; // depends on control dependency: [if], data = [none] result |= (b & 0x7F) << 20; // depends on control dependency: [if], data = [none] if ((b & 0x80) != 0) { b = buffer[p++]; // depends on control dependency: [if], data = [none] result |= (b & 0x7F) << 27; // depends on control dependency: [if], data = [none] } } } position = p; // depends on control dependency: [if], data = [none] } return optimizePositive ? result : ((result >>> 1) ^ -(result & 1)); } }
public class class_name { public static FileSystemFactory decorateIfLimited(FileSystemFactory factory, String scheme, Configuration config) { checkNotNull(factory, "factory"); final ConnectionLimitingSettings settings = ConnectionLimitingSettings.fromConfig(config, scheme); // decorate only if any limit is configured if (settings == null) { // no limit configured return factory; } else { return new ConnectionLimitingFactory(factory, settings); } } }
public class class_name { public static FileSystemFactory decorateIfLimited(FileSystemFactory factory, String scheme, Configuration config) { checkNotNull(factory, "factory"); final ConnectionLimitingSettings settings = ConnectionLimitingSettings.fromConfig(config, scheme); // decorate only if any limit is configured if (settings == null) { // no limit configured return factory; // depends on control dependency: [if], data = [none] } else { return new ConnectionLimitingFactory(factory, settings); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override protected ResolutionResult resolveDependencies(String projectFolder, String topLevelFolder, Set<String> bomFiles) { logger.debug("gradleAggregateModules={}",gradleAggregateModules); // In order to use the gradle wrapper, we define the top folder that contains the wrapper this.gradleCli.setTopLevelFolderGradlew(topLevelFolder); // each bom-file ( = build.gradle) represents a module - identify its folder and scan it using 'gradle dependencies' Map<AgentProjectInfo, Path> projectInfoPathMap = new HashMap<>(); Collection<String> excludes = new HashSet<>(); // // Get the list of projects as paths // List<String> projectsList = null; // if (bomFiles.size() > 1) { // projectsList = collectProjects(topLevelFolder); // } // if (projectsList == null) { // logger.warn("Command \"gradle projects\" did not return a list of projects"); // } if (gradleRunPreStep) { downloadMissingDependencies(projectFolder); } for (String bomFile : bomFiles) { String bomFileFolder = new File(bomFile).getParent(); File bomFolder = new File(new File(bomFile).getParent()); String moduleName = bomFolder.getName(); // String moduleRelativeName = Constants.EMPTY_STRING; // try { // String canonicalPath = bomFolder.getCanonicalPath(); // // Relative name by replacing the root folder with "." - will look something like .\abc\def // moduleRelativeName = Constants.DOT + canonicalPath.replaceFirst(Pattern.quote(topLevelFolder), Constants.EMPTY_STRING); // } catch (Exception e) { // logger.debug("Error getting path - {} ", e.getMessage()); // } // // making sure the module's folder was listed by "gradle projects" command // if (!moduleRelativeName.isEmpty() && projectsList != null && !projectsList.contains(moduleRelativeName)) { // logger.debug("Ignoring project at {} - because it was not listed by \"gradle projects\" command", moduleRelativeName); // continue; // } List<String> lines = getDependenciesTree(bomFileFolder, moduleName); if (lines != null) { List<DependencyInfo> dependencies = collectDependencies(lines, bomFileFolder, bomFileFolder.equals(topLevelFolder), bomFile); if (dependencies.size() > 0) { AgentProjectInfo agentProjectInfo = new AgentProjectInfo(); agentProjectInfo.getDependencies().addAll(dependencies); if (!gradleAggregateModules) { Coordinates coordinates = new Coordinates(); coordinates.setArtifactId(moduleName); agentProjectInfo.setCoordinates(coordinates); } projectInfoPathMap.put(agentProjectInfo, bomFolder.toPath()); if (ignoreSourceCode) { excludes.addAll(normalizeLocalPath(projectFolder, topLevelFolder, extensionPattern(GRADLE_SCRIPT_EXTENSION), null)); } } } } topLevelFoldersNames.add(topLevelFolder.substring(topLevelFolder.lastIndexOf(fileSeparator) + 1)); excludes.addAll(getExcludes()); ResolutionResult resolutionResult; if (!gradleAggregateModules) { resolutionResult = new ResolutionResult(projectInfoPathMap, excludes, getDependencyType(), topLevelFolder); } else { resolutionResult = new ResolutionResult(projectInfoPathMap.keySet().stream() .flatMap(project -> project.getDependencies().stream()).collect(Collectors.toList()), excludes, getDependencyType(), topLevelFolder); } logger.debug("total projects = {}",resolutionResult.getResolvedProjects().size()); return resolutionResult; } }
public class class_name { @Override protected ResolutionResult resolveDependencies(String projectFolder, String topLevelFolder, Set<String> bomFiles) { logger.debug("gradleAggregateModules={}",gradleAggregateModules); // In order to use the gradle wrapper, we define the top folder that contains the wrapper this.gradleCli.setTopLevelFolderGradlew(topLevelFolder); // each bom-file ( = build.gradle) represents a module - identify its folder and scan it using 'gradle dependencies' Map<AgentProjectInfo, Path> projectInfoPathMap = new HashMap<>(); Collection<String> excludes = new HashSet<>(); // // Get the list of projects as paths // List<String> projectsList = null; // if (bomFiles.size() > 1) { // projectsList = collectProjects(topLevelFolder); // } // if (projectsList == null) { // logger.warn("Command \"gradle projects\" did not return a list of projects"); // } if (gradleRunPreStep) { downloadMissingDependencies(projectFolder); // depends on control dependency: [if], data = [none] } for (String bomFile : bomFiles) { String bomFileFolder = new File(bomFile).getParent(); File bomFolder = new File(new File(bomFile).getParent()); String moduleName = bomFolder.getName(); // String moduleRelativeName = Constants.EMPTY_STRING; // try { // String canonicalPath = bomFolder.getCanonicalPath(); // // Relative name by replacing the root folder with "." - will look something like .\abc\def // moduleRelativeName = Constants.DOT + canonicalPath.replaceFirst(Pattern.quote(topLevelFolder), Constants.EMPTY_STRING); // } catch (Exception e) { // logger.debug("Error getting path - {} ", e.getMessage()); // } // // making sure the module's folder was listed by "gradle projects" command // if (!moduleRelativeName.isEmpty() && projectsList != null && !projectsList.contains(moduleRelativeName)) { // logger.debug("Ignoring project at {} - because it was not listed by \"gradle projects\" command", moduleRelativeName); // continue; // } List<String> lines = getDependenciesTree(bomFileFolder, moduleName); if (lines != null) { List<DependencyInfo> dependencies = collectDependencies(lines, bomFileFolder, bomFileFolder.equals(topLevelFolder), bomFile); if (dependencies.size() > 0) { AgentProjectInfo agentProjectInfo = new AgentProjectInfo(); agentProjectInfo.getDependencies().addAll(dependencies); // depends on control dependency: [if], data = [none] if (!gradleAggregateModules) { Coordinates coordinates = new Coordinates(); coordinates.setArtifactId(moduleName); // depends on control dependency: [if], data = [none] agentProjectInfo.setCoordinates(coordinates); // depends on control dependency: [if], data = [none] } projectInfoPathMap.put(agentProjectInfo, bomFolder.toPath()); // depends on control dependency: [if], data = [none] if (ignoreSourceCode) { excludes.addAll(normalizeLocalPath(projectFolder, topLevelFolder, extensionPattern(GRADLE_SCRIPT_EXTENSION), null)); // depends on control dependency: [if], data = [none] } } } } topLevelFoldersNames.add(topLevelFolder.substring(topLevelFolder.lastIndexOf(fileSeparator) + 1)); excludes.addAll(getExcludes()); ResolutionResult resolutionResult; if (!gradleAggregateModules) { resolutionResult = new ResolutionResult(projectInfoPathMap, excludes, getDependencyType(), topLevelFolder); // depends on control dependency: [if], data = [none] } else { resolutionResult = new ResolutionResult(projectInfoPathMap.keySet().stream() .flatMap(project -> project.getDependencies().stream()).collect(Collectors.toList()), excludes, getDependencyType(), topLevelFolder); // depends on control dependency: [if], data = [none] } logger.debug("total projects = {}",resolutionResult.getResolvedProjects().size()); return resolutionResult; } }
public class class_name { @Override protected Optional<TermType> reduceInferredTypes(ImmutableList<Optional<TermType>> argumentTypes) { if (argumentTypes.stream().allMatch(Optional::isPresent)) { return argumentTypes.stream() .map(Optional::get) .map(t -> (ConcreteNumericRDFDatatype) t) .reduce(ConcreteNumericRDFDatatype::getCommonPropagatedOrSubstitutedType) .map(t -> (TermType) t); } return Optional.empty(); } }
public class class_name { @Override protected Optional<TermType> reduceInferredTypes(ImmutableList<Optional<TermType>> argumentTypes) { if (argumentTypes.stream().allMatch(Optional::isPresent)) { return argumentTypes.stream() .map(Optional::get) .map(t -> (ConcreteNumericRDFDatatype) t) .reduce(ConcreteNumericRDFDatatype::getCommonPropagatedOrSubstitutedType) .map(t -> (TermType) t); // depends on control dependency: [if], data = [none] } return Optional.empty(); } }
public class class_name { public static double asin(double x) { if (x != x) { return Double.NaN; } if (x > 1.0 || x < -1.0) { return Double.NaN; } if (x == 1.0) { return Math.PI/2.0; } if (x == -1.0) { return -Math.PI/2.0; } if (x == 0.0) { // Matches +/- 0.0; return correct sign return x; } /* Compute asin(x) = atan(x/sqrt(1-x*x)) */ /* Split x */ double temp = x * HEX_40000000; final double xa = x + temp - temp; final double xb = x - xa; /* Square it */ double ya = xa*xa; double yb = xa*xb*2.0 + xb*xb; /* Subtract from 1 */ ya = -ya; yb = -yb; double za = 1.0 + ya; double zb = -(za - 1.0 - ya); temp = za + yb; zb += -(temp - za - yb); za = temp; /* Square root */ double y; y = sqrt(za); temp = y * HEX_40000000; ya = y + temp - temp; yb = y - ya; /* Extend precision of sqrt */ yb += (za - ya*ya - 2*ya*yb - yb*yb) / (2.0*y); /* Contribution of zb to sqrt */ double dx = zb / (2.0*y); // Compute ratio r = x/y double r = x/y; temp = r * HEX_40000000; double ra = r + temp - temp; double rb = r - ra; rb += (x - ra*ya - ra*yb - rb*ya - rb*yb) / y; // Correct for rounding in division rb += -x * dx / y / y; // Add in effect additional bits of sqrt. temp = ra + rb; rb = -(temp - ra - rb); ra = temp; return atan(ra, rb, false); } }
public class class_name { public static double asin(double x) { if (x != x) { return Double.NaN; // depends on control dependency: [if], data = [none] } if (x > 1.0 || x < -1.0) { return Double.NaN; // depends on control dependency: [if], data = [none] } if (x == 1.0) { return Math.PI/2.0; // depends on control dependency: [if], data = [none] } if (x == -1.0) { return -Math.PI/2.0; // depends on control dependency: [if], data = [none] } if (x == 0.0) { // Matches +/- 0.0; return correct sign return x; // depends on control dependency: [if], data = [none] } /* Compute asin(x) = atan(x/sqrt(1-x*x)) */ /* Split x */ double temp = x * HEX_40000000; final double xa = x + temp - temp; final double xb = x - xa; /* Square it */ double ya = xa*xa; double yb = xa*xb*2.0 + xb*xb; /* Subtract from 1 */ ya = -ya; yb = -yb; double za = 1.0 + ya; double zb = -(za - 1.0 - ya); temp = za + yb; zb += -(temp - za - yb); za = temp; /* Square root */ double y; y = sqrt(za); temp = y * HEX_40000000; ya = y + temp - temp; yb = y - ya; /* Extend precision of sqrt */ yb += (za - ya*ya - 2*ya*yb - yb*yb) / (2.0*y); /* Contribution of zb to sqrt */ double dx = zb / (2.0*y); // Compute ratio r = x/y double r = x/y; temp = r * HEX_40000000; double ra = r + temp - temp; double rb = r - ra; rb += (x - ra*ya - ra*yb - rb*ya - rb*yb) / y; // Correct for rounding in division rb += -x * dx / y / y; // Add in effect additional bits of sqrt. temp = ra + rb; rb = -(temp - ra - rb); ra = temp; return atan(ra, rb, false); } }
public class class_name { public DescribeEcsClustersRequest withEcsClusterArns(String... ecsClusterArns) { if (this.ecsClusterArns == null) { setEcsClusterArns(new com.amazonaws.internal.SdkInternalList<String>(ecsClusterArns.length)); } for (String ele : ecsClusterArns) { this.ecsClusterArns.add(ele); } return this; } }
public class class_name { public DescribeEcsClustersRequest withEcsClusterArns(String... ecsClusterArns) { if (this.ecsClusterArns == null) { setEcsClusterArns(new com.amazonaws.internal.SdkInternalList<String>(ecsClusterArns.length)); // depends on control dependency: [if], data = [none] } for (String ele : ecsClusterArns) { this.ecsClusterArns.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static boolean pickDirectory(Activity activity, File startPath, int requestCode) { PackageManager packageMgr = activity.getPackageManager(); for (String[] intent : PICK_DIRECTORY_INTENTS) { String intentAction = intent[0]; String uriPrefix = intent[1]; Intent startIntent = new Intent(intentAction) .putExtra("org.openintents.extra.TITLE", activity.getString(R.string.save_as)) .setData(Uri.parse(uriPrefix + startPath.getPath())); try { if (startIntent.resolveActivity(packageMgr) != null) { activity.startActivityForResult(startIntent, requestCode); return true; } } catch (ActivityNotFoundException e) { showNoFilePickerError(activity, e); } } return false; } }
public class class_name { public static boolean pickDirectory(Activity activity, File startPath, int requestCode) { PackageManager packageMgr = activity.getPackageManager(); for (String[] intent : PICK_DIRECTORY_INTENTS) { String intentAction = intent[0]; String uriPrefix = intent[1]; Intent startIntent = new Intent(intentAction) .putExtra("org.openintents.extra.TITLE", activity.getString(R.string.save_as)) .setData(Uri.parse(uriPrefix + startPath.getPath())); try { if (startIntent.resolveActivity(packageMgr) != null) { activity.startActivityForResult(startIntent, requestCode); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } } catch (ActivityNotFoundException e) { showNoFilePickerError(activity, e); } // depends on control dependency: [catch], data = [none] } return false; } }
public class class_name { public void onStartObject() { this.currentObjectKey = this.currentLocalName; if (this.firstObjectKey == null && this.currentLocalName != null) { this.firstObjectKey = this.currentObjectKey; } if (this.currentObjectKey != null) { stackObjectKey.push(this.currentObjectKey); } else { if (!stackObjectKey.isEmpty()) { this.currentObjectKey = stackObjectKey.peek(); } else { this.currentObjectKey = this.firstObjectKey; } } } }
public class class_name { public void onStartObject() { this.currentObjectKey = this.currentLocalName; if (this.firstObjectKey == null && this.currentLocalName != null) { this.firstObjectKey = this.currentObjectKey; // depends on control dependency: [if], data = [none] } if (this.currentObjectKey != null) { stackObjectKey.push(this.currentObjectKey); // depends on control dependency: [if], data = [(this.currentObjectKey] } else { if (!stackObjectKey.isEmpty()) { this.currentObjectKey = stackObjectKey.peek(); // depends on control dependency: [if], data = [none] } else { this.currentObjectKey = this.firstObjectKey; // depends on control dependency: [if], data = [none] } } } }
public class class_name { public boolean removeInterval(Bounds i) { if(bs.remove(i)) { Bounds intersection = new Bounds(0, APSPSolver.INF); for(Bounds toIntersect : bs) { //intersection = intervalIntersect(intersection, toIntersect);//intervalIntersect(intersection, inter); intersection = intersection.intersect(toIntersect); } minimum = intersection.min; maximum = intersection.max; return true; } return false; } }
public class class_name { public boolean removeInterval(Bounds i) { if(bs.remove(i)) { Bounds intersection = new Bounds(0, APSPSolver.INF); for(Bounds toIntersect : bs) { //intersection = intervalIntersect(intersection, toIntersect);//intervalIntersect(intersection, inter); intersection = intersection.intersect(toIntersect); // depends on control dependency: [for], data = [toIntersect] } minimum = intersection.min; // depends on control dependency: [if], data = [none] maximum = intersection.max; // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static boolean startsWithPrefix(final String input) { boolean ret = false; if (input != null) { ret = input.toLowerCase().startsWith(PREFIX_BIGINT_DASH_CHECKSUM); } else { ret = false; } return ret; } }
public class class_name { public static boolean startsWithPrefix(final String input) { boolean ret = false; if (input != null) { ret = input.toLowerCase().startsWith(PREFIX_BIGINT_DASH_CHECKSUM); // depends on control dependency: [if], data = [none] } else { ret = false; // depends on control dependency: [if], data = [none] } return ret; } }
public class class_name { public static <R> DoubleFunction<R> doubleFunction(CheckedDoubleFunction<R> function, Consumer<Throwable> handler) { return t -> { try { return function.apply(t); } catch (Throwable e) { handler.accept(e); throw new IllegalStateException("Exception handler must throw a RuntimeException", e); } }; } }
public class class_name { public static <R> DoubleFunction<R> doubleFunction(CheckedDoubleFunction<R> function, Consumer<Throwable> handler) { return t -> { try { return function.apply(t); // depends on control dependency: [try], data = [none] } catch (Throwable e) { handler.accept(e); throw new IllegalStateException("Exception handler must throw a RuntimeException", e); } // depends on control dependency: [catch], data = [none] }; } }
public class class_name { public void setPortProbeDetails(java.util.Collection<PortProbeDetail> portProbeDetails) { if (portProbeDetails == null) { this.portProbeDetails = null; return; } this.portProbeDetails = new java.util.ArrayList<PortProbeDetail>(portProbeDetails); } }
public class class_name { public void setPortProbeDetails(java.util.Collection<PortProbeDetail> portProbeDetails) { if (portProbeDetails == null) { this.portProbeDetails = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.portProbeDetails = new java.util.ArrayList<PortProbeDetail>(portProbeDetails); } }
public class class_name { public boolean doCommand(String strCommand, ScreenField sourceSField, int iCommandOptions) { if (strCommand.equalsIgnoreCase(this.getButtonCommand())) if (sourceSField == this) { // Only process this command Map<String,Object> properties = new Hashtable<String,Object>(); strCommand = this.getProperties(strCommand, properties); if (this.getRecord() != null) { Task task = null; if (this.getRecord().getRecordOwner() != null) task = this.getRecord().getRecordOwner().getTask(); //xif (task == null) //x task = BaseApplet.getSharedInstance(); Application application = (Application)task.getApplication(); if (strCommand.equalsIgnoreCase(ThinMenuConstants.LOOKUP)) { BasePanel parentScreen = Screen.makeWindow(application); GridScreen screen = (GridScreen)this.getRecord().makeScreen(null, parentScreen, ScreenConstants.SELECT_MODE, true, true, true, true, properties); if (this.getRecord().getRecordOwner() == null) screen.setSelectQuery(this.getRecord(), false); // Since this record isn't linked to the screen, manually link it. } if (strCommand.equals(ThinMenuConstants.NEXT)) { int errorCode = DBConstants.NORMAL_RETURN; if (this.getRecord() != null) { try { if (this.getRecord().getEditMode() == Constants.EDIT_IN_PROGRESS) this.getRecord().set(); if (this.getRecord().getEditMode() == Constants.EDIT_ADD) this.getRecord().add(); } catch( DBException e ) { this.getRootScreen().displayError(e); } if (errorCode == DBConstants.NORMAL_RETURN) { try { this.getRecord().next(); } catch( DBException e ) { this.getRootScreen().displayError(e); } } } } if (strCommand.equalsIgnoreCase(ThinMenuConstants.FORM)) { BasePanel parentScreen = Screen.makeWindow(application); this.getRecord().makeScreen(null, parentScreen, ScreenConstants.MAINT_MODE, true, true, true, true, properties); } if (strCommand.equals(ThinMenuConstants.PRINT)) { // *** FIX THIS *** BasePanel parentScreen = Screen.makeWindow(application); this.getRecord().makeScreen(null, parentScreen, ScreenConstants.MAINT_MODE, true, true, true, true, properties); } return true; // Command handled } if (m_field != null) { if (strCommand.equals(ThinMenuConstants.CANCEL)) { m_field.setString(Constants.BLANK); } else if (strCommand.equals(ScreenModel.CLEAR)) { m_field.setData(null); } else if (strCommand.equals(ThinMenuConstants.UNDO)) { m_field.setString(Constants.BLANK); } else if (strCommand.equals(ScreenModel.MAIL)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.EMAIL)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.PHONE)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.FAX)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.URL)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) 
this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (m_strValue != null) { m_field.setString(m_strValue); } else return super.doCommand(strCommand, sourceSField, iCommandOptions); // Command handled return true; // Command handled } if (m_strValue != null) { Task task = null; RecordOwner recordOwner = null; if (m_field != null) recordOwner = m_field.getRecord().getRecordOwner(); if (recordOwner == null) if (this.getRecord() != null) recordOwner = this.getRecord().getRecordOwner(); if (recordOwner == null) if (this.getConverter() != null) if (this.getConverter().getField() != null) recordOwner = ((BaseField)this.getConverter().getField()).getRecord().getRecordOwner(); if (recordOwner != null) task = recordOwner.getTask(); //xif (task == null) //x task = BaseApplet.getSharedInstance(); Application application = (Application)task.getApplication(); BasePanel screenParent = BasePanel.makeWindow(application); if (strCommand.equals("Record")) BaseScreen.makeScreenFromRecord(null, screenParent, m_strValue, ScreenConstants.MAINT_MODE | ScreenConstants.DEFAULT_DISPLAY, null); else Record.makeNewScreen(m_strValue, null, screenParent, ScreenConstants.DEFAULT_DISPLAY, null, null, true); return true; // Command handled } if (strCommand != null) if (sourceSField == this) if (this.getConverter() != null) { // HACK - I say that SCannedBox should set the target field to the command value, but // if I connect a command cannedbox to a field so it gets enabled with it I don't want // to set the value, I just want to send the command. You should use a SButtonBox // with the value you want set in the constructor! // This is a hack, until you can review the code and take this out. if (this.getConverter().getField() != null) if ((!(this.getConverter().getField() instanceof NumberField)) || (Utility.isNumeric(strCommand))) { this.getConverter().setString(strCommand); return true; // Command handled } // End hack } } // Do the normal command return super.doCommand(strCommand, sourceSField, iCommandOptions); } }
public class class_name { public boolean doCommand(String strCommand, ScreenField sourceSField, int iCommandOptions) { if (strCommand.equalsIgnoreCase(this.getButtonCommand())) if (sourceSField == this) { // Only process this command Map<String,Object> properties = new Hashtable<String,Object>(); strCommand = this.getProperties(strCommand, properties); // depends on control dependency: [if], data = [none] if (this.getRecord() != null) { Task task = null; if (this.getRecord().getRecordOwner() != null) task = this.getRecord().getRecordOwner().getTask(); //xif (task == null) //x task = BaseApplet.getSharedInstance(); Application application = (Application)task.getApplication(); if (strCommand.equalsIgnoreCase(ThinMenuConstants.LOOKUP)) { BasePanel parentScreen = Screen.makeWindow(application); GridScreen screen = (GridScreen)this.getRecord().makeScreen(null, parentScreen, ScreenConstants.SELECT_MODE, true, true, true, true, properties); if (this.getRecord().getRecordOwner() == null) screen.setSelectQuery(this.getRecord(), false); // Since this record isn't linked to the screen, manually link it. } if (strCommand.equals(ThinMenuConstants.NEXT)) { int errorCode = DBConstants.NORMAL_RETURN; if (this.getRecord() != null) { try { if (this.getRecord().getEditMode() == Constants.EDIT_IN_PROGRESS) this.getRecord().set(); if (this.getRecord().getEditMode() == Constants.EDIT_ADD) this.getRecord().add(); } catch( DBException e ) { this.getRootScreen().displayError(e); } // depends on control dependency: [catch], data = [none] if (errorCode == DBConstants.NORMAL_RETURN) { try { this.getRecord().next(); // depends on control dependency: [try], data = [none] } catch( DBException e ) { this.getRootScreen().displayError(e); } // depends on control dependency: [catch], data = [none] } } } if (strCommand.equalsIgnoreCase(ThinMenuConstants.FORM)) { BasePanel parentScreen = Screen.makeWindow(application); this.getRecord().makeScreen(null, parentScreen, ScreenConstants.MAINT_MODE, true, true, true, true, properties); // depends on control dependency: [if], data = [none] } if (strCommand.equals(ThinMenuConstants.PRINT)) { // *** FIX THIS *** BasePanel parentScreen = Screen.makeWindow(application); this.getRecord().makeScreen(null, parentScreen, ScreenConstants.MAINT_MODE, true, true, true, true, properties); // depends on control dependency: [if], data = [none] } return true; // Command handled // depends on control dependency: [if], data = [none] } if (m_field != null) { if (strCommand.equals(ThinMenuConstants.CANCEL)) { m_field.setString(Constants.BLANK); // depends on control dependency: [if], data = [none] } else if (strCommand.equals(ScreenModel.CLEAR)) { m_field.setData(null); // depends on control dependency: [if], data = [none] } else if (strCommand.equals(ThinMenuConstants.UNDO)) { m_field.setString(Constants.BLANK); // depends on control dependency: [if], data = [none] } else if (strCommand.equals(ScreenModel.MAIL)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.EMAIL)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.PHONE)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() 
> 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.FAX)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (strCommand.equals(ScreenModel.URL)) { String strHyperlink = m_field.getHyperlink(); if (strHyperlink != null) if (strHyperlink.length() > 0) if (this.getScreenFieldView() != null) this.getScreenFieldView().showDocument(strHyperlink, 0); } else if (m_strValue != null) { m_field.setString(m_strValue); // depends on control dependency: [if], data = [(m_strValue] } else return super.doCommand(strCommand, sourceSField, iCommandOptions); // Command handled return true; // Command handled // depends on control dependency: [if], data = [none] } if (m_strValue != null) { Task task = null; RecordOwner recordOwner = null; if (m_field != null) recordOwner = m_field.getRecord().getRecordOwner(); if (recordOwner == null) if (this.getRecord() != null) recordOwner = this.getRecord().getRecordOwner(); if (recordOwner == null) if (this.getConverter() != null) if (this.getConverter().getField() != null) recordOwner = ((BaseField)this.getConverter().getField()).getRecord().getRecordOwner(); if (recordOwner != null) task = recordOwner.getTask(); //xif (task == null) //x task = BaseApplet.getSharedInstance(); Application application = (Application)task.getApplication(); BasePanel screenParent = BasePanel.makeWindow(application); if (strCommand.equals("Record")) BaseScreen.makeScreenFromRecord(null, screenParent, m_strValue, ScreenConstants.MAINT_MODE | ScreenConstants.DEFAULT_DISPLAY, null); else Record.makeNewScreen(m_strValue, null, screenParent, ScreenConstants.DEFAULT_DISPLAY, null, null, true); return true; // Command handled // depends on control dependency: [if], data = [none] } if (strCommand != null) if (sourceSField == this) if (this.getConverter() != null) { // HACK - I say that SCannedBox should set the target field to the command value, but // if I connect a command cannedbox to a field so it gets enabled with it I don't want // to set the value, I just want to send the command. You should use a SButtonBox // with the value you want set in the constructor! // This is a hack, until you can review the code and take this out. if (this.getConverter().getField() != null) if ((!(this.getConverter().getField() instanceof NumberField)) || (Utility.isNumeric(strCommand))) { this.getConverter().setString(strCommand); // depends on control dependency: [if], data = [none] return true; // Command handled // depends on control dependency: [if], data = [none] } // End hack } } // Do the normal command return super.doCommand(strCommand, sourceSField, iCommandOptions); } }
public class class_name {
    public boolean isPrintableControl(ScreenField sField, int iPrintOptions) { // Override this to break
        if ((sField == null) || (sField == this)) { // Asking about this control
            return false; // Tool screens are not printed as a sub-screen.
        }
        return super.isPrintableControl(sField, iPrintOptions);
    }
}
public class class_name {
    public boolean isPrintableControl(ScreenField sField, int iPrintOptions) { // Override this to break
        if ((sField == null) || (sField == this)) { // Asking about this control
            return false; // Tool screens are not printed as a sub-screen. // depends on control dependency: [if], data = [none]
        }
        return super.isPrintableControl(sField, iPrintOptions);
    }
}
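The annotated variant of each row appears to tag every statement that only runs when an enclosing branch is taken with the kind of controlling construct. A minimal, hypothetical pair in the same style (the method recordNegative and the field negativeSeen are invented for illustration and do not come from the dataset) would read:

public class class_name {
    public void recordNegative(int value) {
        if (value < 0) {
            negativeSeen = true; // depends on control dependency: [if], data = [none]
        }
    }
}

Statements outside any branch, such as the final return in the isPrintableControl rows above, carry no tag.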
public class class_name {
    public void marshall(UpdateGroupRequest updateGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateGroupRequest.getGroupName(), GROUPNAME_BINDING);
            protocolMarshaller.marshall(updateGroupRequest.getUserPoolId(), USERPOOLID_BINDING);
            protocolMarshaller.marshall(updateGroupRequest.getDescription(), DESCRIPTION_BINDING);
            protocolMarshaller.marshall(updateGroupRequest.getRoleArn(), ROLEARN_BINDING);
            protocolMarshaller.marshall(updateGroupRequest.getPrecedence(), PRECEDENCE_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name {
    public void marshall(UpdateGroupRequest updateGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateGroupRequest.getGroupName(), GROUPNAME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(updateGroupRequest.getUserPoolId(), USERPOOLID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(updateGroupRequest.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(updateGroupRequest.getRoleArn(), ROLEARN_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(updateGroupRequest.getPrecedence(), PRECEDENCE_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
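The [try] and [catch] tags in the marshall pair above appear to follow the same convention: statements inside a try block are tagged [try], and the closing brace of a catch block is tagged [catch]. A short hypothetical row in that style (parseOrDefault and its locals are invented for illustration, not taken from the dataset):

public class class_name {
    public int parseOrDefault(String text) {
        int result = -1;
        try {
            result = Integer.parseInt(text); // depends on control dependency: [try], data = [none]
        } catch (NumberFormatException e) {
            result = 0;
        } // depends on control dependency: [catch], data = [none]
        return result;
    }
}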
public class class_name { public void start() { try { if (init) { setErrorStatus("Cannot initialize multiple times", null); return; } init = true; HttpPost postRequest = setupRequest(new ByteArrayOutputStream(0)); UploadImageParam.setHeaders(postRequest, journalId, namespaceInfoString, epoch, txid, 0, segmentId++, false); HttpClient httpClient = new DefaultHttpClient(); HttpResponse response = httpClient.execute(postRequest); if (response.getStatusLine().getStatusCode() == HttpServletResponse.SC_NOT_ACCEPTABLE) { throwIOException("Error when starting upload to : " + uri + " status: " + response.getStatusLine().toString()); } // get the session id for (Header h : response.getAllHeaders()) { if (h.getName().equals("sessionId")) { sessionId = Long.parseLong(h.getValue()); break; } } // we must have the session id if (sessionId < 0) { throw new IOException("Session id is missing"); } } catch (Exception e) { setErrorStatus("Exception when starting upload channel for: " + uri, e); } } }
public class class_name { public void start() { try { if (init) { setErrorStatus("Cannot initialize multiple times", null); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } init = true; // depends on control dependency: [try], data = [none] HttpPost postRequest = setupRequest(new ByteArrayOutputStream(0)); UploadImageParam.setHeaders(postRequest, journalId, namespaceInfoString, epoch, txid, 0, segmentId++, false); // depends on control dependency: [try], data = [none] HttpClient httpClient = new DefaultHttpClient(); HttpResponse response = httpClient.execute(postRequest); if (response.getStatusLine().getStatusCode() == HttpServletResponse.SC_NOT_ACCEPTABLE) { throwIOException("Error when starting upload to : " + uri + " status: " + response.getStatusLine().toString()); // depends on control dependency: [if], data = [none] } // get the session id for (Header h : response.getAllHeaders()) { if (h.getName().equals("sessionId")) { sessionId = Long.parseLong(h.getValue()); // depends on control dependency: [if], data = [none] break; } } // we must have the session id if (sessionId < 0) { throw new IOException("Session id is missing"); } } catch (Exception e) { setErrorStatus("Exception when starting upload channel for: " + uri, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    public DescribeEndpointsResult withEndpoints(Endpoint... endpoints) {
        if (this.endpoints == null) {
            setEndpoints(new java.util.ArrayList<Endpoint>(endpoints.length));
        }
        for (Endpoint ele : endpoints) {
            this.endpoints.add(ele);
        }
        return this;
    }
}
public class class_name {
    public DescribeEndpointsResult withEndpoints(Endpoint... endpoints) {
        if (this.endpoints == null) {
            setEndpoints(new java.util.ArrayList<Endpoint>(endpoints.length)); // depends on control dependency: [if], data = [none]
        }
        for (Endpoint ele : endpoints) {
            this.endpoints.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    }
}
public class class_name {
    protected synchronized void saveRows(CachedObject[] rows, int offset, int count) {
        if (count == 0) {
            return;
        }
        for (int i = offset; i < offset + count; i++) {
            CachedObject r = rows[i];
            uncommittedCache.put(r.getPos(), r);
            rows[i] = null;
        }
    }
}
public class class_name {
    protected synchronized void saveRows(CachedObject[] rows, int offset, int count) {
        if (count == 0) {
            return; // depends on control dependency: [if], data = [none]
        }
        for (int i = offset; i < offset + count; i++) {
            CachedObject r = rows[i];
            uncommittedCache.put(r.getPos(), r); // depends on control dependency: [for], data = [none]
            rows[i] = null; // depends on control dependency: [for], data = [i]
        }
    }
}
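In the saveRows pair above, the data part of the tag seems to list a token the statement shares with its controlling loop header (rows[i] = null is tagged data = [i]), while statements that reuse nothing from the header are tagged data = [none]. A small hypothetical row in the same style (zeroPrefix and the field touched are invented for illustration):

public class class_name {
    public void zeroPrefix(int[] values, int count) {
        for (int i = 0; i < count; i++) {
            values[i] = 0; // depends on control dependency: [for], data = [i]
            touched = true; // depends on control dependency: [for], data = [none]
        }
    }
}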
public class class_name { @Override public ExtensionProcessor createExtensionProcessor(IServletContext servletContext) throws Exception { WebModuleMetaData moduleMetaData = ((WebAppConfigExtended) (servletContext.getWebAppConfig())).getMetaData(); JaxRsModuleMetaData jaxRsModuleMetaData = JaxRsMetaDataManager.getJaxRsModuleMetaData(moduleMetaData); //If jaxrs-2.0 feature is enabled while the server is on the running status, WebContainer service may receive the JaxRsExtensionFactory registration //service before the started applications are removed, at this time, no JaxRsModuleMeta was stored in the application metadata //So, now we just return null, as the application will be restarted as it is configured in the jaxrs feature file if (jaxRsModuleMetaData == null) { return null; } //Add WebAppInjectionInterceptor to JaxWsInstanceManager // jaxRsModuleMetaData.getJaxRsInstanceManager().addInterceptor(new WebAppInjectionInstanceInterceptor(servletContext)); //Get JaxRsModuleInfo NonPersistentCache overlayCache = servletContext.getModuleContainer().adapt(NonPersistentCache.class); JaxRsModuleInfo jaxRsModuleInfo = (JaxRsModuleInfo) overlayCache.getFromCache(JaxRsModuleInfo.class); //No WebService Implementation is found and just return null to indicate no interest on the request processing if (jaxRsModuleInfo == null || jaxRsModuleInfo.endpointInfoSize() == 0) { if (tc.isDebugEnabled()) { Tr.debug(tc, "No JAX-RS service is found in the web module, will not create web service processor"); } return null; } Container publisherModuleContainer = servletContext.getModuleContainer(); JaxRsPublisherContext publisherContext = new JaxRsPublisherContext(jaxRsModuleMetaData, publisherModuleContainer, JaxRsUtils.getWebModuleInfo(publisherModuleContainer)); publisherContext.setAttribute(JaxRsServerConstants.SERVLET_CONTEXT, servletContext); // WebApp webApp = (WebApp) servletContext; // publisherContext.setAttribute(JaxRsWebContainerConstants.NAMESPACE_COLLABORATOR, webApp.getCollaboratorHelper().getWebAppNameSpaceCollaborator()); publisherContext.setAttribute(JaxRsConstants.ENDPOINT_INFO_BUILDER_CONTEXT, new EndpointInfoBuilderContext( servletContext.getModuleContainer().adapt(WebAnnotations.class).getInfoStore(), servletContext.getModuleContainer() )); //Add collaborator to publisherContext, IBMRestServlet can get it later WebApp webApp = (WebApp) servletContext; publisherContext.setAttribute(JaxRsConstants.COLLABORATOR, webApp.getCollaboratorHelper().getWebAppNameSpaceCollaborator()); // get endpoint publisher and do publish // EndpointPublisher endpointPublisher = getEndpointPublisher(JaxRsConstants.WEB_ENDPOINT_PUBLISHER_TYPE); EndpointPublisher endpointPublisher = endpointPublisherSR.getServiceWithException(); for (EndpointInfo endpointInfo : jaxRsModuleInfo.getEndpointInfos()) { endpointPublisher.publish(endpointInfo, publisherContext); } for (JaxRsWebAppConfigurator jaxRsWebAppConfigurator : jaxRsWebAppConfigurators) { jaxRsWebAppConfigurator.configure(jaxRsModuleInfo, servletContext.getWebAppConfig()); } return new JaxRsExtensionProcessor(servletContext); } }
public class class_name { @Override public ExtensionProcessor createExtensionProcessor(IServletContext servletContext) throws Exception { WebModuleMetaData moduleMetaData = ((WebAppConfigExtended) (servletContext.getWebAppConfig())).getMetaData(); JaxRsModuleMetaData jaxRsModuleMetaData = JaxRsMetaDataManager.getJaxRsModuleMetaData(moduleMetaData); //If jaxrs-2.0 feature is enabled while the server is on the running status, WebContainer service may receive the JaxRsExtensionFactory registration //service before the started applications are removed, at this time, no JaxRsModuleMeta was stored in the application metadata //So, now we just return null, as the application will be restarted as it is configured in the jaxrs feature file if (jaxRsModuleMetaData == null) { return null; } //Add WebAppInjectionInterceptor to JaxWsInstanceManager // jaxRsModuleMetaData.getJaxRsInstanceManager().addInterceptor(new WebAppInjectionInstanceInterceptor(servletContext)); //Get JaxRsModuleInfo NonPersistentCache overlayCache = servletContext.getModuleContainer().adapt(NonPersistentCache.class); JaxRsModuleInfo jaxRsModuleInfo = (JaxRsModuleInfo) overlayCache.getFromCache(JaxRsModuleInfo.class); //No WebService Implementation is found and just return null to indicate no interest on the request processing if (jaxRsModuleInfo == null || jaxRsModuleInfo.endpointInfoSize() == 0) { if (tc.isDebugEnabled()) { Tr.debug(tc, "No JAX-RS service is found in the web module, will not create web service processor"); // depends on control dependency: [if], data = [none] } return null; } Container publisherModuleContainer = servletContext.getModuleContainer(); JaxRsPublisherContext publisherContext = new JaxRsPublisherContext(jaxRsModuleMetaData, publisherModuleContainer, JaxRsUtils.getWebModuleInfo(publisherModuleContainer)); publisherContext.setAttribute(JaxRsServerConstants.SERVLET_CONTEXT, servletContext); // WebApp webApp = (WebApp) servletContext; // publisherContext.setAttribute(JaxRsWebContainerConstants.NAMESPACE_COLLABORATOR, webApp.getCollaboratorHelper().getWebAppNameSpaceCollaborator()); publisherContext.setAttribute(JaxRsConstants.ENDPOINT_INFO_BUILDER_CONTEXT, new EndpointInfoBuilderContext( servletContext.getModuleContainer().adapt(WebAnnotations.class).getInfoStore(), servletContext.getModuleContainer() )); //Add collaborator to publisherContext, IBMRestServlet can get it later WebApp webApp = (WebApp) servletContext; publisherContext.setAttribute(JaxRsConstants.COLLABORATOR, webApp.getCollaboratorHelper().getWebAppNameSpaceCollaborator()); // get endpoint publisher and do publish // EndpointPublisher endpointPublisher = getEndpointPublisher(JaxRsConstants.WEB_ENDPOINT_PUBLISHER_TYPE); EndpointPublisher endpointPublisher = endpointPublisherSR.getServiceWithException(); for (EndpointInfo endpointInfo : jaxRsModuleInfo.getEndpointInfos()) { endpointPublisher.publish(endpointInfo, publisherContext); } for (JaxRsWebAppConfigurator jaxRsWebAppConfigurator : jaxRsWebAppConfigurators) { jaxRsWebAppConfigurator.configure(jaxRsModuleInfo, servletContext.getWebAppConfig()); } return new JaxRsExtensionProcessor(servletContext); } }
public class class_name {
    public static <T> List<String> getModifiedFields(T src, T dst, String... ignoreFields) {
        Class clazz = src.getClass();
        Method[] methods = clazz.getMethods();
        List<String> ignoreFiledList = Arrays.asList(ignoreFields);
        List<String> modifiedFields = new ArrayList<String>();
        for (Method getterMethod : methods) { // iterate over the target object's methods
            if (Modifier.isStatic(getterMethod.getModifiers()) || !ReflectUtils.isBeanPropertyReadMethod(getterMethod)) { // must be a non-static getter method
                continue;
            }
            String propertyName = ReflectUtils.getPropertyNameFromBeanReadMethod(getterMethod);
            if (ignoreFiledList.contains(propertyName)) { // ignored field
                continue;
            }
            Class returnType = getterMethod.getReturnType();
            try { // the target field also needs a setter method
                Method setterMethod = ReflectUtils.getPropertySetterMethod(clazz, propertyName, returnType);
                if (setterMethod != null) {
                    Object srcVal = getterMethod.invoke(src); // original value
                    Object dstVal = getterMethod.invoke(dst); // value after modification
                    if (srcVal == null) { // left side is null
                        if (dstVal != null) {
                            modifiedFields.add(propertyName);
                        }
                    } else {
                        if (dstVal == null) { // right side is null
                            modifiedFields.add(propertyName);
                        } else {
                            if (!srcVal.equals(dstVal)) { // both non-null and different
                                modifiedFields.add(propertyName);
                            }
                        }
                    }
                }
            } catch (Exception ignore) {
                // ignore and move on to the next iteration
            }
        }
        return modifiedFields;
    }
}
public class class_name {
    public static <T> List<String> getModifiedFields(T src, T dst, String... ignoreFields) {
        Class clazz = src.getClass();
        Method[] methods = clazz.getMethods();
        List<String> ignoreFiledList = Arrays.asList(ignoreFields);
        List<String> modifiedFields = new ArrayList<String>();
        for (Method getterMethod : methods) { // iterate over the target object's methods
            if (Modifier.isStatic(getterMethod.getModifiers()) || !ReflectUtils.isBeanPropertyReadMethod(getterMethod)) { // must be a non-static getter method
                continue;
            }
            String propertyName = ReflectUtils.getPropertyNameFromBeanReadMethod(getterMethod);
            if (ignoreFiledList.contains(propertyName)) { // ignored field
                continue;
            }
            Class returnType = getterMethod.getReturnType();
            try { // the target field also needs a setter method
                Method setterMethod = ReflectUtils.getPropertySetterMethod(clazz, propertyName, returnType);
                if (setterMethod != null) {
                    Object srcVal = getterMethod.invoke(src); // original value
                    Object dstVal = getterMethod.invoke(dst); // value after modification
                    if (srcVal == null) { // left side is null
                        if (dstVal != null) {
                            modifiedFields.add(propertyName); // depends on control dependency: [if], data = [none]
                        }
                    } else {
                        if (dstVal == null) { // right side is null
                            modifiedFields.add(propertyName); // depends on control dependency: [if], data = [none]
                        } else {
                            if (!srcVal.equals(dstVal)) { // both non-null and different
                                modifiedFields.add(propertyName); // depends on control dependency: [if], data = [none]
                            }
                        }
                    }
                }
            } catch (Exception ignore) {
                // ignore and move on to the next iteration
            }
        }
        return modifiedFields;
    }
}
public class class_name { public String printISO8601() { StringBuilder sb = new StringBuilder(); if (_year > 0) { sb.append((_year / 1000) % 10); sb.append((_year / 100) % 10); sb.append((_year / 10) % 10); sb.append(_year % 10); sb.append('-'); sb.append(((_month + 1) / 10) % 10); sb.append((_month + 1) % 10); sb.append('-'); sb.append(((_dayOfMonth + 1) / 10) % 10); sb.append((_dayOfMonth + 1) % 10); } long time = _timeOfDay / 1000; long ms = _timeOfDay % 1000; sb.append('T'); sb.append((time / 36000) % 10); sb.append((time / 3600) % 10); sb.append(':'); sb.append((time / 600) % 6); sb.append((time / 60) % 10); sb.append(':'); sb.append((time / 10) % 6); sb.append((time / 1) % 10); if (ms != 0) { sb.append('.'); sb.append((ms / 100) % 10); sb.append((ms / 10) % 10); sb.append(ms % 10); } if (_zoneName == null) { sb.append("Z"); return sb.toString(); } // server/1471 - XXX: was commented out long offset = _zoneOffset; if (offset < 0) { sb.append("-"); offset = - offset; } else sb.append("+"); sb.append((offset / 36000000) % 10); sb.append((offset / 3600000) % 10); sb.append(':'); sb.append((offset / 600000) % 6); sb.append((offset / 60000) % 10); return sb.toString(); } }
public class class_name { public String printISO8601() { StringBuilder sb = new StringBuilder(); if (_year > 0) { sb.append((_year / 1000) % 10); // depends on control dependency: [if], data = [(_year] sb.append((_year / 100) % 10); // depends on control dependency: [if], data = [(_year] sb.append((_year / 10) % 10); // depends on control dependency: [if], data = [(_year] sb.append(_year % 10); // depends on control dependency: [if], data = [(_year] sb.append('-'); // depends on control dependency: [if], data = [none] sb.append(((_month + 1) / 10) % 10); // depends on control dependency: [if], data = [0)] sb.append((_month + 1) % 10); // depends on control dependency: [if], data = [0)] sb.append('-'); // depends on control dependency: [if], data = [none] sb.append(((_dayOfMonth + 1) / 10) % 10); // depends on control dependency: [if], data = [0)] sb.append((_dayOfMonth + 1) % 10); // depends on control dependency: [if], data = [0)] } long time = _timeOfDay / 1000; long ms = _timeOfDay % 1000; sb.append('T'); sb.append((time / 36000) % 10); sb.append((time / 3600) % 10); sb.append(':'); sb.append((time / 600) % 6); sb.append((time / 60) % 10); sb.append(':'); sb.append((time / 10) % 6); sb.append((time / 1) % 10); if (ms != 0) { sb.append('.'); // depends on control dependency: [if], data = [none] sb.append((ms / 100) % 10); // depends on control dependency: [if], data = [(ms] sb.append((ms / 10) % 10); // depends on control dependency: [if], data = [(ms] sb.append(ms % 10); // depends on control dependency: [if], data = [(ms] } if (_zoneName == null) { sb.append("Z"); // depends on control dependency: [if], data = [none] return sb.toString(); // depends on control dependency: [if], data = [none] } // server/1471 - XXX: was commented out long offset = _zoneOffset; if (offset < 0) { sb.append("-"); // depends on control dependency: [if], data = [none] offset = - offset; // depends on control dependency: [if], data = [none] } else sb.append("+"); sb.append((offset / 36000000) % 10); sb.append((offset / 3600000) % 10); sb.append(':'); sb.append((offset / 600000) % 6); sb.append((offset / 60000) % 10); return sb.toString(); } }
public class class_name { private List<Runner> addDynamicClasses(final RunnerBuilder builder, final ContractTestMap contractTestMap, final Dynamic dynamic) throws InitializationError { final Class<? extends Dynamic> dynamicClass = dynamic.getClass(); // this is the list of all the JUnit runners in the suite. final List<Runner> runners = new ArrayList<Runner>(); ContractImpl impl = getContractImpl( dynamicClass ); if (impl == null) { return runners; } final DynamicSuiteInfo dynamicSuiteInfo = new DynamicSuiteInfo( dynamicClass, impl ); final Collection<Class<?>> tests = dynamic.getSuiteClasses(); if ((tests == null) || (tests.size() == 0)) { dynamicSuiteInfo .addError( new InitializationError( "Dynamic suite did not return a list of classes to execute" ) ); runners.add( new TestInfoErrorRunner( dynamicClass, dynamicSuiteInfo ) ); } else { for (final Class<?> test : tests) { final RunWith runwith = test.getAnnotation( RunWith.class ); if ((runwith != null) && runwith.value().equals( ContractSuite.class )) { impl = getContractImpl( test ); if (impl != null) { final DynamicTestInfo parentTestInfo = new DynamicTestInfo( test, impl, dynamicSuiteInfo ); if (!parentTestInfo.hasErrors()) { addSpecifiedClasses( runners, test, builder, contractTestMap, dynamic, parentTestInfo ); } // this is not an else as addSpecifiedClasses may add // errors to parentTestInfo if (parentTestInfo.hasErrors()) { runners.add( new TestInfoErrorRunner( dynamicClass, parentTestInfo ) ); } } } else { try { runners.add( builder.runnerForClass( test ) ); } catch (final Throwable t) { throw new InitializationError( t ); } } } } return runners; } }
public class class_name { private List<Runner> addDynamicClasses(final RunnerBuilder builder, final ContractTestMap contractTestMap, final Dynamic dynamic) throws InitializationError { final Class<? extends Dynamic> dynamicClass = dynamic.getClass(); // this is the list of all the JUnit runners in the suite. final List<Runner> runners = new ArrayList<Runner>(); ContractImpl impl = getContractImpl( dynamicClass ); if (impl == null) { return runners; } final DynamicSuiteInfo dynamicSuiteInfo = new DynamicSuiteInfo( dynamicClass, impl ); final Collection<Class<?>> tests = dynamic.getSuiteClasses(); if ((tests == null) || (tests.size() == 0)) { dynamicSuiteInfo .addError( new InitializationError( "Dynamic suite did not return a list of classes to execute" ) ); runners.add( new TestInfoErrorRunner( dynamicClass, dynamicSuiteInfo ) ); } else { for (final Class<?> test : tests) { final RunWith runwith = test.getAnnotation( RunWith.class ); if ((runwith != null) && runwith.value().equals( ContractSuite.class )) { impl = getContractImpl( test ); if (impl != null) { final DynamicTestInfo parentTestInfo = new DynamicTestInfo( test, impl, dynamicSuiteInfo ); if (!parentTestInfo.hasErrors()) { addSpecifiedClasses( runners, test, builder, contractTestMap, dynamic, parentTestInfo ); // depends on control dependency: [if], data = [none] } // this is not an else as addSpecifiedClasses may add // errors to parentTestInfo if (parentTestInfo.hasErrors()) { runners.add( new TestInfoErrorRunner( dynamicClass, parentTestInfo ) ); // depends on control dependency: [if], data = [none] } } } else { try { runners.add( builder.runnerForClass( test ) ); // depends on control dependency: [try], data = [none] } catch (final Throwable t) { throw new InitializationError( t ); } // depends on control dependency: [catch], data = [none] } } } return runners; } }
public class class_name {
    public synchronized void storeEventForCurrentAppVersion(double timestamp, int versionCode, String versionName, String eventLabel) {
        EventRecord eventRecord = events.get(eventLabel);
        if (eventRecord == null) {
            eventRecord = new EventRecord();
            events.put(eventLabel, eventRecord);
        }
        eventRecord.update(timestamp, versionName, versionCode);
        notifyDataChanged();
    }
}
public class class_name {
    public synchronized void storeEventForCurrentAppVersion(double timestamp, int versionCode, String versionName, String eventLabel) {
        EventRecord eventRecord = events.get(eventLabel);
        if (eventRecord == null) {
            eventRecord = new EventRecord(); // depends on control dependency: [if], data = [none]
            events.put(eventLabel, eventRecord); // depends on control dependency: [if], data = [none]
        }
        eventRecord.update(timestamp, versionName, versionCode);
        notifyDataChanged();
    }
}
public class class_name {
    public long[] getShapeForVarName(String varName) {
        if (variableNameToArr.containsKey(varName)) {
            return variableNameToArr.get(varName).shape();
        }
        return variableNameToShape.get(varName);
    }
}
public class class_name {
    public long[] getShapeForVarName(String varName) {
        if (variableNameToArr.containsKey(varName)) {
            return variableNameToArr.get(varName).shape(); // depends on control dependency: [if], data = [none]
        }
        return variableNameToShape.get(varName);
    }
}
public class class_name {
    @SuppressWarnings({"unchecked", "rawtypes"})
    @Private
    static <T> Parser<T>[] toArray(Iterable<? extends Parser<? extends T>> parsers) {
        if (parsers instanceof Collection<?>) {
            return toArray((Collection) parsers);
        }
        return toArrayWithIteration(parsers);
    }
}
public class class_name {
    @SuppressWarnings({"unchecked", "rawtypes"})
    @Private
    static <T> Parser<T>[] toArray(Iterable<? extends Parser<? extends T>> parsers) {
        if (parsers instanceof Collection<?>) {
            return toArray((Collection) parsers); // depends on control dependency: [if], data = [)]
        }
        return toArrayWithIteration(parsers);
    }
}
public class class_name { protected boolean createStandardSARLEventTemplates(String elementTypeName, Function1<? super String, ? extends ISarlBehaviorUnitBuilder> behaviorUnitAdder, Procedure1<? super String> usesAdder) { if (!isCreateStandardEventHandlers()) { return false; } Object type; try { type = getTypeFinder().findType(INITIALIZE_EVENT_NAME); } catch (JavaModelException e) { type = null; } if (type != null) { // SARL Libraries are on the classpath usesAdder.apply(LOGGING_CAPACITY_NAME); ISarlBehaviorUnitBuilder unit = behaviorUnitAdder.apply(INITIALIZE_EVENT_NAME); IBlockExpressionBuilder block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_9, elementTypeName)); IExpressionBuilder expr = block.addExpression(); createInfoCall(expr, "The " + elementTypeName + " was started."); //$NON-NLS-1$ //$NON-NLS-2$ unit = behaviorUnitAdder.apply(DESTROY_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_10, elementTypeName)); expr = block.addExpression(); createInfoCall(expr, "The " + elementTypeName + " was stopped."); //$NON-NLS-1$ //$NON-NLS-2$ unit = behaviorUnitAdder.apply(AGENTSPAWNED_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_11, elementTypeName)); unit = behaviorUnitAdder.apply(AGENTKILLED_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_12, elementTypeName)); unit = behaviorUnitAdder.apply(CONTEXTJOINED_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_13, elementTypeName)); unit = behaviorUnitAdder.apply(CONTEXTLEFT_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_14, elementTypeName)); unit = behaviorUnitAdder.apply(MEMBERJOINED_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_15, elementTypeName)); unit = behaviorUnitAdder.apply(MEMBERLEFT_EVENT_NAME); block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_16, elementTypeName)); return true; } return false; } }
public class class_name { protected boolean createStandardSARLEventTemplates(String elementTypeName, Function1<? super String, ? extends ISarlBehaviorUnitBuilder> behaviorUnitAdder, Procedure1<? super String> usesAdder) { if (!isCreateStandardEventHandlers()) { return false; // depends on control dependency: [if], data = [none] } Object type; try { type = getTypeFinder().findType(INITIALIZE_EVENT_NAME); // depends on control dependency: [try], data = [none] } catch (JavaModelException e) { type = null; } // depends on control dependency: [catch], data = [none] if (type != null) { // SARL Libraries are on the classpath usesAdder.apply(LOGGING_CAPACITY_NAME); // depends on control dependency: [if], data = [none] ISarlBehaviorUnitBuilder unit = behaviorUnitAdder.apply(INITIALIZE_EVENT_NAME); IBlockExpressionBuilder block = unit.getExpression(); block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_9, elementTypeName)); // depends on control dependency: [if], data = [none] IExpressionBuilder expr = block.addExpression(); createInfoCall(expr, "The " + elementTypeName + " was started."); //$NON-NLS-1$ //$NON-NLS-2$ // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(DESTROY_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_10, elementTypeName)); // depends on control dependency: [if], data = [none] expr = block.addExpression(); // depends on control dependency: [if], data = [none] createInfoCall(expr, "The " + elementTypeName + " was stopped."); //$NON-NLS-1$ //$NON-NLS-2$ // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(AGENTSPAWNED_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_11, elementTypeName)); // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(AGENTKILLED_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_12, elementTypeName)); // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(CONTEXTJOINED_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_13, elementTypeName)); // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(CONTEXTLEFT_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_14, elementTypeName)); // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(MEMBERJOINED_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( 
Messages.AbstractNewSarlElementWizardPage_15, elementTypeName)); // depends on control dependency: [if], data = [none] unit = behaviorUnitAdder.apply(MEMBERLEFT_EVENT_NAME); // depends on control dependency: [if], data = [none] block = unit.getExpression(); // depends on control dependency: [if], data = [none] block.setInnerDocumentation(MessageFormat.format( Messages.AbstractNewSarlElementWizardPage_16, elementTypeName)); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static <S extends Storable> OrderingScore<S> evaluate (OrderedProperty<S>[] indexProperties, boolean unique, boolean clustered, Filter<S> filter, OrderingList<S> ordering) { if (indexProperties == null) { throw new IllegalArgumentException("Index properties required"); } // Get filter list early to detect errors. List<PropertyFilter<S>> filterList = PropertyFilterList.get(filter); if (ordering == null) { ordering = OrderingList.emptyList(); } // Ordering properties which match identity filters don't affect order // results. Build up this set to find them quickly. Set<ChainedProperty<S>> identityPropSet = new HashSet<ChainedProperty<S>>(filterList.size()); for (PropertyFilter<S> propFilter : filterList) { if (propFilter.getOperator() == RelOp.EQ) { identityPropSet.add(propFilter.getChainedProperty()); } } OrderingList<S> handledOrdering = OrderingList.emptyList(); OrderingList<S> remainderOrdering = OrderingList.emptyList(); OrderingList<S> freeOrdering = OrderingList.emptyList(); OrderingList<S> unusedOrdering = OrderingList.emptyList(); // Build up list of unused properties that were filtered out. for (int i=0; i<indexProperties.length; i++) { OrderedProperty<S> indexProp = indexProperties[i]; ChainedProperty<S> indexChained = indexProp.getChainedProperty(); if (identityPropSet.contains(indexChained)) { unusedOrdering = unusedOrdering.concat(indexProp.direction(UNSPECIFIED)); } } // If index is unique and every property is matched by an identity // filter, then there won't be any handled or remainder properties. uniquelyCheck: if (unique) { for (int i=0; i<indexProperties.length; i++) { ChainedProperty<S> indexChained = indexProperties[i].getChainedProperty(); if (!identityPropSet.contains(indexChained)) { // Missed a property, so ordering is still relevant. break uniquelyCheck; } } return new OrderingScore<S>(indexProperties, clustered, handledOrdering, // no handled properties remainderOrdering, // no remainder properties false, // no need to reverse order freeOrdering, // no free properties unusedOrdering); } Boolean shouldReverseOrder = null; Set<ChainedProperty<S>> seen = new HashSet<ChainedProperty<S>>(); boolean gap = false; int indexPos = 0; calcScore: for (int i=0; i<ordering.size(); i++) { OrderedProperty<S> property = ordering.get(i); ChainedProperty<S> chained = property.getChainedProperty(); if (seen.contains(chained)) { // Redundant property doesn't affect ordering. continue calcScore; } seen.add(chained); if (identityPropSet.contains(chained)) { // Doesn't affect ordering. continue calcScore; } indexPosMatch: while (!gap && indexPos < indexProperties.length) { OrderedProperty<S> indexProp = indexProperties[indexPos]; ChainedProperty<S> indexChained = indexProp.getChainedProperty(); if (chained.equals(indexChained)) { Direction indexDir = indexProp.getDirection(); if (indexDir == UNSPECIFIED) { // Assume index natural order is ascending. indexDir = ASCENDING; } if (shouldReverseOrder != null && shouldReverseOrder) { indexDir = indexDir.reverse(); } if (property.getDirection() == UNSPECIFIED) { // Set handled property direction to match index. property = property.direction(indexDir); } else if (shouldReverseOrder == null) { shouldReverseOrder = indexDir != property.getDirection(); // Any properies already in the list had been // originally unspecified. They might need to be // reversed now. 
if (shouldReverseOrder) { handledOrdering = handledOrdering.reverseDirections(); } } else if (indexDir != property.getDirection()) { // Direction mismatch, so cannot be handled. break indexPosMatch; } handledOrdering = handledOrdering.concat(property); indexPos++; continue calcScore; } if (identityPropSet.contains(indexChained)) { // Even though ordering did not match index at current // position, the search for handled propertes can continue if // index gap matches an identity filter. indexPos++; continue indexPosMatch; } // Index gap, so cannot be handled. break indexPosMatch; } // Property not handled and not an identity filter. remainderOrdering = remainderOrdering.concat(property); gap = true; } // Walk through all remaining index properties and list them as free. while (indexPos < indexProperties.length) { OrderedProperty<S> freeProp = indexProperties[indexPos]; ChainedProperty<S> freeChained = freeProp.getChainedProperty(); // Don't list as free if already listed as unused. if (!identityPropSet.contains(freeChained)) { if (shouldReverseOrder == null) { freeProp = freeProp.direction(UNSPECIFIED); } else { Direction freePropDir = freeProp.getDirection(); if (freePropDir == UNSPECIFIED) { freePropDir = ASCENDING; } if (shouldReverseOrder) { freeProp = freeProp.direction(freePropDir.reverse()); } } freeOrdering = freeOrdering.concat(freeProp); } indexPos++; } if (shouldReverseOrder == null) { shouldReverseOrder = false; } return new OrderingScore<S>(indexProperties, clustered, handledOrdering, remainderOrdering, shouldReverseOrder, freeOrdering, unusedOrdering); } }
public class class_name { public static <S extends Storable> OrderingScore<S> evaluate (OrderedProperty<S>[] indexProperties, boolean unique, boolean clustered, Filter<S> filter, OrderingList<S> ordering) { if (indexProperties == null) { throw new IllegalArgumentException("Index properties required"); } // Get filter list early to detect errors. List<PropertyFilter<S>> filterList = PropertyFilterList.get(filter); if (ordering == null) { ordering = OrderingList.emptyList(); // depends on control dependency: [if], data = [none] } // Ordering properties which match identity filters don't affect order // results. Build up this set to find them quickly. Set<ChainedProperty<S>> identityPropSet = new HashSet<ChainedProperty<S>>(filterList.size()); for (PropertyFilter<S> propFilter : filterList) { if (propFilter.getOperator() == RelOp.EQ) { identityPropSet.add(propFilter.getChainedProperty()); // depends on control dependency: [if], data = [none] } } OrderingList<S> handledOrdering = OrderingList.emptyList(); OrderingList<S> remainderOrdering = OrderingList.emptyList(); OrderingList<S> freeOrdering = OrderingList.emptyList(); OrderingList<S> unusedOrdering = OrderingList.emptyList(); // Build up list of unused properties that were filtered out. for (int i=0; i<indexProperties.length; i++) { OrderedProperty<S> indexProp = indexProperties[i]; ChainedProperty<S> indexChained = indexProp.getChainedProperty(); if (identityPropSet.contains(indexChained)) { unusedOrdering = unusedOrdering.concat(indexProp.direction(UNSPECIFIED)); // depends on control dependency: [if], data = [none] } } // If index is unique and every property is matched by an identity // filter, then there won't be any handled or remainder properties. uniquelyCheck: if (unique) { for (int i=0; i<indexProperties.length; i++) { ChainedProperty<S> indexChained = indexProperties[i].getChainedProperty(); if (!identityPropSet.contains(indexChained)) { // Missed a property, so ordering is still relevant. break uniquelyCheck; } } return new OrderingScore<S>(indexProperties, clustered, handledOrdering, // no handled properties remainderOrdering, // no remainder properties false, // no need to reverse order freeOrdering, // no free properties unusedOrdering); // depends on control dependency: [if], data = [none] } Boolean shouldReverseOrder = null; Set<ChainedProperty<S>> seen = new HashSet<ChainedProperty<S>>(); boolean gap = false; int indexPos = 0; calcScore: for (int i=0; i<ordering.size(); i++) { OrderedProperty<S> property = ordering.get(i); ChainedProperty<S> chained = property.getChainedProperty(); if (seen.contains(chained)) { // Redundant property doesn't affect ordering. continue calcScore; } seen.add(chained); // depends on control dependency: [for], data = [none] if (identityPropSet.contains(chained)) { // Doesn't affect ordering. continue calcScore; } indexPosMatch: while (!gap && indexPos < indexProperties.length) { OrderedProperty<S> indexProp = indexProperties[indexPos]; ChainedProperty<S> indexChained = indexProp.getChainedProperty(); if (chained.equals(indexChained)) { Direction indexDir = indexProp.getDirection(); if (indexDir == UNSPECIFIED) { // Assume index natural order is ascending. indexDir = ASCENDING; // depends on control dependency: [if], data = [none] } if (shouldReverseOrder != null && shouldReverseOrder) { indexDir = indexDir.reverse(); // depends on control dependency: [if], data = [none] } if (property.getDirection() == UNSPECIFIED) { // Set handled property direction to match index. 
property = property.direction(indexDir); // depends on control dependency: [if], data = [none] } else if (shouldReverseOrder == null) { shouldReverseOrder = indexDir != property.getDirection(); // depends on control dependency: [if], data = [none] // Any properies already in the list had been // originally unspecified. They might need to be // reversed now. if (shouldReverseOrder) { handledOrdering = handledOrdering.reverseDirections(); // depends on control dependency: [if], data = [none] } } else if (indexDir != property.getDirection()) { // Direction mismatch, so cannot be handled. break indexPosMatch; } handledOrdering = handledOrdering.concat(property); // depends on control dependency: [if], data = [none] indexPos++; // depends on control dependency: [if], data = [none] continue calcScore; } if (identityPropSet.contains(indexChained)) { // Even though ordering did not match index at current // position, the search for handled propertes can continue if // index gap matches an identity filter. indexPos++; // depends on control dependency: [if], data = [none] continue indexPosMatch; } // Index gap, so cannot be handled. break indexPosMatch; } // Property not handled and not an identity filter. remainderOrdering = remainderOrdering.concat(property); // depends on control dependency: [for], data = [none] gap = true; // depends on control dependency: [for], data = [none] } // Walk through all remaining index properties and list them as free. while (indexPos < indexProperties.length) { OrderedProperty<S> freeProp = indexProperties[indexPos]; ChainedProperty<S> freeChained = freeProp.getChainedProperty(); // Don't list as free if already listed as unused. if (!identityPropSet.contains(freeChained)) { if (shouldReverseOrder == null) { freeProp = freeProp.direction(UNSPECIFIED); // depends on control dependency: [if], data = [none] } else { Direction freePropDir = freeProp.getDirection(); if (freePropDir == UNSPECIFIED) { freePropDir = ASCENDING; // depends on control dependency: [if], data = [none] } if (shouldReverseOrder) { freeProp = freeProp.direction(freePropDir.reverse()); // depends on control dependency: [if], data = [none] } } freeOrdering = freeOrdering.concat(freeProp); // depends on control dependency: [if], data = [none] } indexPos++; // depends on control dependency: [while], data = [none] } if (shouldReverseOrder == null) { shouldReverseOrder = false; // depends on control dependency: [if], data = [none] } return new OrderingScore<S>(indexProperties, clustered, handledOrdering, remainderOrdering, shouldReverseOrder, freeOrdering, unusedOrdering); } }
public class class_name {
    public boolean movePosition(float[] npos, NavMeshQuery navquery, QueryFilter filter) {
        // Move along navmesh and update new position.
        Result<MoveAlongSurfaceResult> masResult = navquery.moveAlongSurface(m_path.get(0), m_pos, npos, filter);
        if (masResult.succeeded()) {
            m_path = mergeCorridorStartMoved(m_path, masResult.result.getVisited());
            // Adjust the position to stay on top of the navmesh.
            vCopy(m_pos, masResult.result.getResultPos());
            Result<Float> hr = navquery.getPolyHeight(m_path.get(0), masResult.result.getResultPos());
            if (hr.succeeded()) {
                m_pos[1] = hr.result;
            }
            return true;
        }
        return false;
    }
}
public class class_name {
    public boolean movePosition(float[] npos, NavMeshQuery navquery, QueryFilter filter) {
        // Move along navmesh and update new position.
        Result<MoveAlongSurfaceResult> masResult = navquery.moveAlongSurface(m_path.get(0), m_pos, npos, filter);
        if (masResult.succeeded()) {
            m_path = mergeCorridorStartMoved(m_path, masResult.result.getVisited()); // depends on control dependency: [if], data = [none]
            // Adjust the position to stay on top of the navmesh.
            vCopy(m_pos, masResult.result.getResultPos()); // depends on control dependency: [if], data = [none]
            Result<Float> hr = navquery.getPolyHeight(m_path.get(0), masResult.result.getResultPos());
            if (hr.succeeded()) {
                m_pos[1] = hr.result; // depends on control dependency: [if], data = [none]
            }
            return true; // depends on control dependency: [if], data = [none]
        }
        return false;
    }
}
public class class_name {
    public DeleteParametersResult withDeletedParameters(String... deletedParameters) {
        if (this.deletedParameters == null) {
            setDeletedParameters(new com.amazonaws.internal.SdkInternalList<String>(deletedParameters.length));
        }
        for (String ele : deletedParameters) {
            this.deletedParameters.add(ele);
        }
        return this;
    }
}
public class class_name {
    public DeleteParametersResult withDeletedParameters(String... deletedParameters) {
        if (this.deletedParameters == null) {
            setDeletedParameters(new com.amazonaws.internal.SdkInternalList<String>(deletedParameters.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : deletedParameters) {
            this.deletedParameters.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    }
}
public class class_name {
    public HystrixObservableCommand.Setter buildObservableCommandSetter() {
        HystrixObservableCommand.Setter setter = HystrixObservableCommand.Setter
                .withGroupKey(HystrixCommandGroupKey.Factory.asKey(groupKey))
                .andCommandKey(HystrixCommandKey.Factory.asKey(commandKey));
        try {
            setter.andCommandPropertiesDefaults(HystrixPropertiesManager.initializeCommandProperties(commandProperties));
        } catch (IllegalArgumentException e) {
            throw new HystrixPropertyException("Failed to set Command properties. " + getInfo(), e);
        }
        return setter;
    }
}
public class class_name {
    public HystrixObservableCommand.Setter buildObservableCommandSetter() {
        HystrixObservableCommand.Setter setter = HystrixObservableCommand.Setter
                .withGroupKey(HystrixCommandGroupKey.Factory.asKey(groupKey))
                .andCommandKey(HystrixCommandKey.Factory.asKey(commandKey));
        try {
            setter.andCommandPropertiesDefaults(HystrixPropertiesManager.initializeCommandProperties(commandProperties)); // depends on control dependency: [try], data = [none]
        } catch (IllegalArgumentException e) {
            throw new HystrixPropertyException("Failed to set Command properties. " + getInfo(), e);
        } // depends on control dependency: [catch], data = [none]
        return setter;
    }
}
public class class_name {
    @SuppressWarnings("unchecked")
    public synchronized <T extends SebEvent> T constructEvent(Class<T> eventCls, SebContext context) {
        try {
            return (T) eventCls.getConstructor().newInstance().with(context, LocalDateTime.now());
        } catch (Exception e) {
            throw new EventConstructException("Unable to construct event " + eventCls.getName(), e);
        }
    }
}
public class class_name {
    @SuppressWarnings("unchecked")
    public synchronized <T extends SebEvent> T constructEvent(Class<T> eventCls, SebContext context) {
        try {
            return (T) eventCls.getConstructor().newInstance().with(context, LocalDateTime.now()); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new EventConstructException("Unable to construct event " + eventCls.getName(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    @Override
    public void visitToken(Token token) {
        if (!token.getType().equals(EOF)) {
            /* Handle all the lines of the token */
            String[] tokenLines = EOL_PATTERN.split(token.getValue(), -1);
            int firstLineAlreadyCounted = lastTokenLine == token.getLine() ? 1 : 0;
            getContext().peekSourceCode().add(metric, (double) tokenLines.length - firstLineAlreadyCounted);
            lastTokenLine = token.getLine() + tokenLines.length - 1;
        }
    }
}
public class class_name {
    @Override
    public void visitToken(Token token) {
        if (!token.getType().equals(EOF)) {
            /* Handle all the lines of the token */
            String[] tokenLines = EOL_PATTERN.split(token.getValue(), -1);
            int firstLineAlreadyCounted = lastTokenLine == token.getLine() ? 1 : 0;
            getContext().peekSourceCode().add(metric, (double) tokenLines.length - firstLineAlreadyCounted); // depends on control dependency: [if], data = [none]
            lastTokenLine = token.getLine() + tokenLines.length - 1; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    public EClass getIfcPlanarForceMeasure() {
        if (ifcPlanarForceMeasureEClass == null) {
            ifcPlanarForceMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(722);
        }
        return ifcPlanarForceMeasureEClass;
    }
}
public class class_name {
    public EClass getIfcPlanarForceMeasure() {
        if (ifcPlanarForceMeasureEClass == null) {
            ifcPlanarForceMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(722); // depends on control dependency: [if], data = [none]
        }
        return ifcPlanarForceMeasureEClass;
    }
}
public class class_name { @Nonnull public static ESuccess readXMLSAX (@WillClose @Nonnull final InputSource aIS, @Nonnull final ISAXReaderSettings aSettings) { ValueEnforcer.notNull (aIS, "InputStream"); ValueEnforcer.notNull (aSettings, "Settings"); try { boolean bFromPool = false; org.xml.sax.XMLReader aParser; if (aSettings.requiresNewXMLParser ()) { aParser = SAXReaderFactory.createXMLReader (); } else { // use parser from pool aParser = s_aSAXPool.borrowObject (); bFromPool = true; } try { final StopWatch aSW = StopWatch.createdStarted (); // Apply settings aSettings.applyToSAXReader (aParser); // Start parsing aParser.parse (aIS); // Statistics s_aSaxSuccessCounterHdl.increment (); s_aSaxTimerHdl.addTime (aSW.stopAndGetMillis ()); return ESuccess.SUCCESS; } finally { if (bFromPool) { // Return parser to pool s_aSAXPool.returnObject (aParser); } } } catch (final SAXParseException ex) { boolean bHandled = false; if (aSettings.getErrorHandler () != null) try { aSettings.getErrorHandler ().fatalError (ex); bHandled = true; } catch (final SAXException ex2) { // fall-through } if (!bHandled) aSettings.exceptionCallbacks ().forEach (x -> x.onException (ex)); } catch (final Exception ex) { aSettings.exceptionCallbacks ().forEach (x -> x.onException (ex)); } finally { // Close both byte stream and character stream, as we don't know which one // was used StreamHelper.close (aIS.getByteStream ()); StreamHelper.close (aIS.getCharacterStream ()); } s_aSaxErrorCounterHdl.increment (); return ESuccess.FAILURE; } }
public class class_name { @Nonnull public static ESuccess readXMLSAX (@WillClose @Nonnull final InputSource aIS, @Nonnull final ISAXReaderSettings aSettings) { ValueEnforcer.notNull (aIS, "InputStream"); ValueEnforcer.notNull (aSettings, "Settings"); try { boolean bFromPool = false; org.xml.sax.XMLReader aParser; if (aSettings.requiresNewXMLParser ()) { aParser = SAXReaderFactory.createXMLReader (); // depends on control dependency: [if], data = [none] } else { // use parser from pool aParser = s_aSAXPool.borrowObject (); // depends on control dependency: [if], data = [none] bFromPool = true; // depends on control dependency: [if], data = [none] } try { final StopWatch aSW = StopWatch.createdStarted (); // Apply settings aSettings.applyToSAXReader (aParser); // depends on control dependency: [try], data = [none] // Start parsing aParser.parse (aIS); // depends on control dependency: [try], data = [none] // Statistics s_aSaxSuccessCounterHdl.increment (); // depends on control dependency: [try], data = [none] s_aSaxTimerHdl.addTime (aSW.stopAndGetMillis ()); // depends on control dependency: [try], data = [none] return ESuccess.SUCCESS; // depends on control dependency: [try], data = [none] } finally { if (bFromPool) { // Return parser to pool s_aSAXPool.returnObject (aParser); // depends on control dependency: [if], data = [none] } } } catch (final SAXParseException ex) { boolean bHandled = false; if (aSettings.getErrorHandler () != null) try { aSettings.getErrorHandler ().fatalError (ex); // depends on control dependency: [try], data = [none] bHandled = true; // depends on control dependency: [try], data = [none] } catch (final SAXException ex2) { // fall-through } // depends on control dependency: [catch], data = [none] if (!bHandled) aSettings.exceptionCallbacks ().forEach (x -> x.onException (ex)); } // depends on control dependency: [catch], data = [none] catch (final Exception ex) { aSettings.exceptionCallbacks ().forEach (x -> x.onException (ex)); } // depends on control dependency: [catch], data = [none] finally { // Close both byte stream and character stream, as we don't know which one // was used StreamHelper.close (aIS.getByteStream ()); StreamHelper.close (aIS.getCharacterStream ()); } s_aSaxErrorCounterHdl.increment (); return ESuccess.FAILURE; } }
public class class_name { @SuppressWarnings("deprecation") public List<AbstractSleeComponent> buildComponents( String componentJarFileName, JarFile deployableUnitJar, File deploymentDir) throws DeploymentException { // extract the component jar from the DU jar, to the temp du dir File extractedFile = extractFile(componentJarFileName, deployableUnitJar, deploymentDir); JarFile componentJarFile = null; try { componentJarFile = new JarFile(extractedFile); } catch (IOException e) { throw new DeploymentException( "failed to create jar file for extracted file " + extractedFile); } InputStream componentDescriptorInputStream = null; List<AbstractSleeComponent> components = new ArrayList<AbstractSleeComponent>(); try { // now extract the jar file to a new dir File componentJarDeploymentDir = new File(deploymentDir, componentJarFileName + "-contents"); if (!componentJarDeploymentDir.exists()) { // the jar may not be on root of DU, create additional dirs if needed LinkedList<File> dirsToCreate = new LinkedList<File>(); File dir = componentJarDeploymentDir.getParentFile(); while(!dir.equals(deploymentDir)) { dirsToCreate.addFirst(dir); dir = dir.getParentFile(); } for (File f : dirsToCreate) { f.mkdir(); } // now create the dir for the component jar if (!componentJarDeploymentDir.mkdir()) { throw new SLEEException("dir for jar " + componentJarFileName + " not created in " + deploymentDir); } } else { throw new SLEEException("dir for jar " + componentJarFileName + " already exists in " + deploymentDir); } extractJar(componentJarFile, componentJarDeploymentDir); // create components from descriptor JarEntry componentDescriptor = null; if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/sbb-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); SbbDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getSbbDescriptorFactory(); List<SbbDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (SbbDescriptorImpl descriptor : descriptors) { PreferredPackagesBuilder.buildPreferredPackages(descriptor, classLoaderDomain); SbbComponentImpl component = new SbbComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/profile-spec-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); ProfileSpecificationDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getProfileSpecificationDescriptorFactory(); List<ProfileSpecificationDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (ProfileSpecificationDescriptorImpl descriptor : descriptors) { 
ProfileSpecificationComponentImpl component = new ProfileSpecificationComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/library-jar.xml")) != null) { Set<LibraryComponentImpl> libraryComponents = new HashSet<LibraryComponentImpl>(); // we need to gather all URLs for the shared class loader domain // to watch Set<URL> classLoaderDomainURLs = new HashSet<URL>(); classLoaderDomainURLs.add(componentJarDeploymentDir.toURL()); // parse the descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); LibraryDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getLibraryDescriptorFactory(); List<LibraryDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (LibraryDescriptorImpl descriptor : descriptors) { LibraryComponentImpl component = new LibraryComponentImpl( descriptor); for (JarDescriptor mJar : descriptor.getJars()) { classLoaderDomainURLs.add(new File( componentJarDeploymentDir, mJar.getJarName()) .toURL()); } // set deploy dir and cl domain component.setDeploymentDir(componentJarDeploymentDir); components.add(component); libraryComponents.add(component); } // create shared url domain URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( classLoaderDomainURLs .toArray(new URL[classLoaderDomainURLs.size()]), Thread.currentThread().getContextClassLoader()); // add it to each component for (LibraryComponentImpl component : libraryComponents) { component.setClassLoaderDomain(classLoaderDomain); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/event-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); EventTypeDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getEventTypeDescriptorFactory(); List<EventTypeDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (EventTypeDescriptorImpl descriptor : descriptors) { EventTypeComponentImpl component = new EventTypeComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/resource-adaptor-type-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement.getClassLoaderFactory().newClassLoaderDomain(new URL[] { componentJarDeploymentDir.toURL() }, Thread .currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); ResourceAdaptorTypeDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getResourceAdaptorTypeDescriptorFactory(); List<ResourceAdaptorTypeDescriptorImpl> descriptors = descriptorFactory 
.parse(componentDescriptorInputStream); // create components for (ResourceAdaptorTypeDescriptorImpl descriptor : descriptors) { ResourceAdaptorTypeComponentImpl component = new ResourceAdaptorTypeComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/resource-adaptor-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); ResourceAdaptorDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getResourceAdaptorDescriptorFactory(); List<ResourceAdaptorDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (ResourceAdaptorDescriptorImpl descriptor : descriptors) { ResourceAdaptorComponentImpl component = new ResourceAdaptorComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else { throw new DeploymentException( "No Deployment Descriptor found in the " + componentJarFile.getName() + " entry of a deployable unit."); } } catch (IOException e) { throw new DeploymentException( "failed to parse jar descriptor from " + componentJarFile.getName(), e); } finally { if (componentDescriptorInputStream != null) { try { componentDescriptorInputStream.close(); } catch (IOException e) { logger .error("failed to close inputstream of descriptor for jar " + componentJarFile); } } } // close component jar file try { componentJarFile.close(); } catch (IOException e) { logger.error("failed to close component jar file", e); } // and delete the extracted jar file, we don't need it anymore if (!extractedFile.delete()) { logger.warn("failed to delete " + extractedFile); } return components; } }
public class class_name { @SuppressWarnings("deprecation") public List<AbstractSleeComponent> buildComponents( String componentJarFileName, JarFile deployableUnitJar, File deploymentDir) throws DeploymentException { // extract the component jar from the DU jar, to the temp du dir File extractedFile = extractFile(componentJarFileName, deployableUnitJar, deploymentDir); JarFile componentJarFile = null; try { componentJarFile = new JarFile(extractedFile); } catch (IOException e) { throw new DeploymentException( "failed to create jar file for extracted file " + extractedFile); } InputStream componentDescriptorInputStream = null; List<AbstractSleeComponent> components = new ArrayList<AbstractSleeComponent>(); try { // now extract the jar file to a new dir File componentJarDeploymentDir = new File(deploymentDir, componentJarFileName + "-contents"); if (!componentJarDeploymentDir.exists()) { // the jar may not be on root of DU, create additional dirs if needed LinkedList<File> dirsToCreate = new LinkedList<File>(); File dir = componentJarDeploymentDir.getParentFile(); while(!dir.equals(deploymentDir)) { dirsToCreate.addFirst(dir); // depends on control dependency: [while], data = [none] dir = dir.getParentFile(); // depends on control dependency: [while], data = [none] } for (File f : dirsToCreate) { f.mkdir(); // depends on control dependency: [for], data = [f] } // now create the dir for the component jar if (!componentJarDeploymentDir.mkdir()) { throw new SLEEException("dir for jar " + componentJarFileName + " not created in " + deploymentDir); } } else { throw new SLEEException("dir for jar " + componentJarFileName + " already exists in " + deploymentDir); } extractJar(componentJarFile, componentJarDeploymentDir); // create components from descriptor JarEntry componentDescriptor = null; if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/sbb-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); SbbDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getSbbDescriptorFactory(); List<SbbDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (SbbDescriptorImpl descriptor : descriptors) { PreferredPackagesBuilder.buildPreferredPackages(descriptor, classLoaderDomain); // depends on control dependency: [for], data = [descriptor] SbbComponentImpl component = new SbbComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); // depends on control dependency: [for], data = [none] component.setClassLoaderDomain(classLoaderDomain); // depends on control dependency: [for], data = [none] components.add(component); // depends on control dependency: [for], data = [none] } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/profile-spec-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile 
.getInputStream(componentDescriptor); ProfileSpecificationDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getProfileSpecificationDescriptorFactory(); List<ProfileSpecificationDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (ProfileSpecificationDescriptorImpl descriptor : descriptors) { ProfileSpecificationComponentImpl component = new ProfileSpecificationComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); // depends on control dependency: [for], data = [none] component.setClassLoaderDomain(classLoaderDomain); // depends on control dependency: [for], data = [none] components.add(component); // depends on control dependency: [for], data = [none] } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/library-jar.xml")) != null) { Set<LibraryComponentImpl> libraryComponents = new HashSet<LibraryComponentImpl>(); // we need to gather all URLs for the shared class loader domain // to watch Set<URL> classLoaderDomainURLs = new HashSet<URL>(); classLoaderDomainURLs.add(componentJarDeploymentDir.toURL()); // parse the descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); LibraryDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getLibraryDescriptorFactory(); List<LibraryDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (LibraryDescriptorImpl descriptor : descriptors) { LibraryComponentImpl component = new LibraryComponentImpl( descriptor); for (JarDescriptor mJar : descriptor.getJars()) { classLoaderDomainURLs.add(new File( componentJarDeploymentDir, mJar.getJarName()) .toURL()); // depends on control dependency: [for], data = [none] } // set deploy dir and cl domain component.setDeploymentDir(componentJarDeploymentDir); // depends on control dependency: [for], data = [none] components.add(component); // depends on control dependency: [for], data = [none] libraryComponents.add(component); // depends on control dependency: [for], data = [none] } // create shared url domain URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( classLoaderDomainURLs .toArray(new URL[classLoaderDomainURLs.size()]), Thread.currentThread().getContextClassLoader()); // add it to each component for (LibraryComponentImpl component : libraryComponents) { component.setClassLoaderDomain(classLoaderDomain); // depends on control dependency: [for], data = [component] } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/event-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); EventTypeDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getEventTypeDescriptorFactory(); List<EventTypeDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (EventTypeDescriptorImpl descriptor : descriptors) { EventTypeComponentImpl component = new EventTypeComponentImpl( descriptor); 
component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/resource-adaptor-type-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement.getClassLoaderFactory().newClassLoaderDomain(new URL[] { componentJarDeploymentDir.toURL() }, Thread .currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); ResourceAdaptorTypeDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getResourceAdaptorTypeDescriptorFactory(); List<ResourceAdaptorTypeDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (ResourceAdaptorTypeDescriptorImpl descriptor : descriptors) { ResourceAdaptorTypeComponentImpl component = new ResourceAdaptorTypeComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else if ((componentDescriptor = componentJarFile .getJarEntry("META-INF/resource-adaptor-jar.xml")) != null) { // create class loader domain shared by all components URLClassLoaderDomainImpl classLoaderDomain = componentManagement .getClassLoaderFactory() .newClassLoaderDomain( new URL[] { componentJarDeploymentDir.toURL() }, Thread.currentThread().getContextClassLoader()); // parse descriptor componentDescriptorInputStream = componentJarFile .getInputStream(componentDescriptor); ResourceAdaptorDescriptorFactoryImpl descriptorFactory = componentManagement .getComponentDescriptorFactory() .getResourceAdaptorDescriptorFactory(); List<ResourceAdaptorDescriptorImpl> descriptors = descriptorFactory .parse(componentDescriptorInputStream); // create components for (ResourceAdaptorDescriptorImpl descriptor : descriptors) { ResourceAdaptorComponentImpl component = new ResourceAdaptorComponentImpl( descriptor); component.setDeploymentDir(componentJarDeploymentDir); component.setClassLoaderDomain(classLoaderDomain); components.add(component); } } else { throw new DeploymentException( "No Deployment Descriptor found in the " + componentJarFile.getName() + " entry of a deployable unit."); } } catch (IOException e) { throw new DeploymentException( "failed to parse jar descriptor from " + componentJarFile.getName(), e); } finally { if (componentDescriptorInputStream != null) { try { componentDescriptorInputStream.close(); } catch (IOException e) { logger .error("failed to close inputstream of descriptor for jar " + componentJarFile); } } } // close component jar file try { componentJarFile.close(); } catch (IOException e) { logger.error("failed to close component jar file", e); } // and delete the extracted jar file, we don't need it anymore if (!extractedFile.delete()) { logger.warn("failed to delete " + extractedFile); } return components; } }
public class class_name { public ListElasticsearchVersionsResult withElasticsearchVersions(String... elasticsearchVersions) { if (this.elasticsearchVersions == null) { setElasticsearchVersions(new java.util.ArrayList<String>(elasticsearchVersions.length)); } for (String ele : elasticsearchVersions) { this.elasticsearchVersions.add(ele); } return this; } }
public class class_name { public ListElasticsearchVersionsResult withElasticsearchVersions(String... elasticsearchVersions) { if (this.elasticsearchVersions == null) { setElasticsearchVersions(new java.util.ArrayList<String>(elasticsearchVersions.length)); // depends on control dependency: [if], data = [none] } for (String ele : elasticsearchVersions) { this.elasticsearchVersions.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public void start() { int numHandler = 3; InetSocketAddress socketAddr = new InetSocketAddress(port); Executor executor = Executors.newFixedThreadPool(numHandler); try { hs = HttpServer.create(socketAddr, 0); hs.createContext(HttpserverUtils.HTTPSERVER_CONTEXT_PATH_LOGVIEW, new LogHandler(conf)); hs.setExecutor(executor); hs.start(); } catch (BindException e) { LOG.info("HttpServer has started already!"); hs = null; return; } catch (IOException e) { LOG.error("Failed to start HttpServer", e); hs = null; return; } LOG.info("Success started HttpServer at port:" + port); } }
public class class_name { public void start() { int numHandler = 3; InetSocketAddress socketAddr = new InetSocketAddress(port); Executor executor = Executors.newFixedThreadPool(numHandler); try { hs = HttpServer.create(socketAddr, 0); // depends on control dependency: [try], data = [none] hs.createContext(HttpserverUtils.HTTPSERVER_CONTEXT_PATH_LOGVIEW, new LogHandler(conf)); // depends on control dependency: [try], data = [none] hs.setExecutor(executor); // depends on control dependency: [try], data = [none] hs.start(); // depends on control dependency: [try], data = [none] } catch (BindException e) { LOG.info("HttpServer has started already!"); hs = null; return; } catch (IOException e) { // depends on control dependency: [catch], data = [none] LOG.error("Failed to start HttpServer", e); hs = null; return; } // depends on control dependency: [catch], data = [none] LOG.info("Success started HttpServer at port:" + port); } }
public class class_name { public Glyph.State createStateVar(EntityFeature ef, ObjectFactory factory) { if (ef instanceof FragmentFeature) { FragmentFeature ff = (FragmentFeature) ef; SequenceLocation loc = ff.getFeatureLocation(); if (loc instanceof SequenceInterval) { SequenceInterval si = (SequenceInterval) loc; SequenceSite begin = si.getSequenceIntervalBegin(); SequenceSite end = si.getSequenceIntervalEnd(); if (begin != null && end != null) { Glyph.State state = factory.createGlyphState(); state.setValue("x[" + begin.getSequencePosition() + " - " + end.getSequencePosition() + "]"); return state; } } } else if (ef instanceof ModificationFeature) { ModificationFeature mf = (ModificationFeature) ef; SequenceModificationVocabulary modType = mf.getModificationType(); if (modType != null) { Set<String> terms = modType.getTerm(); if (terms != null && !terms.isEmpty()) { String orig = terms.iterator().next(); String term = orig.toLowerCase(); String s = symbolMapping.containsKey(term) ? symbolMapping.get(term) : orig; Glyph.State state = factory.createGlyphState(); state.setValue(s); SequenceLocation loc = mf.getFeatureLocation(); if (locMapping.containsKey(term)) { state.setVariable(locMapping.get(term)); } if (loc instanceof SequenceSite) { SequenceSite ss = (SequenceSite) loc; if (ss.getSequencePosition() > 0) { state.setVariable( (state.getVariable() != null ? state.getVariable() : "") + ss.getSequencePosition()); } } return state; } } } // Binding features are ignored return null; } }
public class class_name { public Glyph.State createStateVar(EntityFeature ef, ObjectFactory factory) { if (ef instanceof FragmentFeature) { FragmentFeature ff = (FragmentFeature) ef; SequenceLocation loc = ff.getFeatureLocation(); if (loc instanceof SequenceInterval) { SequenceInterval si = (SequenceInterval) loc; SequenceSite begin = si.getSequenceIntervalBegin(); SequenceSite end = si.getSequenceIntervalEnd(); if (begin != null && end != null) { Glyph.State state = factory.createGlyphState(); state.setValue("x[" + begin.getSequencePosition() + " - " + end.getSequencePosition() + "]"); // depends on control dependency: [if], data = [none] return state; // depends on control dependency: [if], data = [none] } } } else if (ef instanceof ModificationFeature) { ModificationFeature mf = (ModificationFeature) ef; SequenceModificationVocabulary modType = mf.getModificationType(); if (modType != null) { Set<String> terms = modType.getTerm(); if (terms != null && !terms.isEmpty()) { String orig = terms.iterator().next(); String term = orig.toLowerCase(); String s = symbolMapping.containsKey(term) ? symbolMapping.get(term) : orig; Glyph.State state = factory.createGlyphState(); state.setValue(s); // depends on control dependency: [if], data = [none] SequenceLocation loc = mf.getFeatureLocation(); if (locMapping.containsKey(term)) { state.setVariable(locMapping.get(term)); // depends on control dependency: [if], data = [none] } if (loc instanceof SequenceSite) { SequenceSite ss = (SequenceSite) loc; if (ss.getSequencePosition() > 0) { state.setVariable( (state.getVariable() != null ? state.getVariable() : "") + ss.getSequencePosition()); // depends on control dependency: [if], data = [none] } } return state; // depends on control dependency: [if], data = [none] } } } // Binding features are ignored return null; } }
public class class_name { private Set<DagNode<T>> getAncestorNodes(Set<DagNode<T>> dagNodes) { Set<DagNode<T>> ancestorNodes = new HashSet<>(); for (DagNode<T> dagNode : dagNodes) { LinkedList<DagNode<T>> nodesToExpand = Lists.newLinkedList(this.getParents(dagNode)); while (!nodesToExpand.isEmpty()) { DagNode<T> nextNode = nodesToExpand.poll(); ancestorNodes.add(nextNode); nodesToExpand.addAll(this.getParents(nextNode)); } } return ancestorNodes; } }
public class class_name { private Set<DagNode<T>> getAncestorNodes(Set<DagNode<T>> dagNodes) { Set<DagNode<T>> ancestorNodes = new HashSet<>(); for (DagNode<T> dagNode : dagNodes) { LinkedList<DagNode<T>> nodesToExpand = Lists.newLinkedList(this.getParents(dagNode)); while (!nodesToExpand.isEmpty()) { DagNode<T> nextNode = nodesToExpand.poll(); ancestorNodes.add(nextNode); // depends on control dependency: [while], data = [none] nodesToExpand.addAll(this.getParents(nextNode)); // depends on control dependency: [while], data = [none] } } return ancestorNodes; } }
public class class_name { @Override public void motionDetected(WebcamMotionEvent wme) { for (Point p : wme.getPoints()) { motionPoints.put(p, 0); } } }
public class class_name { @Override public void motionDetected(WebcamMotionEvent wme) { for (Point p : wme.getPoints()) { motionPoints.put(p, 0); // depends on control dependency: [for], data = [p] } } }
public class class_name { public void addSubResource(final String resourceName, final long nodeId, final InputStream input, final EIdAccessType type) throws JaxRxException { ISession session = null; INodeWriteTrx wtx = null; synchronized (resourceName) { boolean abort; if (mDatabase.existsResource(resourceName)) { abort = false; try { // Creating a new session session = mDatabase.getSession(new SessionConfiguration(resourceName, StandardSettings.KEY)); // Creating a write transaction wtx = new NodeWriteTrx(session, session.beginBucketWtx(), HashKind.Rolling); final boolean exist = wtx.moveTo(nodeId); if (exist) { if (type == EIdAccessType.FIRSTCHILD) { WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASFIRSTCHILD); } else if (type == EIdAccessType.RIGHTSIBLING) { WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASRIGHTSIBLING); } else if (type == EIdAccessType.LASTCHILD) { if (wtx.moveTo(((ITreeStructData)wtx.getNode()).getFirstChildKey())) { long last = wtx.getNode().getDataKey(); while (wtx.moveTo(((ITreeStructData)wtx.getNode()).getRightSiblingKey())) { last = wtx.getNode().getDataKey(); } wtx.moveTo(last); WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASRIGHTSIBLING); } else { throw new JaxRxException(404, NOTFOUND); } } else if (type == EIdAccessType.LEFTSIBLING && wtx.moveTo(((ITreeStructData)wtx.getNode()).getLeftSiblingKey())) { WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASRIGHTSIBLING); } } else { throw new JaxRxException(404, NOTFOUND); } } catch (final JaxRxException exce) { abort = true; throw exce; } catch (final Exception exce) { abort = true; throw new JaxRxException(exce); } finally { try { WorkerHelper.closeWTX(abort, wtx, session); } catch (final TTException exce) { throw new JaxRxException(exce); } } } } } }
public class class_name { public void addSubResource(final String resourceName, final long nodeId, final InputStream input, final EIdAccessType type) throws JaxRxException { ISession session = null; INodeWriteTrx wtx = null; synchronized (resourceName) { boolean abort; if (mDatabase.existsResource(resourceName)) { abort = false; // depends on control dependency: [if], data = [none] try { // Creating a new session session = mDatabase.getSession(new SessionConfiguration(resourceName, StandardSettings.KEY)); // depends on control dependency: [try], data = [none] // Creating a write transaction wtx = new NodeWriteTrx(session, session.beginBucketWtx(), HashKind.Rolling); // depends on control dependency: [try], data = [none] final boolean exist = wtx.moveTo(nodeId); if (exist) { if (type == EIdAccessType.FIRSTCHILD) { WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASFIRSTCHILD); // depends on control dependency: [if], data = [none] } else if (type == EIdAccessType.RIGHTSIBLING) { WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASRIGHTSIBLING); // depends on control dependency: [if], data = [none] } else if (type == EIdAccessType.LASTCHILD) { if (wtx.moveTo(((ITreeStructData)wtx.getNode()).getFirstChildKey())) { long last = wtx.getNode().getDataKey(); while (wtx.moveTo(((ITreeStructData)wtx.getNode()).getRightSiblingKey())) { last = wtx.getNode().getDataKey(); // depends on control dependency: [while], data = [none] } wtx.moveTo(last); // depends on control dependency: [if], data = [none] WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASRIGHTSIBLING); // depends on control dependency: [if], data = [none] } else { throw new JaxRxException(404, NOTFOUND); } } else if (type == EIdAccessType.LEFTSIBLING && wtx.moveTo(((ITreeStructData)wtx.getNode()).getLeftSiblingKey())) { WorkerHelper.shredInputStream(wtx, input, EShredderInsert.ADDASRIGHTSIBLING); // depends on control dependency: [if], data = [none] } } else { throw new JaxRxException(404, NOTFOUND); } } catch (final JaxRxException exce) { abort = true; throw exce; } catch (final Exception exce) { // depends on control dependency: [catch], data = [none] abort = true; throw new JaxRxException(exce); } finally { // depends on control dependency: [catch], data = [none] try { WorkerHelper.closeWTX(abort, wtx, session); // depends on control dependency: [try], data = [none] } catch (final TTException exce) { throw new JaxRxException(exce); } // depends on control dependency: [catch], data = [none] } } } } }
public class class_name { private static InetAddress findNonLoopbackAddress() throws SocketException {
        final Enumeration<NetworkInterface> enumInterfaceAddress = NetworkInterface.getNetworkInterfaces();
        while (enumInterfaceAddress.hasMoreElements()) {
            final NetworkInterface netIf = enumInterfaceAddress.nextElement();
            // Iterate over inet addresses
            final Enumeration<InetAddress> enumInetAddress = netIf.getInetAddresses();
            while (enumInetAddress.hasMoreElements()) {
                final InetAddress address = enumInetAddress.nextElement();
                if (!address.isLoopbackAddress()) {
                    return address;
                }
            }
        }
        return null;
    } }
public class class_name { private static InetAddress findNonLoopbackAddress() throws SocketException {
        final Enumeration<NetworkInterface> enumInterfaceAddress = NetworkInterface.getNetworkInterfaces();
        while (enumInterfaceAddress.hasMoreElements()) {
            final NetworkInterface netIf = enumInterfaceAddress.nextElement();
            // Iterate over inet addresses
            final Enumeration<InetAddress> enumInetAddress = netIf.getInetAddresses();
            while (enumInetAddress.hasMoreElements()) {
                final InetAddress address = enumInetAddress.nextElement();
                if (!address.isLoopbackAddress()) {
                    return address; // depends on control dependency: [if], data = [none]
                }
            }
        }
        return null;
    } }
public class class_name { @Route(method = HttpMethod.GET, uri = "/monitor/logs/loggers") public Result loggers() { LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory(); List<LoggerModel> loggers = new ArrayList<>(); for (Logger logger : context.getLoggerList()) { loggers.add(new LoggerModel(logger)); } return ok(loggers).json(); } }
public class class_name { @Route(method = HttpMethod.GET, uri = "/monitor/logs/loggers") public Result loggers() { LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory(); List<LoggerModel> loggers = new ArrayList<>(); for (Logger logger : context.getLoggerList()) { loggers.add(new LoggerModel(logger)); // depends on control dependency: [for], data = [logger] } return ok(loggers).json(); } }
public class class_name { private List<IndexedQueryAnalyzer<S>.Result> splitIntoSubResults(Filter<S> filter, OrderingList<S> ordering, QueryHints hints) throws SupportException, RepositoryException { // Required for split to work. Filter<S> dnfFilter = filter.disjunctiveNormalForm(); Splitter splitter = new Splitter(ordering, hints); RepositoryException e = dnfFilter.accept(splitter, null); if (e != null) { throw e; } List<IndexedQueryAnalyzer<S>.Result> subResults = splitter.mSubResults; // Check if any sub-result handles nothing. If so, a full scan is the // best option for the entire query and all sub-results merge into a // single sub-result. Any sub-results which filter anything and contain // a join property in the filter are exempt from the merge. This is // because fewer joins are read than if a full scan is performed for // the entire query. The resulting union has both a full scan and an // index scan. IndexedQueryAnalyzer<S>.Result full = null; for (IndexedQueryAnalyzer<S>.Result result : subResults) { if (!result.handlesAnything()) { full = result; break; } if (!result.getCompositeScore().getFilteringScore().hasAnyMatches()) { if (full == null) { // This index is used only for its ordering, and it will be // tentatively selected as the "full scan". If a result is // found doesn't use an index for anything, then it becomes // the "full scan" index. full = result; } } } if (full == null) { // Okay, no full scan needed. return subResults; } List<IndexedQueryAnalyzer<S>.Result> mergedResults = new ArrayList<IndexedQueryAnalyzer<S>.Result>(); for (IndexedQueryAnalyzer<S>.Result result : subResults) { if (result == full) { // Add after everything has been merged into it. continue; } boolean exempt = result.getCompositeScore().getFilteringScore().hasAnyMatches(); if (exempt) { // Must also have a join in the filter to be exempt. List<PropertyFilter<S>> subFilters = PropertyFilterList.get(result.getFilter()); joinCheck: { for (PropertyFilter<S> subFilter : subFilters) { if (subFilter.getChainedProperty().getChainCount() > 0) { // A chain implies a join was followed, so result is exempt. break joinCheck; } } // No joins found, result is not exempt from merging into full scan. exempt = false; } } if (exempt) { mergedResults.add(result); } else { full = full.mergeRemainderFilter(result.getFilter()); } } if (mergedResults.size() == 0) { // Nothing was exempt. Rather than return a result with a dnf // filter, return full scan with a simpler reduced filter. full = full.withRemainderFilter(filter.reduce()); } mergedResults.add(full); return mergedResults; } }
public class class_name { private List<IndexedQueryAnalyzer<S>.Result> splitIntoSubResults(Filter<S> filter, OrderingList<S> ordering, QueryHints hints) throws SupportException, RepositoryException { // Required for split to work. Filter<S> dnfFilter = filter.disjunctiveNormalForm(); Splitter splitter = new Splitter(ordering, hints); RepositoryException e = dnfFilter.accept(splitter, null); if (e != null) { throw e; } List<IndexedQueryAnalyzer<S>.Result> subResults = splitter.mSubResults; // Check if any sub-result handles nothing. If so, a full scan is the // best option for the entire query and all sub-results merge into a // single sub-result. Any sub-results which filter anything and contain // a join property in the filter are exempt from the merge. This is // because fewer joins are read than if a full scan is performed for // the entire query. The resulting union has both a full scan and an // index scan. IndexedQueryAnalyzer<S>.Result full = null; for (IndexedQueryAnalyzer<S>.Result result : subResults) { if (!result.handlesAnything()) { full = result; // depends on control dependency: [if], data = [none] break; } if (!result.getCompositeScore().getFilteringScore().hasAnyMatches()) { if (full == null) { // This index is used only for its ordering, and it will be // tentatively selected as the "full scan". If a result is // found doesn't use an index for anything, then it becomes // the "full scan" index. full = result; // depends on control dependency: [if], data = [none] } } } if (full == null) { // Okay, no full scan needed. return subResults; } List<IndexedQueryAnalyzer<S>.Result> mergedResults = new ArrayList<IndexedQueryAnalyzer<S>.Result>(); for (IndexedQueryAnalyzer<S>.Result result : subResults) { if (result == full) { // Add after everything has been merged into it. continue; } boolean exempt = result.getCompositeScore().getFilteringScore().hasAnyMatches(); if (exempt) { // Must also have a join in the filter to be exempt. List<PropertyFilter<S>> subFilters = PropertyFilterList.get(result.getFilter()); joinCheck: { for (PropertyFilter<S> subFilter : subFilters) { if (subFilter.getChainedProperty().getChainCount() > 0) { // A chain implies a join was followed, so result is exempt. break joinCheck; } } // No joins found, result is not exempt from merging into full scan. exempt = false; } } if (exempt) { mergedResults.add(result); // depends on control dependency: [if], data = [none] } else { full = full.mergeRemainderFilter(result.getFilter()); // depends on control dependency: [if], data = [none] } } if (mergedResults.size() == 0) { // Nothing was exempt. Rather than return a result with a dnf // filter, return full scan with a simpler reduced filter. full = full.withRemainderFilter(filter.reduce()); } mergedResults.add(full); return mergedResults; } }
public class class_name { public synchronized boolean delete(final Job job) {
        // if already shut down or in the process of shutting down, there is no
        // need to remove Jobs as they will never be executed.
        if (shutdown) {
            return false;
        }
        int i = findIndex(job);
        if (i != -1) {
            ScheduledJobEntry se = (ScheduledJobEntry) jobList.remove(i);
            if (se.job != job) {
                // this should never happen
                throw new IllegalStateException("Internal programming error");
            }
            // if the job is the first on the list,
            // then notify the scheduler thread to schedule a new job
            if (i == 0) {
                this.notifyAll();
            }
            return true;
        } else {
            return false;
        }
    } }
public class class_name { public synchronized boolean delete(final Job job) {
        // if already shut down or in the process of shutting down, there is no
        // need to remove Jobs as they will never be executed.
        if (shutdown) {
            return false; // depends on control dependency: [if], data = [none]
        }
        int i = findIndex(job);
        if (i != -1) {
            ScheduledJobEntry se = (ScheduledJobEntry) jobList.remove(i);
            if (se.job != job) {
                // this should never happen
                throw new IllegalStateException("Internal programming error"); // depends on control dependency: [if], data = [none]
            }
            // if the job is the first on the list,
            // then notify the scheduler thread to schedule a new job
            if (i == 0) {
                this.notifyAll(); // depends on control dependency: [if], data = [none]
            }
            return true; // depends on control dependency: [if], data = [none]
        } else {
            return false; // depends on control dependency: [if], data = [none]
        }
    } }
public class class_name { @Override public String[] getResources() { String resource = getResource(); if (resource == null) { return null; } return resource.split(" "); } }
public class class_name { @Override public String[] getResources() { String resource = getResource(); if (resource == null) { return null; // depends on control dependency: [if], data = [none] } return resource.split(" "); } }
public class class_name { public void configLogger() { Map<String, Level> logLevels = new HashMap<String, Level>(); logLevels.put("trace", Level.TRACE); logLevels.put("debug", Level.DEBUG); logLevels.put("info", Level.INFO); logLevels.put("warning", Level.WARNING); logLevels.put("error", Level.ERROR); if( this.get("logging_log_type", "file").equals("file") ){ DateFormat dateFormat = new SimpleDateFormat(this.get("logging_current_date_format","yyyy-MM-dd")); Date date = new Date(); String logFileName = (this.get("logging_file_format", "current_date").equals("current_date")) ? dateFormat.format(date) + ".log" : this.get("logging_file_format", "current_date") + ".log"; Configurator.defaultConfig() .writer(new FileWriter(this.get("logging_file_path", "src/main/java/resources/") + logFileName, (this.get("logging_buffered", "false").equals("true")) ? true : false, (this.get("logging_append", "true").equals("true")) ? true : false)) .level((logLevels.containsKey(this.get("logging_level","debug"))) ? logLevels.get(this.get("logging_level","debug")) : Level.INFO) .activate(); }else if( this.get("logging_log_type", "file").equals("both") ){ DateFormat dateFormat = new SimpleDateFormat(this.get("logging_current_date_format","yyyy-MM-dd")); Date date = new Date(); String logFileName = (this.get("logging_file_format", "current_date").equals("current_date")) ? dateFormat.format(date) + ".log" : this.get("logging_file_format", "current_date") + ".log"; Configurator.defaultConfig() .writer(new ConsoleWriter()) .addWriter(new FileWriter(this.get("logging_file_path", "src/main/java/resources/") + logFileName)) .level((logLevels.containsKey(this.get("logging_level","debug"))) ? logLevels.get(this.get("logging_level","debug")) : Level.INFO) .activate(); }else{ Configurator.defaultConfig() .writer(new ConsoleWriter()) .level((logLevels.containsKey(this.get("logging_level","debug"))) ? logLevels.get(this.get("logging_level","debug")) : Level.INFO) .activate(); } } }
public class class_name { public void configLogger() { Map<String, Level> logLevels = new HashMap<String, Level>(); logLevels.put("trace", Level.TRACE); logLevels.put("debug", Level.DEBUG); logLevels.put("info", Level.INFO); logLevels.put("warning", Level.WARNING); logLevels.put("error", Level.ERROR); if( this.get("logging_log_type", "file").equals("file") ){ DateFormat dateFormat = new SimpleDateFormat(this.get("logging_current_date_format","yyyy-MM-dd")); Date date = new Date(); String logFileName = (this.get("logging_file_format", "current_date").equals("current_date")) ? dateFormat.format(date) + ".log" : this.get("logging_file_format", "current_date") + ".log"; Configurator.defaultConfig() .writer(new FileWriter(this.get("logging_file_path", "src/main/java/resources/") + logFileName, (this.get("logging_buffered", "false").equals("true")) ? true : false, (this.get("logging_append", "true").equals("true")) ? true : false)) .level((logLevels.containsKey(this.get("logging_level","debug"))) ? logLevels.get(this.get("logging_level","debug")) : Level.INFO) .activate(); // depends on control dependency: [if], data = [none] }else if( this.get("logging_log_type", "file").equals("both") ){ DateFormat dateFormat = new SimpleDateFormat(this.get("logging_current_date_format","yyyy-MM-dd")); Date date = new Date(); String logFileName = (this.get("logging_file_format", "current_date").equals("current_date")) ? dateFormat.format(date) + ".log" : this.get("logging_file_format", "current_date") + ".log"; Configurator.defaultConfig() .writer(new ConsoleWriter()) .addWriter(new FileWriter(this.get("logging_file_path", "src/main/java/resources/") + logFileName)) .level((logLevels.containsKey(this.get("logging_level","debug"))) ? logLevels.get(this.get("logging_level","debug")) : Level.INFO) .activate(); // depends on control dependency: [if], data = [none] }else{ Configurator.defaultConfig() .writer(new ConsoleWriter()) .level((logLevels.containsKey(this.get("logging_level","debug"))) ? logLevels.get(this.get("logging_level","debug")) : Level.INFO) .activate(); // depends on control dependency: [if], data = [none] } } }
public class class_name { static SocketChannelUDT newConnectorChannelUDT(final TypeUDT type) { try { return SelectorProviderUDT.from(type).openSocketChannel(); } catch (final IOException e) { throw new ChannelException("failed to open a socket channel", e); } } }
public class class_name { static SocketChannelUDT newConnectorChannelUDT(final TypeUDT type) { try { return SelectorProviderUDT.from(type).openSocketChannel(); // depends on control dependency: [try], data = [none] } catch (final IOException e) { throw new ChannelException("failed to open a socket channel", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void parseMethodSpec(String methodSpec) { methodName = null; methodInterface = null; methodSig = null; if (methodSpec != null) { StringTokenizer tokenizer = new StringTokenizer(methodSpec, ",", true); // Method name if (tokenizer.hasMoreTokens()) { methodName = tokenizer.nextToken(); if (methodName.equals(",")) methodName = null; } // Method interface if (tokenizer.hasMoreTokens()) { methodInterface = tokenizer.nextToken(); if (methodName != null && methodInterface.equals(",")) methodInterface = tokenizer.nextToken(); if (methodInterface.equals(",")) { methodInterface = null; methodSig = ""; } } // Method args if (tokenizer.hasMoreTokens()) { if (methodInterface != null) tokenizer.nextToken(); StringBuffer tmp = new StringBuffer(); while (tokenizer.hasMoreTokens()) { tmp.append(tokenizer.nextToken()); } methodSig = tmp.toString(); } } } }
public class class_name { private void parseMethodSpec(String methodSpec) { methodName = null; methodInterface = null; methodSig = null; if (methodSpec != null) { StringTokenizer tokenizer = new StringTokenizer(methodSpec, ",", true); // Method name if (tokenizer.hasMoreTokens()) { methodName = tokenizer.nextToken(); // depends on control dependency: [if], data = [none] if (methodName.equals(",")) methodName = null; } // Method interface if (tokenizer.hasMoreTokens()) { methodInterface = tokenizer.nextToken(); // depends on control dependency: [if], data = [none] if (methodName != null && methodInterface.equals(",")) methodInterface = tokenizer.nextToken(); if (methodInterface.equals(",")) { methodInterface = null; // depends on control dependency: [if], data = [none] methodSig = ""; // depends on control dependency: [if], data = [none] } } // Method args if (tokenizer.hasMoreTokens()) { if (methodInterface != null) tokenizer.nextToken(); StringBuffer tmp = new StringBuffer(); while (tokenizer.hasMoreTokens()) { tmp.append(tokenizer.nextToken()); // depends on control dependency: [while], data = [none] } methodSig = tmp.toString(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public DescribeLagsResult withLags(Lag... lags) { if (this.lags == null) { setLags(new com.amazonaws.internal.SdkInternalList<Lag>(lags.length)); } for (Lag ele : lags) { this.lags.add(ele); } return this; } }
public class class_name { public DescribeLagsResult withLags(Lag... lags) { if (this.lags == null) { setLags(new com.amazonaws.internal.SdkInternalList<Lag>(lags.length)); // depends on control dependency: [if], data = [none] } for (Lag ele : lags) { this.lags.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }