Columns: Diff (string, lengths 5 to 2k); FaultInducingLabel (int64, values 0 or 1)
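The rows below alternate between a Diff cell (a code change flattened onto one line) and its FaultInducingLabel on the following line. As a minimal sketch of how this preview could be paired back into records, assuming the dump is saved as plain text with that two-lines-per-record layout (the filename and helper name here are illustrative, not part of any published tooling for this dataset):

```python
# Sketch only: pair each flattened Diff line with the 0/1 label on the next line.
# Assumes a plain-text dump where non-empty lines alternate diff, label, diff, label, ...
from typing import List, Tuple


def read_pairs(path: str) -> List[Tuple[str, int]]:
    with open(path, encoding="utf-8") as f:
        lines = [line.rstrip("\n") for line in f if line.strip()]
    # Walk the lines two at a time: the diff text, then its fault-inducing label.
    return [(diff, int(label)) for diff, label in zip(lines[0::2], lines[1::2])]


if __name__ == "__main__":
    # Hypothetical filename for a saved copy of this preview.
    for diff, label in read_pairs("diff_fault_preview.txt")[:3]:
        print(label, diff[:80])
```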
* * @return the list of RuleFinder objects * * @param the list of RuleFinder objects * * @return the namespace for the xml attribute which indicates which class is to be plugged in. * * @return the namespace for the xml attribute which indicates which class is to be plugged in. * * @return the namespace for the xml attribute which indicates which previous plugin declaration should be used. * * @return the namespace for the xml attribute which indicates which previous plugin declaration should be used.
0
import java.lang.annotation.Repeatable; * or an array if the value is multi-valued. You can apply this annotation on a component class multiple times * (it's a java8 repeatable property). * Eight primitive types are supported: * @Component * @Property(name="p1", value="v") // String value type (scalar) * @Property(name="p2", value={"s1", "s2"}) // Array of Strings * @Property(name="service.ranking", intValue=10) // Integer value type (scalar) * @Property(name="p3", intValue={1,2}) // Array of Integers * @Property(name="p3", value="1", type=Long.class) // Long value (scalar) @Target( { ElementType.TYPE, ElementType.ANNOTATION_TYPE }) @Repeatable(RepeatableProperty.class) * @return the long value(s). * @return the double value(s). * @return the float value(s). * @return the int value(s). * @return the byte value(s). * @return the char value(s). * A Boolean value or an array of Boolean values. * @return the boolean value(s). * @return the short value(s).
0
* @author Enver Haase
0
/** Parse tree for {@code CREATE EXTERNAL TABLE} statement. */
0
if (elem.getWindows().size() <= 1 || (!RequiresWindowAccess.class.isAssignableFrom(fn.getClass()) && context.sideInputs.getAll().isEmpty())) {
0
/* * Copyright 2005 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.felix.mosgi.console.ifc; import java.awt.Component; import java.beans.PropertyChangeListener; public interface Plugin extends PropertyChangeListener { public String getName(); public Component getGUI(); public void registerServicePlugin(); public void unregisterServicePlugin(); public String pluginLocation(); public static final String NEW_NODE_SELECTED="newNodeSelected"; public static final String NEW_NODE_READY="newNodeReady"; public static final String NEW_NODE_CONNECTION="newNodeConnection"; public static final String EMPTY_NODE="emptyNode"; public static final String PLUGIN_ADDED="pluggin_added"; public static final String PLUGIN_REMOVED="pluggin_removed"; public static final String PLUGIN_ACTIVATED="pluggin_activated"; }
0
/** Utilities useful when executing a pipeline on a single machine. */
1
// Make sure no splits occurred in the table
0
* Implementations are encouraged to return * the URI that was initially requested.
0
package org.apache.aurora.scheduler.offers; import org.apache.aurora.scheduler.offers.OfferManager.OfferReturnDelay;
0
protected final JexlEngine JEXL; this(name, new JexlEngine()); } protected JexlTestCase(String name, JexlEngine jexl) { JEXL = jexl; this(new JexlEngine()); } protected JexlTestCase(JexlEngine jexl) { JEXL = jexl;
0
assertDatabaseIsRegistered(dbName); //There should be just one entity per dbname runCommand("drop database " + dbName); runCommand("create database " + dbName);
0
import org.apache.accumulo.core.tabletserver.thrift.ReplicationFailedException; import org.apache.commons.lang.NotImplementedException; @Override public boolean replicateData(TInfo tinfo, TCredentials credentials, String file, long offset, long count) throws ThriftSecurityException, ReplicationFailedException, TException { // TODO ACCUMULO-2581 throw new UnsupportedOperationException("Not yet implemented"); }
0
import org.apache.accumulo.core.security.tokens.AuthenticationToken;
0
if (ba.isRemovalPending()) { ba.purge(); } // TODO: OSGi R4.3 - We can probably do this in a more efficient way once // BundleWiring.getCapabilities() returns the proper result. We probably // Won't even need this method.
0
import static org.junit.Assert.assertEquals;
0
* @author $Id$
0
* @author <a href="mailto:[email protected]">Bj&ouml;rn L&uuml;tkemeier</a> * @version CVS $Id: CopletDataReferenceFieldHandler.java,v 1.3 2003/07/10 13:16:56 cziegeler Exp $
0
if (!isZKServerRunning()) {
0
extends LegacyReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> {
0
pojoization.disableAnnotationProcessing();
0
if ( id == null || id.length() == 0 ) { throw new IllegalArgumentException( "ObjectClassDefinition ID must not be null or empty" ); } MetaTypeProvider mtp = ( MetaTypeProvider ) this.metaTypeProviders.get( id ); if ( mtp == null ) { throw new IllegalArgumentException( "No ObjectClassDefinition for id=" + id ); } ObjectClassDefinition ocd = mtp.getObjectClassDefinition( id, locale ); if ( ocd == null ) { throw new IllegalArgumentException( "No localized ObjectClassDefinition for id=" + id ); } return ocd;
0
public class SharedOutputBuffer extends ExpandableBuffer implements ContentOutputBuffer {
0
* Copyright 2016-2017 Seznam.cz, a.s.
0
/***************************************************************************** * Copyright (C) The Apache Software Foundation. All rights reserved. * * ------------------------------------------------------------------------- * * This software is published under the terms of the Apache Software License * * version 1.1, a copy of which has been included with this distribution in * * the LICENSE file. * *****************************************************************************/ package org.apache.batik.test; /** * Interface for classes that can process <tt>TestReport</tt> instances * This allows different applications to use the same <tt>TestReport</tt> * for different purposes, such as generating an XML output or * emailing a test result summary. * * @author <a href="mailto:[email protected]">Vincent Hardy</a> * @version $Id$ */ public interface TestReportProcessor { /** * Generic error code. Takes no parameter. */ public static final String INTERNAL_ERROR = "TestReportProcessor.error.code.internal.error"; /** * Requests the processor to process the input * <tt>TestReport</tt> instances. Note that a processor * should make its own copy of any resource described * by a <tt>TestReport</tt> such as files, as these * may be transient resources. In particular, a * processor should not keep a reference to the * input <tt>TestReport</tt> */ public void processReport(TestReport report) throws TestException; }
0
* Autogenerated by Thrift Compiler (0.9.1) import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; @SuppressWarnings("all") public class TCredentials implements org.apache.thrift.TBase<TCredentials, TCredentials._Fields>, java.io.Serializable, Cloneable, Comparable<TCredentials> { @Override lastComparison = Boolean.valueOf(isSetPrincipal()).compareTo(other.isSetPrincipal()); lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.principal, other.principal); lastComparison = Boolean.valueOf(isSetTokenClassName()).compareTo(other.isSetTokenClassName()); lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tokenClassName, other.tokenClassName); lastComparison = Boolean.valueOf(isSetToken()).compareTo(other.isSetToken()); lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.token, other.token); lastComparison = Boolean.valueOf(isSetInstanceId()).compareTo(other.isSetInstanceId()); lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.instanceId, other.instanceId);
0
&& (!"user".equals(update.getName().getOrigin().toLowerCase())
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
"Content-Type: application/octet-stream\r\n" + "Content-Type: application/octet-stream\r\n" +
0
} catch (AccumuloException e) { // expected, ignore }
0
* @version CVS $Id$ private final static Vector returnNames; return RealPathModule.returnNames.iterator();
0
import org.apache.sshd.agent.SshAgentFactory; protected SshAgentFactory agentFactory; public SshAgentFactory getAgentFactory() { return agentFactory; } public void setAgentFactory(SshAgentFactory agentFactory) { this.agentFactory = agentFactory; }
0
if ("id".equals(NName)) {
0
* Copyright 2000-2009 The Apache Software Foundation
0
package org.apache.bcel.classfile; import org.apache.bcel.Const;
1
import java.util.Map; import com.google.common.collect.ImmutableMap; private static final TaskGroupKey GROUP_KEY = TaskGroupKey.from(TASK_A.getAssignedTask().getTask()); private static final Map<String, TaskGroupKey> NO_RESERVATION = ImmutableMap.of(); private BiCache<String, TaskGroupKey> reservations; reservations = createMock(new Clazz<BiCache<String, TaskGroupKey>>() { }); bind(new TypeLiteral<BiCache<String, TaskGroupKey>>() { }).toInstance(reservations); Map<String, TaskGroupKey> reservationMap) { reservationMap)); expectAsMap(NO_RESERVATION); expectAsMap(NO_RESERVATION); expectNoReservation(TASK_A); expectPreemptorCall(TASK_A, Optional.<String>absent()); expectAsMap(NO_RESERVATION); expectNoReservation(TASK_A); expectAsMap(ImmutableMap.of(SLAVE_ID, GROUP_KEY)); expectAssigned(TASK_A, ImmutableMap.of(SLAVE_ID, GROUP_KEY)).andReturn(true); expectAsMap(NO_RESERVATION); expectAsMap(NO_RESERVATION); reservations.remove(SLAVE_ID, TaskGroupKey.from(TASK_A.getAssignedTask().getTask())); expectAsMap(NO_RESERVATION); expectAsMap(NO_RESERVATION); reservations.put(slaveId, TaskGroupKey.from(task.getAssignedTask().getTask())); return expect(reservations.getByValue(TaskGroupKey.from(task.getAssignedTask().getTask()))) .andReturn(ImmutableSet.of(slaveId)); } private IExpectationSetters<?> expectNoReservation(IScheduledTask task) { return expect(reservations.getByValue(TaskGroupKey.from(task.getAssignedTask().getTask()))) .andReturn(ImmutableSet.of()); private IExpectationSetters<?> expectAsMap(Map<String, TaskGroupKey> map) { return expect(reservations.asMap()).andReturn(map);
0
@Override
0
public ConstantDouble(final double bytes) { public ConstantDouble(final ConstantDouble c) { ConstantDouble(final DataInput file) throws IOException { public void accept( final Visitor v ) { public final void dump( final DataOutputStream file ) throws IOException { public final void setBytes( final double bytes ) { public Object getConstantValue( final ConstantPool cp ) {
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
import org.apache.aurora.scheduler.cron.CronException; import org.apache.aurora.scheduler.cron.CronJobManager; import org.apache.aurora.scheduler.cron.CrontabEntry; import org.apache.aurora.scheduler.cron.SanitizedCronJob; sanitized.getJobConfig().getInstanceCount(), JobKeys.canonicalString(job.getKey()))); cronJobManager.updateJob(SanitizedCronJob.fromUnsanitized(job)); } catch (CronException | TaskDescriptionException e) { return response.setResponseCode(AUTH_FAILED).setMessage(e.getMessage()); return response.setResponseCode(OK).setMessage("Cron run started."); } catch (CronException e) { return response.setResponseCode(INVALID_REQUEST) JobSummary summary = new JobSummary() ? summary : summary.setNextCronRunMs( cronPredictor.predictNextRun(CrontabEntry.parse(job.getCronSchedule())).getTime()); error = Optional.of( "No jobs found for key " + JobKeys.canonicalString(existingJob.getKey())); error = Optional.of( "CAS compare failed for " + JobKeys.canonicalString(storedJob.getKey())); error = Optional.of("Multiple jobs found for key " + JobKeys.canonicalString(existingJob.getKey()));
0
public IntegerArrayConverter(final Object defaultValue) { public Object convert(final Class type, final Object value) { final String[] values = (String[]) value; final int[] results = new int[values.length]; } catch (final Exception e) { final List list = parseElements(value.toString()); final int[] results = new int[list.size()]; } catch (final Exception e) {
0
Collections.unmodifiableSet(EnumSet.of(ClientChannelEvent.OPENED, ClientChannelEvent.CLOSED));
0
import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace(); grace();
0
Map<String,String> siteConfig = new HashMap<String,String>(); cfg.setSiteConfig(siteConfig);
0
import org.apache.hc.core5.pool.PoolReusePolicy; this(getDefaultRegistry(), null, null ,null, PoolReusePolicy.LIFO, null, timeToLive); this(socketFactoryRegistry, connFactory, null, dnsResolver, PoolReusePolicy.LIFO, null, TimeValue.NEG_ONE_MILLISECONDS); final PoolReusePolicy poolReusePolicy, connFactory, poolReusePolicy, connPoolListener, timeToLive); final PoolReusePolicy poolReusePolicy, DEFAULT_MAX_CONNECTIONS_PER_ROUTE, DEFAULT_MAX_TOTAL_CONNECTIONS, timeToLive, poolReusePolicy, connPoolListener);
0
import org.xml.sax.Attributes; public Tag createTag(String name, Attributes attributes) {
0
package org.apache.http.impl.io;
0
package org.apache.bcel.verifier.structurals; import org.apache.bcel.Const; import org.apache.bcel.classfile.Constant; import org.apache.bcel.classfile.ConstantClass; import org.apache.bcel.classfile.ConstantDouble; import org.apache.bcel.classfile.ConstantFloat; import org.apache.bcel.classfile.ConstantInteger; import org.apache.bcel.classfile.ConstantLong; import org.apache.bcel.classfile.ConstantString; import org.apache.bcel.generic.*;
1
* any, must include the following acknowledgement: * Alternately, this acknowledgement may appear in the software itself, * if and wherever such third-party acknowledgements normally appear. * permission of the Apache Software Foundation.
0
params, request[i], host,
0
expect(storageUtil.storeProvider.getTaskStore()).andReturn(storageUtil.taskStore); storageUtil.expectRead(); storageUtil.expectTaskFetch(TASK_ID, makeTask(TASK_ID, ASSIGNED)); assertEquals(0, timedOutTaskCounter.intValue()); expect(storageUtil.storeProvider.getTaskStore()).andReturn(storageUtil.taskStore); storageUtil.expectRead(); storageUtil.expectTaskFetch(TASK_ID, makeTask(TASK_ID, ASSIGNED)); storageUtil.expectWrite(); assertEquals(1, timedOutTaskCounter.intValue()); expect(storageUtil.storeProvider.getTaskStore()).andReturn(storageUtil.taskStore); storageUtil.expectRead(); storageUtil.expectTaskFetch(TASK_ID); assertEquals(0, timedOutTaskCounter.intValue()); } private static IScheduledTask makeTask(String taskId, ScheduleStatus status) { return makeTask(taskId, status, 0L); assertEquals(0, timedOutTaskCounter.intValue());
0
int cmd = req.getUByte();
0
+ " " + MoreObjects.firstNonNull(LEVELS.get(record.getLevel()),
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
1
import org.apache.sshd.agent.SshAgentFactory; import org.apache.sshd.common.PropertyResolverUtils; String channelType = PropertyResolverUtils.getStringProperty(session, SshAgentFactory.PROXY_AUTH_CHANNEL_TYPE, SshAgentFactory.DEFAULT_PROXY_AUTH_CHANNEL_TYPE); buffer.putString(channelType);
0
* * package org.apache.ambari.logsearch.conf; import org.apache.solr.client.solrj.SolrClient; import java.util.EnumMap; import java.util.Map; public class SolrClientsHolder { private Map<CollectionType, SolrClient> clientsMap = new EnumMap<>(CollectionType.class); public enum CollectionType { SERVICE, AUDIT, HISTORY } public SolrClientsHolder() { clientsMap.put(CollectionType.SERVICE, null); clientsMap.put(CollectionType.AUDIT, null); clientsMap.put(CollectionType.HISTORY, null); public SolrClient getSolrClient(CollectionType type) { return clientsMap.get(type); public synchronized void setSolrClient(SolrClient solrClient, CollectionType type) { clientsMap.put(type, solrClient);
0
import java.util.Objects; this.parser = Objects.requireNonNull(parser, "No public keys extractor");
0
return creator.connect(hostConfig);
0
package org.apache.aurora.scheduler.quota; import org.apache.aurora.scheduler.base.JobKeys; import org.apache.aurora.scheduler.base.Query; import org.apache.aurora.scheduler.base.Tasks; import org.apache.aurora.scheduler.quota.QuotaManager.QuotaManagerImpl; import org.apache.aurora.scheduler.state.JobFilter; import org.apache.aurora.scheduler.storage.Storage; import org.apache.aurora.scheduler.storage.entities.IJobConfiguration; import org.apache.aurora.scheduler.storage.entities.IJobKey; import org.apache.aurora.scheduler.storage.entities.IQuota; import org.apache.aurora.scheduler.storage.entities.ITaskConfig; import static org.apache.aurora.scheduler.quota.QuotaComparisonResult.Result.INSUFFICIENT_QUOTA;
0
import org.apache.beam.vendor.grpc.v1p13p1.com.google.protobuf.ByteString; import org.apache.beam.vendor.grpc.v1p13p1.io.grpc.Server; import org.apache.beam.vendor.grpc.v1p13p1.io.grpc.inprocess.InProcessServerBuilder; import org.apache.beam.vendor.grpc.v1p13p1.io.grpc.stub.StreamObserver; import org.apache.beam.vendor.grpc.v1p13p1.io.grpc.util.MutableHandlerRegistry;
0
Status status = entity.getStatus(); if (status == null) { status = Status.ACTIVE; } Referenceable referenceable = new Referenceable(entity.getGuid(), entity.getTypeName(), status.name(),
0
PCollection<KeyedWorkItem<K, V>>, PCollection<KV<K, Iterable<V>>>, PCollection<KeyedWorkItem<K, V>>, PCollection<KV<K, Iterable<V>>>, PCollection<KeyedWorkItem<K, V>>, PCollection<KV<K, Iterable<V>>>,
0
private boolean isSupportDeleteViaUI; isSupportDeleteViaUI = service.isSupportDeleteViaUI(); public boolean isSupportDeleteViaUI(){ return isSupportDeleteViaUI; }
0
if (dataSource != null && !options.isIsolated()) { } else if (dataSource != null) {
0
import org.apache.felix.sigil.common.repository.IRepositoryChangeListener; import org.apache.felix.sigil.common.repository.RepositoryChangeEvent;
0
import static org.apache.accumulo.fate.util.UtilWaitThread.sleepUninterruptibly;
1
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/primitives/adapters/io/Attic/CharIteratorReader.java,v 1.2 2003/08/31 17:21:21 scolebourne Exp $ * any, must include the following acknowledgement: * Alternately, this acknowledgement may appear in the software itself, * if and wherever such third-party acknowledgements normally appear. * @version $Revision: 1.2 $ $Date: 2003/08/31 17:21:21 $
0
final void writeAttribute( Attribute attribute, String anchor ) { final void writeAttribute( Attribute attribute, String anchor, int method_number ) {
0
p.apply(TextIO.read().from("gs://apache-beam-samples/shakespeare/*"))
0
Collections.emptyList(), false, false, false, false)).andReturn(stageContainer).once(); RequestStageContainer requestStages = provider.updateHostComponents(null, requests, requestProperties, runSmokeTest, false, false);
0
* @param referenceType Type of the reference * @param fieldType Type of the field final String referenceType, final String fieldType, createMethod(writer, className, referenceName, referenceType, fieldName, fieldType, true); createMethod(writer, className, referenceName, referenceType, fieldName, fieldType, false); throw new SCRDescriptorException("Unable to add methods to " + className, referenceType, e); private static void createMethod(final ClassWriter cw, final String className, final String referenceName, final String referenceTypeName, final String fieldName, final String fieldTypeName, final boolean bind) { final org.objectweb.asm.Type referenceType = org.objectweb.asm.Type.getType("L" + referenceTypeName.replace('.', '/') + ";"); final org.objectweb.asm.Type fieldType = org.objectweb.asm.Type.getType("L" + fieldTypeName.replace('.', '/') + ";"); final MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PROTECTED, methodName, "(" + referenceType.toString() + ")V", null, null); mv.visitVarInsn(referenceType.getOpcode(Opcodes.ILOAD), 1); mv.visitFieldInsn(Opcodes.PUTFIELD, className.replace('.', '/'), fieldName, fieldType.toString()); mv.visitFieldInsn(Opcodes.GETFIELD, className.replace('.', '/'), fieldName, fieldType.toString()); mv.visitFieldInsn(Opcodes.PUTFIELD, className.replace('.', '/'), fieldName, fieldType.toString());
0
import org.apache.sshd.util.test.Utils; try (SshClient client = setupTestClient()) { try (SshClient client = setupTestClient()) {
0
* Copyright (c) 2002, 2003 The Apache Software Foundation. All rights * @version $Revision: 1.4 $ $Date: 2003/02/12 07:56:19 $
0
package org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.reinterpret; import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlExpression; import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlPrimitive; * {@code BeamSqlExpression} for Reinterpret call. * <p>Currently supported conversions: * - {@link SqlTypeName#DATETIME_TYPES} to {@code BIGINT}; * - {@link SqlTypeName#INTEGER} to {@code BIGINT}; private static final Reinterpreter REINTERPRETER = Reinterpreter.builder() .withConversion(DatetimeReinterpretConversions.TIME_TO_BIGINT) .withConversion(DatetimeReinterpretConversions.DATE_TYPES_TO_BIGINT) .withConversion(IntegerReinterpretConversions.INTEGER_TYPES_TO_BIGINT) .build(); && REINTERPRETER.canConvert(opType(0), SqlTypeName.BIGINT); return REINTERPRETER.convert( SqlTypeName.BIGINT, operands.get(0).evaluate(inputRow, window));
0
reduceFnRunner.onTimers(workItem.timersIterable());
0
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
0
package org.apache.accumulo.examples.simple.filedata;
0
public Requirement(String namespace, R4Directive[] directives, R4Attribute[] attributes)
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//validator/src/test/org/apache/commons/validator/TestValidator.java,v 1.11 2003/08/26 15:18:56 rleland Exp $ * $Revision: 1.11 $ * $Date: 2003/08/26 15:18:56 $ * @version $Revision: 1.11 $ $Date: 2003/08/26 15:18:56 $ * Checks if field is positive assuming it is an integer * * @param value The value validation is being performed on. * @param field Description of the field to be evaluated * @return boolean If the integer field is greater than zero, returns * true, otherwise returns false. */ public static boolean validatePositive(Object bean , Field field) { String value = ValidatorUtils.getValueAsString(bean, field.getProperty()); return GenericTypeValidator.formatInt(value).intValue() > 0; } /**
0
Collection<Resource> relatedResources = rc.findRelatedResources(resource); m_session.setRelatedResources(resource, relatedResources); for (Resource relatedResource : relatedResources) if (m_session.isValidRelatedResource(relatedResource)) // This resource is a valid related resource; // populate it now, consider it optional toPopulate.addFirst(relatedResource);
0
import com.google.api.services.bigquery.model.TableRow; import com.google.common.collect.ImmutableList; import java.util.List;
0
* Generated data to check that the wire format has not changed. To regenerate, see {@link * org.apache.beam.sdk.coders.PrintBase64Encodings}. private static final List<String> TEST_ENCODINGS = Arrays.asList("AAAAAA", "AAAAAv____8PA2ZvbwFoZWxsbw"); @Rule public ExpectedException thrown = ExpectedException.none();
1
HashMap<String,String> properties = new HashMap<>();
0
public void getTimeoutFactor() {
0
package org.apache.accumulo.shell.commands; import org.apache.accumulo.shell.Shell; import org.apache.accumulo.shell.Shell.Command;
0
* Creates an instance of {@code AuthSchemeBase} with the given challenge
0
import com.google.api.services.bigquery.model.Table; import com.google.api.services.bigquery.model.TableDataInsertAllRequest; import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows; import java.util.List; import java.util.Map; import java.util.stream.Collectors; * <p>Execute a query with retries: * <p>Create a new dataset in one project: * <p>Delete a dataset in one project, included its all tables: * * <p>Create a new table * * <pre>{@code [ * client.createNewTable(projectId, datasetId, newTable) * ]}</pre> * * <p>Insert data into table * * <pre>{@code [ * client.insertDataToTable(projectId, datasetId, tableName, rows) * ]}</pre> LOG.debug("Exceptions caught when creating new dataset: " + e.getMessage()); LOG.debug("Exception caught when deleting table: " + e.getMessage()); LOG.debug("Exceptions caught when listing all tables: " + e.getMessage()); LOG.debug("Exceptions caught when deleting dataset: " + e.getMessage()); } } public void createNewTable(String projectId, String datasetId, Table newTable) { try { this.bqClient.tables().insert(projectId, datasetId, newTable).execute(); LOG.info("Successfully created new table: " + newTable.getId()); } catch (Exception e) { LOG.debug("Exceptions caught when creating new table: " + e.getMessage()); } } public void insertDataToTable( String projectId, String datasetId, String tableName, List<Map<String, Object>> rows) { try { List<Rows> dataRows = rows.stream().map(row -> new Rows().setJson(row)).collect(Collectors.toList()); this.bqClient .tabledata() .insertAll( projectId, datasetId, tableName, new TableDataInsertAllRequest().setRows(dataRows)) .execute(); LOG.info("Successfully inserted data into table : " + tableName); } catch (Exception e) { LOG.debug("Exceptions caught when inserting data: " + e.getMessage());
0
private static final String SERVER_STALE_CONFIG_CACHE_EXPIRATION_DEFAULT = "600";
0
String host = requestURI.getHost(); if (host == null) {
0
log.info("Tablet seems to be already assigned to {}",
0
new PoolableConnectionFactory(connectionFactory); poolableConnectionFactory.setDefaultReadOnly(false); poolableConnectionFactory.setDefaultAutoCommit(true);
0
JobConfiguration service = createJob("serviceJob" + i, mesosUser); service.getTaskConfig().setIsService(true); submitJob(service);
0
import org.slf4j.Logger;
0
* * * /** * * * */ project.setProperty("ant.file", /** if ( name.equals("fileScanner") ) { ); if ( name.equals("setProperty") ) { );
1
import java.util.ArrayList; import java.util.Collection; * @param <T> the type of nodes this class can handle public class NodeAddData<T> private final T parent; * Stores a list with the names of nodes that are on the path between the * parent node and the new node. private final List<String> pathNodes; private final String newNodeName; private final boolean attribute; * Creates a new instance of {@code NodeAddData} and initializes it. * @param parentNode the parent node of the add operation * @param newName the name of the new node * @param isAttr flag whether the new node is an attribute * @param intermediateNodes an optional collection with path nodes public NodeAddData(T parentNode, String newName, boolean isAttr, Collection<String> intermediateNodes) parent = parentNode; newNodeName = newName; attribute = isAttr; pathNodes = createPathNodes(intermediateNodes); public T getParent() * complete branch is to be added at once. For instance, imagine that there return pathNodes; * Creates the list with path nodes. Handles null input. * @param intermediateNodes the nodes passed to the constructor * @return an unmodifiable list of path nodes private static List<String> createPathNodes( Collection<String> intermediateNodes) if (intermediateNodes == null) { return Collections.emptyList(); } else return Collections.unmodifiableList(new ArrayList<String>( intermediateNodes));
0
Assert.assertEquals(serverFactory.getZooKeeperServer()
0
if (info.multiplicity.upper > 1 && !( info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP || info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) { throw new MetadataException( String.format("A multiplicty of more than one requires a collection type for attribute '%s'", info.name)); }
0
boolean isLastFilePart = no == 1;
0
package org.apache.commons.ognl.test; import org.apache.commons.ognl.Ognl; import org.apache.commons.ognl.OgnlContext; import org.apache.commons.ognl.OgnlException; import org.apache.commons.ognl.OgnlRuntime; import org.apache.commons.ognl.SimpleNode;
0
package org.apache.accumulo.monitor.rest.tservers;
0