Dataset Viewer
Auto-converted to Parquet Duplicate
buggy_function
stringlengths
1
391k
fixed_function
stringlengths
0
392k
public static synchronized void printException(String where, Exception e) { if (e instanceof SQLException) { SQLException se = (SQLException) e; if (se.getSQLState() != null) { // SQLSTATE is NULL for a if (se.getSQLState().equals("40001")) System.out.println("deadlocked detected"); if (se.getSQLS...
public static synchronized void printException(String where, Exception e) { if (e instanceof SQLException) { SQLException se = (SQLException) e; if (se.getSQLState() != null) { // SQLSTATE is NULL for a if (se.getSQLState().equals("40001")) System.out.println("deadlocked detected"); if (se.getSQLS...
public MonotonicAppendingLongBuffer(int initialPageCount, int pageSize) { super(initialPageCount, pageSize); averages = new float[pageSize]; }
public MonotonicAppendingLongBuffer(int initialPageCount, int pageSize) { super(initialPageCount, pageSize); averages = new float[initialPageCount]; }
public void testBuild() throws IOException { final String LF = System.getProperty("line.separator"); String input = "oneword" + LF + "twoword" + LF + "threeword"; PlainTextDictionary ptd = new PlainTextDictionary(new StringReader(input)); Directory ramDir = newDirectory(); SpellChecker spellChecke...
public void testBuild() throws IOException { final String LF = System.getProperty("line.separator"); String input = "oneword" + LF + "twoword" + LF + "threeword"; PlainTextDictionary ptd = new PlainTextDictionary(new StringReader(input)); Directory ramDir = newDirectory(); SpellChecker spellChecke...
public void testExtendedResultsCount() throws Exception { assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellCheckComponent.SPELLCHECK_BUILD, "true", "q","bluo", SpellCheckComponent.SPELLCHECK_COUNT,"5", SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS,"false") ,"/spellcheck/suggestions/...
public void testExtendedResultsCount() throws Exception { assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellCheckComponent.SPELLCHECK_BUILD, "true", "q","bluo", SpellCheckComponent.SPELLCHECK_COUNT,"5", SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS,"false") ,"/spellcheck/suggestions/...
public String[] listAll() { ensureOpen(); String[] res; if (writer != null) { res = writer.listAll(); } else { res = entries.keySet().toArray(new String[entries.size()]); // Add the segment name String seg = fileName.substring(0, fileName.indexOf('.')); for (int i = 0; i...
public String[] listAll() { ensureOpen(); String[] res; if (writer != null) { res = writer.listAll(); } else { res = entries.keySet().toArray(new String[entries.size()]); // Add the segment name String seg = IndexFileNames.parseSegmentName(fileName); for (int i = 0; i < ...
public static void prepareClass() throws Exception { LOCAL = FBUtilities.getLocalAddress(); tablename = "Keyspace4"; StorageService.instance.initServer(); // generate a fake endpoint for which we can spoof receiving/sending trees REMOTE = InetAddress.getByName("127.0.0.2"...
public static void prepareClass() throws Exception { LOCAL = FBUtilities.getLocalAddress(); tablename = "Keyspace5"; StorageService.instance.initServer(); // generate a fake endpoint for which we can spoof receiving/sending trees REMOTE = InetAddress.getByName("127.0.0.2"...
public TestOrdValues(String name) { super(name); }
public TestOrdValues(String name) { super(name, false); }
public StorageService() { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.registerMBean(this, new ObjectName("org.apache.cassandra.service:type=StorageService")); } catch (Exception e) { throw new RuntimeException(e)...
public StorageService() { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.registerMBean(this, new ObjectName("org.apache.cassandra.service:type=StorageService")); } catch (Exception e) { throw new RuntimeException(e)...
public void testSearch() throws Exception { Query query = QueryParser.parse("test", "contents", analyzer); Hits hits = searcher.search(query); assertEquals("Find document(s)", 2, hits.length()); }
public void testSearch() throws Exception { Query query = new QueryParser("contents",analyzer).parse("test"); Hits hits = searcher.search(query); assertEquals("Find document(s)", 2, hits.length()); }
public void setScorer(Scorer scorer) { super.setScorer(scorer); // TODO: might be cleaner to lazy-init 'source' and set scorer after? assert readerContext != null; try { Map<String,Object> context = new HashMap<String,Object>(); assert scorer != null; context.put("scorer", new ScoreF...
public void setScorer(Scorer scorer) { super.setScorer(scorer); // TODO: might be cleaner to lazy-init 'source' and set scorer after? assert readerContext != null; try { Map<String,Object> context = new HashMap<String,Object>(); assert scorer != null; context.put("scorer", scorer); ...
public ConcurrentUpdateSolrServer(String solrServerUrl, HttpClient client, int queueSize, int threadCount) { this(solrServerUrl, null, queueSize, threadCount, Executors.newCachedThreadPool( new SolrjNamedThreadFactory("concurrentUpdateScheduler"))); shutdownExecutor = true; }
public ConcurrentUpdateSolrServer(String solrServerUrl, HttpClient client, int queueSize, int threadCount) { this(solrServerUrl, client, queueSize, threadCount, Executors.newCachedThreadPool( new SolrjNamedThreadFactory("concurrentUpdateScheduler"))); shutdownExecutor = true; }
public Token getBootstrapToken() { Range range = getLocalPrimaryRange(); List<DecoratedKey> keys = new ArrayList<DecoratedKey>(); for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) { for (IndexSummary.KeyPosition info: cfs.allIndexPositions()) { ...
public Token getBootstrapToken() { Range range = getLocalPrimaryRange(); List<DecoratedKey> keys = new ArrayList<DecoratedKey>(); for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) { for (IndexSummary.KeyPosition info: cfs.allIndexPositions()) { ...
public void setText(CharacterIterator newText) { start = newText.getBeginIndex(); end = newText.getEndIndex(); text = newText; current = newText.getIndex(); }
public void setText(CharacterIterator newText) { start = newText.getBeginIndex(); end = newText.getEndIndex(); text = newText; current = start; }
public void testUpdateDelteSlices() { DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue(); final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER; Integer[] ids = new Integer[size]; for (int i = 0; i < ids.length; i++) { ids[i] = random().nextInt(); } DeleteSlic...
public void testUpdateDelteSlices() { DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue(); final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER; Integer[] ids = new Integer[size]; for (int i = 0; i < ids.length; i++) { ids[i] = random().nextInt(); } DeleteSlic...
public String toString() { return getFilename() + "/" + StringUtils.join(sections, ",") + "\n\t progress=" + progress + "/" + size + " - " + progress*100/size + "%"; }
public String toString() { return getFilename() + " sections=" + sections.size() + " progress=" + progress + "/" + size + " - " + progress*100/size + "%"; }
public int run(String[] args) throws Exception { /** Option seqOpt = obuilder.withLongName("seqFile").withRequired(false).withArgument( abuilder.withName("seqFile").withMinimum(1).withMaximum(1).create()).withDescription( "The Sequence File containing the Vectors").withShortName("s").create(); ...
public int run(String[] args) throws Exception { /** Option seqOpt = obuilder.withLongName("seqFile").withRequired(false).withArgument( abuilder.withName("seqFile").withMinimum(1).withMaximum(1).create()).withDescription( "The Sequence File containing the Vectors").withShortName("s").create(); ...
private void initParents(IndexReader reader, int first) throws IOException { if (reader.maxDoc() == first) { return; } // it's ok to use MultiFields because we only iterate on one posting list. // breaking it to loop over the leaves() only complicates code for no // apparent gain. D...
private void initParents(IndexReader reader, int first) throws IOException { if (reader.maxDoc() == first) { return; } // it's ok to use MultiFields because we only iterate on one posting list. // breaking it to loop over the leaves() only complicates code for no // apparent gain. D...
private int getConnFromDatabaseName() throws DRDAProtocolException { Properties p = new Properties(); databaseAccessException = null; //if we haven't got the correlation token yet, use session number for drdaID if (session.drdaID == null) session.drdaID = leftBrace + session.connNum + rightBrace; p.put(A...
private int getConnFromDatabaseName() throws DRDAProtocolException { Properties p = new Properties(); databaseAccessException = null; //if we haven't got the correlation token yet, use session number for drdaID if (session.drdaID == null) session.drdaID = leftBrace + session.connNum + rightBrace; p.put(A...
private void showFromZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer coreContainer) throws KeeperException, InterruptedException, UnsupportedEncodingException { SolrZkClient zkClient = coreContainer.getZkController().getZkClient(); String adminFile = getAdminFileFromZooKeepe...
private void showFromZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer coreContainer) throws KeeperException, InterruptedException, UnsupportedEncodingException { SolrZkClient zkClient = coreContainer.getZkController().getZkClient(); String adminFile = getAdminFileFromZooKeepe...
private void parseSQLDTA(DRDAStatement stmt) throws DRDAProtocolException,SQLException { try { parseSQLDTA_work(stmt); } catch (SQLException se) { skipRemainder(false); throw se; } }
private void parseSQLDTA(DRDAStatement stmt) throws DRDAProtocolException,SQLException { try { parseSQLDTA_work(stmt); } catch (SQLException se) { skipRemainder(true); throw se; } }
public int compare(ColumnFamilyStore o1, ColumnFamilyStore o2) { long size1 = o1.getTotalMemtableLiveSize(); long size2 = o2.getTotalMemtableLiveSize(); if (size1 < size2) return -1; i...
public int compare(ColumnFamilyStore o1, ColumnFamilyStore o2) { long size1 = o1.getTotalMemtableLiveSize(); long size2 = o2.getTotalMemtableLiveSize(); if (size1 < size2) return -1; i...
public void addSSTable(SSTableReader sstable) { ssTables_.add(sstable); CompactionManager.instance.submitMinor(this); }
public void addSSTable(SSTableReader sstable) { ssTables_.add(sstable); CompactionManager.instance.submitMinorIfNeeded(this); }
public void testCompactions() throws IOException, ExecutionException, InterruptedException { CompactionManager.instance.disableAutoCompaction(); // this test does enough rows to force multiple block indexes to be used Table table = Table.open(TABLE1); ColumnFamilyStore store = t...
public void testCompactions() throws IOException, ExecutionException, InterruptedException { CompactionManager.instance.disableAutoCompaction(); // this test does enough rows to force multiple block indexes to be used Table table = Table.open(TABLE1); ColumnFamilyStore store = t...
private final SimpleDocValuesFormat defaultDVFormat = SimpleDocValuesFormat.forName("Memory"); // nocommit need simpleNormsFormat }
private final SimpleDocValuesFormat defaultDVFormat = SimpleDocValuesFormat.forName("Lucene41"); // nocommit need simpleNormsFormat }
public List<String> getIncomingFiles(String host) throws IOException { List<String> files = new ArrayList<String>(); for (PendingFile pf : StreamInManager.getIncomingFiles(InetAddress.getByName(host))) { files.add(String.format("%s: %s", pf.getDescriptor().ksname, pf.toString...
public List<String> getIncomingFiles(String host) throws IOException { List<String> files = new ArrayList<String>(); for (PendingFile pf : StreamInManager.getIncomingFiles(InetAddress.getByName(host))) { files.add(String.format("%s: %s", pf.desc.ksname, pf.toString())); ...
public LinkedHashMap<PendingFile, PendingFile> getContextMapping(PendingFile[] remoteFiles) throws IOException { /* Create a local sstable for each remote sstable */ LinkedHashMap<PendingFile, PendingFile> mapping = new LinkedHashMap<PendingFile, PendingFile>(); for (PendingFile remote :...
public LinkedHashMap<PendingFile, PendingFile> getContextMapping(PendingFile[] remoteFiles) throws IOException { /* Create a local sstable for each remote sstable */ LinkedHashMap<PendingFile, PendingFile> mapping = new LinkedHashMap<PendingFile, PendingFile>(); for (PendingFile remote :...
public void geohashRecursiveRandom() throws IOException { init(12); //1. Iterate test with the cluster at some worldly point of interest Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)}; for (Point clusterCenter : clusterCenters) { //2. Iter...
public void geohashRecursiveRandom() throws IOException { init(12); //1. Iterate test with the cluster at some worldly point of interest Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)}; for (Point clusterCenter : clusterCenters) { //2. Iter...
public CoreContainer initialize() throws IOException, ParserConfigurationException, SAXException { CoreContainer cores = null; String instanceDir = SolrResourceLoader.locateInstanceDir(); File fconf = new File(instanceDir, solrConfigFilename == null? "solr.xml": solrConfigFilename); log.info...
public CoreContainer initialize() throws IOException, ParserConfigurationException, SAXException { CoreContainer cores = null; String instanceDir = SolrResourceLoader.locateInstanceDir(); File fconf = new File(instanceDir, solrConfigFilename == null? "solr.xml": solrConfigFilename); log.info...
public static HashFunction[] createHashFunctions(HashType type, int numFunctions) { HashFunction[] hashFunction = new HashFunction[numFunctions]; Random seed = new Random(11); switch (type) { case LINEAR: for (int i = 0; i < numFunctions; i++) { hashFunction[i] = new LinearHash(see...
public static HashFunction[] createHashFunctions(HashType type, int numFunctions) { HashFunction[] hashFunction = new HashFunction[numFunctions]; Random seed = RandomUtils.getRandom(11); switch (type) { case LINEAR: for (int i = 0; i < numFunctions; i++) { hashFunction[i] = new Lin...
public List<TokenRange> describe_ring(String keyspace)throws InvalidRequestException { if (!DatabaseDescriptor.getNonSystemTables().contains(keyspace)) throw new InvalidRequestException("There is no ring for the keyspace: " + keyspace); List<TokenRange> ranges = new ArrayList<TokenRa...
public List<TokenRange> describe_ring(String keyspace)throws InvalidRequestException { if (keyspace == null || !DatabaseDescriptor.getNonSystemTables().contains(keyspace)) throw new InvalidRequestException("There is no ring for the keyspace: " + keyspace); List<TokenRange> ranges = n...
public void testSortedBytes() throws IOException { DocValuesType type = DocValuesType.SORTED; final Directory d = newDirectory(); IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); IndexWriter w = new IndexWriter(d, cfg); int numDocs...
public void testSortedBytes() throws IOException { DocValuesType type = DocValuesType.SORTED; final Directory d = newDirectory(); IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); IndexWriter w = new IndexWriter(d, cfg); int numDocs...
public void testAddDocument() throws Exception { Document testDoc = new Document(); DocHelper.setupDoc(testDoc); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); writer.addDocument(testDoc); writer.commit(); SegmentInfoPerCommit...
public void testAddDocument() throws Exception { Document testDoc = new Document(); DocHelper.setupDoc(testDoc); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); writer.addDocument(testDoc); writer.commit(); SegmentInfoPerCommit...
public void testFloatNorms() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); Similarity provider = new MySimProvider(); config.setSimilarity(provider); RandomIndexWriter writer = new Ra...
public void testFloatNorms() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); Similarity provider = new MySimProvider(); config.setSimilarity(provider); RandomIndexWriter writer = new Ra...
public void test() throws Exception { NumericDocValues fooNorms = MultiSimpleDocValues.simpleNormValues(reader, "foo"); assertNotNull(fooNorms); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).longValue(), fooNorms.get(i)); } }
public void test() throws Exception { NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo"); assertNotNull(fooNorms); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).longValue(), fooNorms.get(i)); } }
public void test() throws Exception { NumericDocValues fooNorms = MultiSimpleDocValues.simpleNormValues(reader, "foo"); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).intValue(), fooNorms.get(i) & 0xff); } }
public void test() throws Exception { NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo"); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).intValue(), fooNorms.get(i) & 0xff); } }
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); SimpleFragListBuilder sflb = new SimpleFragListBu...
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); SimpleFragListBuilder sflb = new SimpleFragListBu...
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); assertEquals( 1, stack.termList.size() ); assertEquals( "d(6,7,3)", stack.pop().toString() ); }
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); assertEquals( 1, stack.termList.size() ); assertEquals( "d(9,10,3)", stack.pop().toString() ); }
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); assertEquals( 1, fpl.phraseList.size() ); ass...
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); assertEquals( 1, fpl.phraseList.size() ); ass...
private void unCache(String fileName) throws IOException { // Only let one thread uncache at a time; this only // happens during commit() or close(): synchronized(uncacheLock) { if (VERBOSE) { System.out.println("nrtdir.unCache name=" + fileName); } if (!cache.fileExists(fileName...
private void unCache(String fileName) throws IOException { // Only let one thread uncache at a time; this only // happens during commit() or close(): synchronized(uncacheLock) { if (VERBOSE) { System.out.println("nrtdir.unCache name=" + fileName); } if (!cache.fileExists(fileName...
public void testDerby3000() throws SQLException, IOException { ResultSet rs; // Derby-3000 make sure we process only valid TableType values and // process them correctly. DatabaseMetaData dmd = getConnection().getMetaData(); Statement s = createStatement(); s.executeUpdate("CREATE TABLE APP.TAB (i int)")...
public void testDerby3000() throws SQLException, IOException { ResultSet rs; // Derby-3000 make sure we process only valid TableType values and // process them correctly. DatabaseMetaData dmd = getConnection().getMetaData(); Statement s = createStatement(); s.executeUpdate("CREATE TABLE APP.TAB (i int)")...
public void testClobCreateLocatorSP() throws SQLException { //initialize the locator to a default value. int locator = -1; //call the stored procedure to return the created locator. CallableStatement cs = prepareCall ("? = CALL SYSIBM.CLOBCREATELOCATOR()"); cs.re...
public void testClobCreateLocatorSP() throws SQLException { //initialize the locator to a default value. int locator = -1; //call the stored procedure to return the created locator. CallableStatement cs = prepareCall ("? = CALL SYSIBM.CLOBCREATELOCATOR()"); cs.re...
public void testFragmentCreation() throws Exception { Bundle exportBundle = makeBundleWithExports("export.bundle", "1.2.3", "export.package;version=\"1.0.0\";singleton:=true"); Dictionary fragmentHeaders = makeFragmentFromExportBundle(exportBundle) .getHeaders(); ...
public void testFragmentCreation() throws Exception { Bundle exportBundle = makeBundleWithExports("export.bundle", "1.2.3", "export.package;version=\"1.0.0\";uses:=\"foo.jar,bar.jar\";singleton:=true"); Dictionary fragmentHeaders = makeFragmentFromExportBundle(exportBundle) ...
public static String docValuesId(String segmentsName, int fieldId) { return segmentsName + "-" + fieldId; }
public static String docValuesId(String segmentsName, int fieldId) { return segmentsName + "_" + fieldId; }
private boolean[] expandBooleanArray(boolean[] array, int newLength) { if (array == null) { boolean[] newArray = new boolean[newLength]; return newArray; } if (array.length < newLength) { boolean[] newArray = new boolean[newLength]; System.arra...
private boolean[] expandBooleanArray(boolean[] array, int newLength) { if (array == null) { boolean[] newArray = new boolean[newLength]; return newArray; } if (array.length < newLength) { boolean[] newArray = new boolean[newLength]; System.arra...
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException { // we need to do any retries before commit... servers.blockUntilFinished(); doRetriesIfNeeded(); UpdateRequest uReq = new UpdateRequest(); uReq.setParams(params); ...
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException { // we need to do any retries before commit... servers.blockUntilFinished(); doRetriesIfNeeded(); UpdateRequest uReq = new UpdateRequest(); uReq.setParams(params); ...
public static void validateKeyspaceNotYetExisting(String newKsName) throws InvalidRequestException { // keyspace names must be unique case-insensitively because the keyspace name becomes the directory // where we store CF sstables. Names that differ only in case would thus cause problems on ...
public static void validateKeyspaceNotYetExisting(String newKsName) throws InvalidRequestException { // keyspace names must be unique case-insensitively because the keyspace name becomes the directory // where we store CF sstables. Names that differ only in case would thus cause problems on ...
public void testTriggersWithClobColumn() throws Exception { insertDefaultData(); Statement stmt = createStatement(); stmt.executeUpdate( "CREATE TABLE testClobTriggerA (a CLOB(400k), b int)"); stmt.executeUpdate( "CREATE TABLE testClobTriggerB (a CLOB...
public void testTriggersWithClobColumn() throws Exception { insertDefaultData(); Statement stmt = createStatement(); stmt.executeUpdate( "CREATE TABLE testClobTriggerA (a CLOB(400k), b int)"); stmt.executeUpdate( "CREATE TABLE testClobTriggerB (a CLOB...
public static Test suite() { String testName = "InterruptResilienceTest"; if (! isSunJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. ...
public static Test suite() { String testName = "InterruptResilienceTest"; if (isIBMJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. i...
public static Test suite() { if (! isSunJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. if (getSystemProperty("java.version").startsWith...
public static Test suite() { if (isIBMJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. if (getSystemProperty("java.version").startsWith("...
public void map(LongWritable userID, VectorWritable vectorWritable, OutputCollector<LongWritable, RecommendedItemsWritable> output, Reporter reporter) throws IOException { if (usersToRecommendFor != null && !usersToRecommendFor.contains(userID.get())) { ...
public void map(LongWritable userID, VectorWritable vectorWritable, OutputCollector<LongWritable, RecommendedItemsWritable> output, Reporter reporter) throws IOException { if (usersToRecommendFor != null && !usersToRecommendFor.contains(userID.get())) { ...
public int docID() { return docIt >= upto ? NO_MORE_DOCS : docs[docIt]; }
public int docID() { return docIt < 0 ? -1 : docIt >= upto ? NO_MORE_DOCS : docs[docIt]; }
private boolean mergeClosestClusters(int numUsers, List<FastIDSet> clusters, boolean done) throws TasteException { // We find a certain number of closest clusters... List<ClusterClusterPair> queue = findClosestClusters(numUsers, clusters); // The first one is definitely the closest pair in existence ...
private boolean mergeClosestClusters(int numUsers, List<FastIDSet> clusters, boolean done) throws TasteException { // We find a certain number of closest clusters... List<ClusterClusterPair> queue = findClosestClusters(numUsers, clusters); // The first one is definitely the closest pair in existence ...
public static long getTotalBytes(Iterable<SSTableReader> sstables) { long sum = 0; for (SSTableReader sstable : sstables) { sum += sstable.length(); } return sum; }
public static long getTotalBytes(Iterable<SSTableReader> sstables) { long sum = 0; for (SSTableReader sstable : sstables) { sum += sstable.onDiskLength(); } return sum; }
public CompressedSegmentedFile(String path, CompressionMetadata metadata) { super(path, metadata.dataLength); this.metadata = metadata; }
public CompressedSegmentedFile(String path, CompressionMetadata metadata) { super(path, metadata.dataLength, metadata.compressedFileLength); this.metadata = metadata; }
private static List<Pair<SSTableReader, Long>> createSSTableAndLengthPairs(Collection<SSTableReader> collection) { List<Pair<SSTableReader, Long>> tableLengthPairs = new ArrayList<Pair<SSTableReader, Long>>(); for(SSTableReader table: collection) tableLengthPairs.add(new Pair<SSTable...
private static List<Pair<SSTableReader, Long>> createSSTableAndLengthPairs(Collection<SSTableReader> collection) { List<Pair<SSTableReader, Long>> tableLengthPairs = new ArrayList<Pair<SSTableReader, Long>>(); for(SSTableReader table: collection) tableLengthPairs.add(new Pair<SSTable...
public final void maybeRefreshBlocking() throws IOException, InterruptedException { ensureOpen(); // Ensure only 1 thread does reopen at once refreshLock.lock(); try { doMaybeRefresh(); } finally { refreshLock.lock(); } }
public final void maybeRefreshBlocking() throws IOException, InterruptedException { ensureOpen(); // Ensure only 1 thread does reopen at once refreshLock.lock(); try { doMaybeRefresh(); } finally { refreshLock.unlock(); } }
public Sorter newSorter(Entry[] arr) { return new ArrayTimSorter<Entry>(arr, ArrayUtil.<Entry>naturalComparator(), random().nextInt(arr.length)); }
public Sorter newSorter(Entry[] arr) { return new ArrayTimSorter<Entry>(arr, ArrayUtil.<Entry>naturalComparator(), _TestUtil.nextInt(random(), 0, arr.length)); }
protected synchronized int addCategoryDocument(CategoryPath categoryPath, int length, int parent) throws CorruptIndexException, IOException { // Before Lucene 2.9, position increments >=0 were supported, so we // added 1 to parent to allow the parent -1 ...
protected synchronized int addCategoryDocument(CategoryPath categoryPath, int length, int parent) throws CorruptIndexException, IOException { // Before Lucene 2.9, position increments >=0 were supported, so we // added 1 to parent to allow the parent -1 ...
public void testPerFieldCodec() throws Exception { final int NUM_DOCS = atLeast(173); if (VERBOSE) { System.out.println("TEST: NUM_DOCS=" + NUM_DOCS); } MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider IndexWriter w = new...
public void testPerFieldCodec() throws Exception { final int NUM_DOCS = atLeast(173); if (VERBOSE) { System.out.println("TEST: NUM_DOCS=" + NUM_DOCS); } MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider IndexWriter w = new...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void assertFromTestData(int codePointTable[]) throws Exception { if (VERBOSE) { System.out.println("TEST: codePointTable=" + codePointTable); } InputStream stream = getClass().getResourceAsStream("fuzzyTestData.txt"); BufferedReader reader = new BufferedReader(new InputStreamReader(stream...
public void assertFromTestData(int codePointTable[]) throws Exception { if (VERBOSE) { System.out.println("TEST: codePointTable=" + codePointTable); } InputStream stream = getClass().getResourceAsStream("fuzzyTestData.txt"); BufferedReader reader = new BufferedReader(new InputStreamReader(stream...
public void testRandomPhrases() throws Exception { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); List<List<String>> docs...
public void testRandomPhrases() throws Exception { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); List<List<String>> docs...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); fieldName = random.nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); fieldName = random.nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer...
public void setUp() throws Exception { super.setUp(); // we generate aweful regexps: good for testing. // but for preflex codec, the test can be very slow, so use less iterations. numIterations = Codec.getDefault().getName().equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : atLeast(50); dir = newDirec...
public void setUp() throws Exception { super.setUp(); // we generate aweful regexps: good for testing. // but for preflex codec, the test can be very slow, so use less iterations. numIterations = Codec.getDefault().getName().equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : atLeast(50); dir = newDirec...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void testCustomEncoder() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); config.setSimilarity(new CustomNormEncodingSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(random, dir, co...
public void testCustomEncoder() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); config.setSimilarity(new CustomNormEncodingSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(random, dir, co...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy()); config.setSi...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy()); config.setSi...
public void testRollingUpdates() throws Exception { final MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider final LineFileDocs docs = new LineFileDocs(random); //provider.register(new MemoryCodec()); if ( (!"Lucene3x".equals(Codec.getDef...
public void testRollingUpdates() throws Exception { final MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider final LineFileDocs docs = new LineFileDocs(random); //provider.register(new MemoryCodec()); if ( (!"Lucene3x".equals(Codec.getDef...
public void testRandomStoredFields() throws IOException { Directory dir = newDirectory(); Random rand = random; RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20))); //w.w.setUseC...
public void testRandomStoredFields() throws IOException { Directory dir = newDirectory(); Random rand = random; RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20))); //w.w.setUseC...
End of preview. Expand in Data Studio

YAML Metadata Warning: empty or missing YAML metadata in repo card

Check out the documentation for more information.

This is the retrieval dataset used in the paper "ReAPR: Automatic Program Repair via Retrieval-Augmented Large Language Models".

Downloads last month
28