Example source code for the Java class org.apache.lucene.util.PrintStreamInfoStream

Project: search    File: ShardSearchingTestBase.java
public NodeState(Random random, int nodeID, int numNodes) throws IOException {
  myNodeID = nodeID;
  dir = newFSDirectory(createTempDir("ShardSearchingTestBase"));
  // TODO: set warmer
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
  iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
  if (VERBOSE) {
    iwc.setInfoStream(new PrintStreamInfoStream(System.out));
  }
  writer = new IndexWriter(dir, iwc);
  mgr = new SearcherManager(writer, true, null);
  searchers = new SearcherLifetimeManager();

  // Init w/ 0s... caller above will do initial
  // "broadcast" by calling initSearcher:
  currentNodeVersions = new long[numNodes];
}
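
The InfoStream installed above is just a message sink that IndexWriter consults while indexing. A minimal standalone sketch of that contract, using only the PrintStreamInfoStream API (the "IW" component name and the message text are illustrative):

import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;

public class InfoStreamSketch {
  public static void main(String[] args) throws Exception {
    // Same sink the test installs when VERBOSE is set: diagnostics go to stdout.
    InfoStream infoStream = new PrintStreamInfoStream(System.out);

    // IndexWriter guards each diagnostic with isEnabled(component) and then
    // calls message(component, text); PrintStreamInfoStream prints the text
    // prefixed with the component name plus some bookkeeping (timestamp, thread).
    if (infoStream.isEnabled("IW")) {
      infoStream.message("IW", "hello from the info stream");
    }

    infoStream.close(); // a no-op for System.out/System.err
  }
}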
Project: search    File: TestDocInverterPerFieldErrorInfo.java
@Test
public void testInfoStreamGetsFieldName() throws Exception {
  Directory dir = newDirectory();
  IndexWriter writer;
  IndexWriterConfig c = new IndexWriterConfig(TEST_VERSION_CURRENT, new ThrowingAnalyzer());
  final ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
  PrintStream infoPrintStream = new PrintStream(infoBytes, true, IOUtils.UTF_8);
  PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
  c.setInfoStream(printStreamInfoStream);
  writer = new IndexWriter(dir, c);
  Document doc = new Document();
  doc.add(newField("distinctiveFieldName", "aaa ", storedTextType));
  try {
    writer.addDocument(doc);
    fail("Failed to fail.");
  } catch(BadNews badNews) {
    infoPrintStream.flush();
    String infoStream = new String(infoBytes.toByteArray(), IOUtils.UTF_8);
    assertTrue(infoStream.contains("distinctiveFieldName"));
  }

  writer.close();
  dir.close();
}
Project: search    File: TestDocInverterPerFieldErrorInfo.java
@Test
public void testNoExtraNoise() throws Exception {
  Directory dir = newDirectory();
  IndexWriter writer;
  IndexWriterConfig c = new IndexWriterConfig(TEST_VERSION_CURRENT, new ThrowingAnalyzer());
  final ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
  PrintStream infoPrintStream = new PrintStream(infoBytes, true, IOUtils.UTF_8);
  PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
  c.setInfoStream(printStreamInfoStream);
  writer = new IndexWriter(dir, c);
  Document doc = new Document();
  doc.add(newField("boringFieldName", "aaa ", storedTextType));
  try {
    writer.addDocument(doc);
  } catch(BadNews badNews) {
    fail("Unwanted exception");
  }
  infoPrintStream.flush();
  String infoStream = new String(infoBytes.toByteArray(), IOUtils.UTF_8);
  assertFalse(infoStream.contains("boringFieldName"));

  writer.close();
  dir.close();
}
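
Outside the Lucene test framework, the capture technique both tests rely on reduces to pointing PrintStreamInfoStream at an in-memory stream. A minimal sketch (class name and message text are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;

import org.apache.lucene.util.PrintStreamInfoStream;

public class CaptureInfoStreamSketch {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
    PrintStream infoPrintStream = new PrintStream(infoBytes, true, "UTF-8");
    PrintStreamInfoStream infoStream = new PrintStreamInfoStream(infoPrintStream);

    // Anything reported through the info stream ends up in the byte buffer.
    infoStream.message("IW", "indexing field distinctiveFieldName");

    infoPrintStream.flush();
    String captured = new String(infoBytes.toByteArray(), StandardCharsets.UTF_8);
    System.out.println(captured.contains("distinctiveFieldName")); // prints: true
  }
}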
Project: NYBC    File: ShardSearchingTestBase.java
public NodeState(Random random, int nodeID, int numNodes) throws IOException {
  myNodeID = nodeID;
  dir = newFSDirectory(_TestUtil.getTempDir("ShardSearchingTestBase"));
  // TODO: set warmer
  IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
  iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
  if (VERBOSE) {
    iwc.setInfoStream(new PrintStreamInfoStream(System.out));
  }
  writer = new IndexWriter(dir, iwc);
  mgr = new SearcherManager(writer, true, null);
  searchers = new SearcherLifetimeManager();

  // Init w/ 0s... caller above will do initial
  // "broadcast" by calling initSearcher:
  currentNodeVersions = new long[numNodes];
}
Project: Maskana-Gestor-de-Conocimiento    File: ShardSearchingTestBase.java
public NodeState(Random random, int nodeID, int numNodes) throws IOException {
  myNodeID = nodeID;
  dir = newFSDirectory(_TestUtil.getTempDir("ShardSearchingTestBase"));
  // TODO: set warmer
  IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
  iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
  if (VERBOSE) {
    iwc.setInfoStream(new PrintStreamInfoStream(System.out));
  }
  writer = new IndexWriter(dir, iwc);
  mgr = new SearcherManager(writer, true, null);
  searchers = new SearcherLifetimeManager();

  // Init w/ 0s... caller above will do initial
  // "broadcast" by calling initSearcher:
  currentNodeVersions = new long[numNodes];
}
Project: lams    File: IndexWriterConfig.java
/** 
 * Convenience method that uses {@link PrintStreamInfoStream}.  Must not be null.
 */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  if (printStream == null) {
    throw new IllegalArgumentException("printStream must not be null");
  }
  return setInfoStream(new PrintStreamInfoStream(printStream));
}
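
Both overloads install the same kind of sink. A small hypothetical helper making the equivalence explicit; the IndexWriterConfig is assumed to be created elsewhere with whatever constructor the Lucene version at hand requires:

import java.io.PrintStream;

import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.PrintStreamInfoStream;

final class InfoStreamConfigHelper {
  private InfoStreamConfigHelper() {}

  // Hypothetical helper: the convenience overload above is shorthand for
  // wrapping the stream yourself, i.e. config.setInfoStream(out) and the
  // line below install the same kind of PrintStreamInfoStream.
  static IndexWriterConfig enableVerboseLogging(IndexWriterConfig config, PrintStream out) {
    return config.setInfoStream(new PrintStreamInfoStream(out));
  }
}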
Project: search    File: IndexWriterConfig.java
/** 
 * Convenience method that uses {@link PrintStreamInfoStream}.  Must not be null.
 */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  if (printStream == null) {
    throw new IllegalArgumentException("printStream must not be null");
  }
  return setInfoStream(new PrintStreamInfoStream(printStream));
}
Project: search    File: TestSolrIndexConfig.java
private void checkIndexWriterConfig(LiveIndexWriterConfig iwc) {

    assertEquals(123, iwc.getMaxThreadStates());
    assertTrue(iwc.getInfoStream() instanceof PrintStreamInfoStream);
    assertTrue(iwc.getMergePolicy().getClass().toString(),
               iwc.getMergePolicy() instanceof RandomMergePolicy);

  }
Project: NYBC    File: SolrIndexWriter.java
private static InfoStream toInfoStream(SolrIndexConfig config) throws IOException {
  String infoStreamFile = config.infoStreamFile;
  if (infoStreamFile != null) {
    File f = new File(infoStreamFile);
    File parent = f.getParentFile();
    if (parent != null) parent.mkdirs();
    FileOutputStream fos = new FileOutputStream(f, true);
    return new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
  } else {
    return InfoStream.NO_OUTPUT;
  }
}
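
The same file-backed wiring works outside Solr as well. A minimal sketch, assuming an illustrative log path, with an explicit close() so the file handle is released:

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;

import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;

public class FileInfoStreamSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative path; mirrors the appending, auto-flushing UTF-8 PrintStream above.
    File f = new File("infostream.log");
    File parent = f.getParentFile();
    if (parent != null) parent.mkdirs();

    InfoStream infoStream =
        new PrintStreamInfoStream(new PrintStream(new FileOutputStream(f, true), true, "UTF-8"));
    try {
      infoStream.message("IW", "diagnostics appended to infostream.log");
    } finally {
      // For non-System streams, PrintStreamInfoStream.close() closes the
      // underlying PrintStream and releases the file handle.
      infoStream.close();
    }
  }
}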
Project: search-core    File: SolrIndexWriter.java
private static InfoStream toInfoStream(SolrIndexConfig config) throws IOException {
  String infoStreamFile = config.infoStreamFile;
  if (infoStreamFile != null) {
    File f = new File(infoStreamFile);
    File parent = f.getParentFile();
    if (parent != null) parent.mkdirs();
    FileOutputStream fos = new FileOutputStream(f, true);
    return new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
  } else {
    return InfoStream.NO_OUTPUT;
  }
}
Project: read-open-source-code    File: IndexWriterConfig.java
/** 
 * Convenience method that uses {@link PrintStreamInfoStream}.  Must not be null.
 */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  if (printStream == null) {
    throw new IllegalArgumentException("printStream must not be null");
  }
  return setInfoStream(new PrintStreamInfoStream(printStream));
}
Project: read-open-source-code    File: IndexWriterConfig.java
/** 
 * Convenience method that uses {@link PrintStreamInfoStream}.  Must not be null.
 */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  if (printStream == null) {
    throw new IllegalArgumentException("printStream must not be null");
  }
  return setInfoStream(new PrintStreamInfoStream(printStream));
}
Project: read-open-source-code    File: IndexWriterConfig.java
/** 
 * Convenience method that uses {@link PrintStreamInfoStream}.  Must not be null.
 */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  if (printStream == null) {
    throw new IllegalArgumentException("printStream must not be null");
  }
  return setInfoStream(new PrintStreamInfoStream(printStream));
}
Project: Maskana-Gestor-de-Conocimiento    File: IndexWriterConfig.java
/** 
 * Convenience method that uses {@link PrintStreamInfoStream}.  Must not be null.
 */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  if (printStream == null) {
    throw new IllegalArgumentException("printStream must not be null");
  }
  return setInfoStream(new PrintStreamInfoStream(printStream));
}
Project: search    File: SolrIndexConfig.java
/**
 * Constructs a SolrIndexConfig which parses the Lucene related config params in solrconfig.xml
 * @param solrConfig the overall SolrConfig object
 * @param prefix the XPath prefix for which section to parse (mandatory)
 * @param def a SolrIndexConfig instance to pick default values from (optional)
 */
@SuppressWarnings("deprecation")
public SolrIndexConfig(SolrConfig solrConfig, String prefix, SolrIndexConfig def)  {
  if (prefix == null) {
    prefix = "indexConfig";
    log.debug("Defaulting to prefix \""+prefix+"\" for index configuration");
  }

  if (def == null) {
    def = new SolrIndexConfig(solrConfig);
  }

  // sanity check: this will throw an error for us if there is more than one
  // config section
  Object unused = solrConfig.getNode(prefix, false);

  luceneVersion = solrConfig.luceneMatchVersion;

  // Assert that end-of-life parameters or syntax is not in our config.
  // Warn for luceneMatchVersions before LUCENE_3_6, fail fast above
  assertWarnOrFail("The <mergeScheduler>myclass</mergeScheduler> syntax is no longer supported in solrconfig.xml. Please use syntax <mergeScheduler class=\"myclass\"/> instead.",
      !((solrConfig.getNode(prefix+"/mergeScheduler",false) != null) && (solrConfig.get(prefix+"/mergeScheduler/@class",null) == null)),
      true);
  assertWarnOrFail("The <mergePolicy>myclass</mergePolicy> syntax is no longer supported in solrconfig.xml. Please use syntax <mergePolicy class=\"myclass\"/> instead.",
      !((solrConfig.getNode(prefix+"/mergePolicy",false) != null) && (solrConfig.get(prefix+"/mergePolicy/@class",null) == null)),
      true);
  assertWarnOrFail("The <luceneAutoCommit>true|false</luceneAutoCommit> parameter is no longer valid in solrconfig.xml.",
      solrConfig.get(prefix+"/luceneAutoCommit", null) == null,
      true);

  defaultMergePolicyClassName = def.defaultMergePolicyClassName;
  useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
  effectiveUseCompountFileSetting = useCompoundFile;
  maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
  maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
  maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
  mergeFactor=solrConfig.getInt(prefix+"/mergeFactor",def.mergeFactor);
  ramBufferSizeMB = solrConfig.getDouble(prefix+"/ramBufferSizeMB", def.ramBufferSizeMB);

  writeLockTimeout=solrConfig.getInt(prefix+"/writeLockTimeout", def.writeLockTimeout);
  lockType=solrConfig.get(prefix+"/lockType", def.lockType);

  mergeSchedulerInfo = getPluginInfo(prefix + "/mergeScheduler", solrConfig, def.mergeSchedulerInfo);
  mergePolicyInfo = getPluginInfo(prefix + "/mergePolicy", solrConfig, def.mergePolicyInfo);

  termIndexInterval = solrConfig.getInt(prefix + "/termIndexInterval", def.termIndexInterval);

  boolean infoStreamEnabled = solrConfig.getBool(prefix + "/infoStream", false);
  if(infoStreamEnabled) {
    String infoStreamFile = solrConfig.get(prefix + "/infoStream/@file", null);
    if (infoStreamFile == null) {
      log.info("IndexWriter infoStream solr logging is enabled");
      infoStream = new LoggingInfoStream();
    } else {
      log.warn("IndexWriter infoStream file log is enabled: " + infoStreamFile +
               "\nThis feature is deprecated. Remove @file from <infoStream> to output messages to solr's logfile");
      File f = new File(infoStreamFile);
      File parent = f.getParentFile();
      if (parent != null) parent.mkdirs();
      try {
        FileOutputStream fos = new FileOutputStream(f, true);
        infoStream = new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
      } catch (Exception e) {
        log.error("Could not create info stream for file " + infoStreamFile, e);
      }
    }
  }
  mergedSegmentWarmerInfo = getPluginInfo(prefix + "/mergedSegmentWarmer", solrConfig, def.mergedSegmentWarmerInfo);
  if (mergedSegmentWarmerInfo != null && solrConfig.nrtMode == false) {
    throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since nrtMode is false");
  }

  checkIntegrityAtMerge = solrConfig.getBool(prefix + "/checkIntegrityAtMerge", def.checkIntegrityAtMerge);
}
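
LoggingInfoStream, chosen in the no-@file branch above, is Solr's bridge from the InfoStream API to its log. A minimal custom InfoStream in the same spirit (not the actual Solr class; java.util.logging is used here to stay dependency-free):

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.lucene.util.InfoStream;

// Sketch of an InfoStream that forwards IndexWriter diagnostics to a logger.
public class JulInfoStream extends InfoStream {
  private static final Logger log = Logger.getLogger(JulInfoStream.class.getName());

  @Override
  public void message(String component, String message) {
    log.info("[" + component + "] " + message);
  }

  @Override
  public boolean isEnabled(String component) {
    // Only produce messages when the target logger would actually record them.
    return log.isLoggable(Level.INFO);
  }

  @Override
  public void close() throws IOException {
    // Nothing to release; the logger outlives this object.
  }
}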
Project: NYBC    File: IndexWriterConfig.java
/** Convenience method that uses {@link PrintStreamInfoStream} */
public IndexWriterConfig setInfoStream(PrintStream printStream) {
  return setInfoStream(printStream == null ? InfoStream.NO_OUTPUT : new PrintStreamInfoStream(printStream));
}
Project: read-open-source-code    File: SolrIndexConfig.java
/**
 * Constructs a SolrIndexConfig which parses the Lucene related config params in solrconfig.xml
 * @param solrConfig the overall SolrConfig object
 * @param prefix the XPath prefix for which section to parse (mandatory)
 * @param def a SolrIndexConfig instance to pick default values from (optional)
 */
@SuppressWarnings("deprecation")
public SolrIndexConfig(SolrConfig solrConfig, String prefix, SolrIndexConfig def)  {
  if (prefix == null) {
    prefix = "indexConfig";
    log.debug("Defaulting to prefix \""+prefix+"\" for index configuration");
  }

  if (def == null) {
    def = new SolrIndexConfig(solrConfig);
  }

  // sanity check: this will throw an error for us if there is more than one
  // config section
  Object unused = solrConfig.getNode(prefix, false);

  luceneVersion = solrConfig.luceneMatchVersion;

  // Assert that end-of-life parameters or syntax is not in our config.
  // Warn for luceneMatchVersions before LUCENE_36, fail fast above
  assertWarnOrFail("The <mergeScheduler>myclass</mergeScheduler> syntax is no longer supported in solrconfig.xml. Please use syntax <mergeScheduler class=\"myclass\"/> instead.",
      !((solrConfig.getNode(prefix+"/mergeScheduler",false) != null) && (solrConfig.get(prefix+"/mergeScheduler/@class",null) == null)),
      true);
  assertWarnOrFail("The <mergePolicy>myclass</mergePolicy> syntax is no longer supported in solrconfig.xml. Please use syntax <mergePolicy class=\"myclass\"/> instead.",
      !((solrConfig.getNode(prefix+"/mergePolicy",false) != null) && (solrConfig.get(prefix+"/mergePolicy/@class",null) == null)),
      true);
  assertWarnOrFail("The <luceneAutoCommit>true|false</luceneAutoCommit> parameter is no longer valid in solrconfig.xml.",
      solrConfig.get(prefix+"/luceneAutoCommit", null) == null,
      true);

  defaultMergePolicyClassName = def.defaultMergePolicyClassName;
  useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
  effectiveUseCompountFileSetting = useCompoundFile;
  maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
  maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
  maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
  mergeFactor=solrConfig.getInt(prefix+"/mergeFactor",def.mergeFactor);
  ramBufferSizeMB = solrConfig.getDouble(prefix+"/ramBufferSizeMB", def.ramBufferSizeMB);

  writeLockTimeout=solrConfig.getInt(prefix+"/writeLockTimeout", def.writeLockTimeout);
  lockType=solrConfig.get(prefix+"/lockType", def.lockType);

  mergeSchedulerInfo = getPluginInfo(prefix + "/mergeScheduler", solrConfig, def.mergeSchedulerInfo);
  mergePolicyInfo = getPluginInfo(prefix + "/mergePolicy", solrConfig, def.mergePolicyInfo);

  termIndexInterval = solrConfig.getInt(prefix + "/termIndexInterval", def.termIndexInterval);

  boolean infoStreamEnabled = solrConfig.getBool(prefix + "/infoStream", false);
  if(infoStreamEnabled) {
    String infoStreamFile = solrConfig.get(prefix + "/infoStream/@file", null);
    if (infoStreamFile == null) {
      log.info("IndexWriter infoStream solr logging is enabled");
      infoStream = new LoggingInfoStream();
    } else {
      log.warn("IndexWriter infoStream file log is enabled: " + infoStreamFile +
               "\nThis feature is deprecated. Remove @file from <infoStream> to output messages to solr's logfile");
      File f = new File(infoStreamFile);
      File parent = f.getParentFile();
      if (parent != null) parent.mkdirs();
      try {
        FileOutputStream fos = new FileOutputStream(f, true);
        infoStream = new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
      } catch (Exception e) {
        log.error("Could not create info stream for file " + infoStreamFile, e);
      }
    }
  }
  mergedSegmentWarmerInfo = getPluginInfo(prefix + "/mergedSegmentWarmer", solrConfig, def.mergedSegmentWarmerInfo);
  if (mergedSegmentWarmerInfo != null && solrConfig.nrtMode == false) {
    throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since nrtMode is false");
  }
}
Project: read-open-source-code    File: SolrIndexConfig.java
/**
 * Constructs a SolrIndexConfig which parses the Lucene related config params in solrconfig.xml
 * @param solrConfig the overall SolrConfig object
 * @param prefix the XPath prefix for which section to parse (mandatory)
 * @param def a SolrIndexConfig instance to pick default values from (optional)
 */
@SuppressWarnings("deprecation")
public SolrIndexConfig(SolrConfig solrConfig, String prefix, SolrIndexConfig def)  {
  if (prefix == null) {
    prefix = "indexConfig";
    log.debug("Defaulting to prefix \""+prefix+"\" for index configuration");
  }

  if (def == null) {
    def = new SolrIndexConfig(solrConfig);
  }

  // sanity check: this will throw an error for us if there is more than one
  // config section
  Object unused = solrConfig.getNode(prefix, false);

  luceneVersion = solrConfig.luceneMatchVersion;

  // Assert that end-of-life parameters or syntax is not in our config.
  // Warn for luceneMatchVersions before LUCENE_3_6, fail fast above
  assertWarnOrFail("The <mergeScheduler>myclass</mergeScheduler> syntax is no longer supported in solrconfig.xml. Please use syntax <mergeScheduler class=\"myclass\"/> instead.",
      !((solrConfig.getNode(prefix+"/mergeScheduler",false) != null) && (solrConfig.get(prefix+"/mergeScheduler/@class",null) == null)),
      true);
  assertWarnOrFail("The <mergePolicy>myclass</mergePolicy> syntax is no longer supported in solrconfig.xml. Please use syntax <mergePolicy class=\"myclass\"/> instead.",
      !((solrConfig.getNode(prefix+"/mergePolicy",false) != null) && (solrConfig.get(prefix+"/mergePolicy/@class",null) == null)),
      true);
  assertWarnOrFail("The <luceneAutoCommit>true|false</luceneAutoCommit> parameter is no longer valid in solrconfig.xml.",
      solrConfig.get(prefix+"/luceneAutoCommit", null) == null,
      true);

  defaultMergePolicyClassName = def.defaultMergePolicyClassName;
  useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
  effectiveUseCompountFileSetting = useCompoundFile;
  maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
  maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
  maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
  mergeFactor=solrConfig.getInt(prefix+"/mergeFactor",def.mergeFactor);
  ramBufferSizeMB = solrConfig.getDouble(prefix+"/ramBufferSizeMB", def.ramBufferSizeMB);

  writeLockTimeout=solrConfig.getInt(prefix+"/writeLockTimeout", def.writeLockTimeout);
  lockType=solrConfig.get(prefix+"/lockType", def.lockType);

  mergeSchedulerInfo = getPluginInfo(prefix + "/mergeScheduler", solrConfig, def.mergeSchedulerInfo);
  mergePolicyInfo = getPluginInfo(prefix + "/mergePolicy", solrConfig, def.mergePolicyInfo);

  termIndexInterval = solrConfig.getInt(prefix + "/termIndexInterval", def.termIndexInterval);

  boolean infoStreamEnabled = solrConfig.getBool(prefix + "/infoStream", false);
  if(infoStreamEnabled) {
    String infoStreamFile = solrConfig.get(prefix + "/infoStream/@file", null);
    if (infoStreamFile == null) {
      log.info("IndexWriter infoStream solr logging is enabled");
      infoStream = new LoggingInfoStream();
    } else {
      log.warn("IndexWriter infoStream file log is enabled: " + infoStreamFile +
               "\nThis feature is deprecated. Remove @file from <infoStream> to output messages to solr's logfile");
      File f = new File(infoStreamFile);
      File parent = f.getParentFile();
      if (parent != null) parent.mkdirs();
      try {
        FileOutputStream fos = new FileOutputStream(f, true);
        infoStream = new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
      } catch (Exception e) {
        log.error("Could not create info stream for file " + infoStreamFile, e);
      }
    }
  }
  mergedSegmentWarmerInfo = getPluginInfo(prefix + "/mergedSegmentWarmer", solrConfig, def.mergedSegmentWarmerInfo);
  if (mergedSegmentWarmerInfo != null && solrConfig.nrtMode == false) {
    throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since nrtMode is false");
  }

  checkIntegrityAtMerge = solrConfig.getBool(prefix + "/checkIntegrityAtMerge", def.checkIntegrityAtMerge);
}