From 3d03db775da74c417c23512b6c43c60fc7e46154 Mon Sep 17 00:00:00 2001
From: Abhinav Sarkar
Date: Wed, 22 May 2013 23:28:40 +0530
Subject: [PATCH] Moved out configuration to external file

---
 irc-search.conf                              | 20 ++++++
 pom.xml                                      | 11 ++++
 .../net/abhinavsarkar/ircsearch/Server.scala | 64 +++++++++----------
 .../ircsearch/lucene/Indexer.scala           | 29 +++++----
 .../ircsearch/lucene/Searcher.scala          |  9 ++-
 5 files changed, 84 insertions(+), 49 deletions(-)
 create mode 100644 irc-search.conf

diff --git a/irc-search.conf b/irc-search.conf
new file mode 100644
index 0000000..8e587c1
--- /dev/null
+++ b/irc-search.conf
@@ -0,0 +1,20 @@
+server {
+  port = 9090
+}
+
+indexing {
+  context {
+    size = 2
+    durationSecs = 20
+  }
+
+  runIntervalSecs = 10
+  flushIntervalSecs = 60
+  rateLimitPerSec = 1000
+}
+
+searching {
+  maxHits = 1000
+  messageFieldBoost = 2
+}
+
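Note: the block below is not part of the patch. It is a minimal sketch, assuming irc-search.conf is on the application classpath and the Configrity dependency added in the pom.xml hunk further down, of how the keys defined above are read by the classes this patch touches; the object name `ConfigSketch` and the printed output are illustrative only.

```scala
import org.streum.configrity.Configuration

// Illustrative only, not part of the patch. It mirrors the calls used by
// Server, Indexer and Searcher in the hunks that follow.
object ConfigSketch extends App {
  // Load irc-search.conf from the classpath, as the patched classes do.
  val config = Configuration.loadResource("/irc-search.conf")

  // Top-level lookups are typed and use the full key path...
  val port = config[Int]("server.port")

  // ...while detach("indexing") scopes further lookups to that block,
  // so keys are addressed relative to it.
  val indexing = config.detach("indexing")
  val contextSize = indexing[Int]("context.size")
  val rateLimit = indexing[Int]("rateLimitPerSec")

  println(s"port=$port contextSize=$contextSize rateLimitPerSec=$rateLimit")
}
```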
diff --git a/pom.xml b/pom.xml
index 479ef7d..bc4140b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -84,6 +84,17 @@
       <artifactId>guava</artifactId>
       <version>14.0.1</version>
     </dependency>
+    <dependency>
+      <groupId>org.streum</groupId>
+      <artifactId>configrity-core_${scala.majorversion}</artifactId>
+      <version>1.0.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.scalatest</groupId>
+          <artifactId>scalatest_${scala.majorversion}</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/src/main/scala/net/abhinavsarkar/ircsearch/Server.scala b/src/main/scala/net/abhinavsarkar/ircsearch/Server.scala
index 0842d2f..909e857 100644
--- a/src/main/scala/net/abhinavsarkar/ircsearch/Server.scala
+++ b/src/main/scala/net/abhinavsarkar/ircsearch/Server.scala
@@ -9,6 +9,8 @@ import scala.concurrent.future
 
 import com.typesafe.scalalogging.slf4j.Logging
 
+import org.streum.configrity.Configuration
+
 import au.com.bytecode.opencsv.CSVParser
 
 import io.netty.bootstrap.ServerBootstrap
@@ -31,42 +33,39 @@ import net.liftweb.json.Serialization
 
 object Server extends App with Logging {
 
-  if (args.isEmpty) {
-    println("Please specify port to run the server on")
-    System.exit(1)
-  } else {
-    val port = args(0).toInt
-    logger.info("Starting server at port {}", port: Integer)
+  val config = Configuration.loadResource("/irc-search.conf")
+  val port = config[Int]("server.port")
 
-    val server = (new ServerBootstrap)
-      .group(new NioEventLoopGroup(1), new NioEventLoopGroup(1))
-      .channel(classOf[NioServerSocketChannel])
-      .childHandler(new ChannelInitializer[SocketChannel] {
-        def initChannel(ch: SocketChannel) {
-          val p = ch.pipeline
-            .addLast("unihandler", UnifiedHandler)
-        }})
-      .localAddress(new InetSocketAddress(port))
+  logger.info("Starting server at port {}", port: Integer)
 
-    val cleanup = { () =>
-      stopServer(server)
-      Indexer.stop
-      Searcher.close
-    }
+  val server = (new ServerBootstrap)
+    .group(new NioEventLoopGroup(1), new NioEventLoopGroup(1))
+    .channel(classOf[NioServerSocketChannel])
+    .childHandler(new ChannelInitializer[SocketChannel] {
+      def initChannel(ch: SocketChannel) {
+        val p = ch.pipeline
+          .addLast("unihandler", UnifiedHandler)
+      }})
+    .localAddress(new InetSocketAddress(port))
 
-    Runtime.getRuntime.addShutdownHook(
-      new Thread("ShutdownHook") {
-        override def run = cleanup()
-      })
+  val cleanup = { () =>
+    stopServer(server)
+    Indexer.stop
+    Searcher.close
+  }
 
-    try {
-      Indexer.start
-      server.bind().sync.channel.closeFuture.sync
-    } catch {
-      case e : Exception => {
-        logger.error("Exception while running server. Stopping server", e)
-        cleanup()
-      }
+  Runtime.getRuntime.addShutdownHook(
+    new Thread("ShutdownHook") {
+      override def run = cleanup()
+    })
+
+  try {
+    Indexer.start
+    server.bind().sync.channel.closeFuture.sync
+  } catch {
+    case e : Exception => {
+      logger.error("Exception while running server. Stopping server", e)
+      cleanup()
     }
   }
 
@@ -77,7 +76,6 @@ object Server extends App with Logging {
     }
   }
 
-  @Sharable
   private object UnifiedHandler extends ChannelInboundByteHandlerAdapter {
 
 
diff --git a/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Indexer.scala b/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Indexer.scala
index 7b7f0da..c1c3f09 100644
--- a/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Indexer.scala
+++ b/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Indexer.scala
@@ -20,6 +20,7 @@ import org.apache.lucene.document.{ Field, LongField, StringField, TextField }
 import org.apache.lucene.index.{ IndexWriter, IndexWriterConfig }
 import org.apache.lucene.store.FSDirectory
 import org.apache.lucene.util.Version
+import org.streum.configrity.Configuration
 
 import com.google.common.util.concurrent.RateLimiter
 import com.typesafe.scalalogging.slf4j.Logging
@@ -45,12 +46,14 @@
     }
   }
 
-  val LUCENE_VERSION = Version.LUCENE_43
-  val ContextSize = 2
-  val ContextDurationSecs = 20
-  val IndexingDurationSecs = 10
-  val FlushDurationSecs = 60
-  val RateLimitPerSec = 1000
+  private val config = Configuration.loadResource("/irc-search.conf").detach("indexing")
+
+  val LuceneVersion = Version.LUCENE_43
+  private val ContextSize = config[Int]("context.size")
+  private val ContextDurationSecs = config[Int]("context.durationSecs")
+  private val RunIntervalSecs = config[Int]("runIntervalSecs")
+  private val FlushIntervalSecs = config[Int]("flushIntervalSecs")
+  private val RateLimitPerSec = config[Int]("rateLimitPerSec")
 
   private val indexQueue = new PriorityBlockingQueue[IndexRecord]
   private val scheduler = Executors.newScheduledThreadPool(2)
@@ -72,12 +75,12 @@
   }
 
   def mkAnalyzer : Analyzer = {
-    val defAnalyzer = new StandardAnalyzer(LUCENE_VERSION)
+    val defAnalyzer = new StandardAnalyzer(LuceneVersion)
     val fieldAnalyzers = Map(
       ChatLine.USER -> new KeywordAnalyzer,
-      ChatLine.MSG -> new EnglishAnalyzer(LUCENE_VERSION),
-      ChatLine.CTXB -> new EnglishAnalyzer(LUCENE_VERSION),
-      ChatLine.CTXA -> new EnglishAnalyzer(LUCENE_VERSION))
+      ChatLine.MSG -> new EnglishAnalyzer(LuceneVersion),
+      ChatLine.CTXB -> new EnglishAnalyzer(LuceneVersion),
+      ChatLine.CTXA -> new EnglishAnalyzer(LuceneVersion))
 
     new PerFieldAnalyzerWrapper(defAnalyzer, fieldAnalyzers)
   }
@@ -90,7 +93,7 @@
         assert(indexDir.isDirectory)
       }
       val indexer = new IndexWriter(FSDirectory.open(indexDir),
-        new IndexWriterConfig(LUCENE_VERSION, mkAnalyzer))
+        new IndexWriterConfig(LuceneVersion, mkAnalyzer))
       indexers += (dirPath -> indexer)
     }
   }
@@ -141,7 +144,7 @@
 
   def start {
     logger.info("Starting indexer")
-    indexingFuture = schedule(0, IndexingDurationSecs, TimeUnit.SECONDS) {
+    indexingFuture = schedule(0, RunIntervalSecs, TimeUnit.SECONDS) {
       if (!indexQueue.isEmpty) {
         val indexRecs = new ArrayList[IndexRecord]
         indexQueue drainTo indexRecs
@@ -169,7 +172,7 @@
       }
     }
 
-    flushFuture = schedule(0, FlushDurationSecs, TimeUnit.SECONDS) {
+    flushFuture = schedule(0, FlushIntervalSecs, TimeUnit.SECONDS) {
       doInLock(flush)
     }
   }
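Note: the block below is not part of the patch. The Indexer hunks above rename IndexingDurationSecs/FlushDurationSecs to runIntervalSecs/flushIntervalSecs and read them from the indexing block; the standalone sketch below shows how such interval settings can drive a ScheduledExecutorService. The `schedule` helper, the object name, and the task bodies are hypothetical stand-ins, since the project's own private `schedule` helper is not shown in this diff.

```scala
import java.util.concurrent.{ Executors, TimeUnit }

import org.streum.configrity.Configuration

// Hypothetical analogue of the schedule(...) calls made in Indexer.start;
// names and task bodies are placeholders, for illustration only.
object SchedulingSketch extends App {
  private val config = Configuration.loadResource("/irc-search.conf").detach("indexing")
  private val runIntervalSecs = config[Int]("runIntervalSecs")
  private val flushIntervalSecs = config[Int]("flushIntervalSecs")

  private val scheduler = Executors.newScheduledThreadPool(2)

  // Run `body` repeatedly with a fixed delay between completions.
  private def schedule(initialDelaySecs: Int, intervalSecs: Int)(body: => Unit) =
    scheduler.scheduleWithFixedDelay(
      new Runnable { def run() { body } },
      initialDelaySecs, intervalSecs, TimeUnit.SECONDS)

  schedule(0, runIntervalSecs) { println("drain the index queue and write documents") }
  schedule(0, flushIntervalSecs) { println("commit the open index writers") }
}
```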
diff --git a/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Searcher.scala b/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Searcher.scala
index c16ddd0..c2f91e6 100644
--- a/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Searcher.scala
+++ b/src/main/scala/net/abhinavsarkar/ircsearch/lucene/Searcher.scala
@@ -15,6 +15,7 @@ import org.apache.lucene.search.{ BooleanClause, BooleanQuery, Filter, FilteredQ
   NumericRangeFilter, Query, QueryWrapperFilter, SearcherFactory, SearcherManager, Sort,
   SortField, TermQuery }
 import org.apache.lucene.store.FSDirectory
+import org.streum.configrity.Configuration
 
 import com.typesafe.scalalogging.slf4j.Logging
 
@@ -22,8 +23,10 @@ import net.abhinavsarkar.ircsearch.model._
 
 object Searcher extends Logging {
 
-  val MaxHits = 1000
-  val MessageFieldBoost = java.lang.Float.valueOf(2.0f)
+  private val config = Configuration.loadResource("/irc-search.conf").detach("searching")
+
+  private val MaxHits = config[Int]("maxHits")
+  private val MessageFieldBoost = java.lang.Float.valueOf(config[Float]("messageFieldBoost"))
 
   private val searcherMgrs = mutable.Map[String, SearcherManager]()
 
@@ -47,7 +50,7 @@ object Searcher extends Logging {
   }
 
   private def mkQueryParser(analyzer : Analyzer) =
-    new MultiFieldQueryParser(Indexer.LUCENE_VERSION,
+    new MultiFieldQueryParser(Indexer.LuceneVersion,
       List(ChatLine.MSG, ChatLine.CTXB, ChatLine.CTXA).toArray,
       analyzer,
       Map(ChatLine.MSG -> MessageFieldBoost))
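Note: the block below is not part of the patch. It is a self-contained sketch of how the `searching` settings feed into Lucene, following the shape of Searcher.mkQueryParser above; the field names "msg", "ctxb" and "ctxa" are placeholders for the project's ChatLine constants, which this diff does not show, and the query string is arbitrary.

```scala
import java.util.{ HashMap => JHashMap }

import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser
import org.apache.lucene.util.Version
import org.streum.configrity.Configuration

// Illustrative only; field names stand in for ChatLine.MSG/CTXB/CTXA.
object SearchConfigSketch extends App {
  val config = Configuration.loadResource("/irc-search.conf").detach("searching")
  val maxHits = config[Int]("maxHits")
  // Lucene's boost map wants java.lang.Float values, hence the explicit boxing.
  val messageFieldBoost = java.lang.Float.valueOf(config[Float]("messageFieldBoost"))

  val boosts = new JHashMap[String, java.lang.Float]
  boosts.put("msg", messageFieldBoost)

  val analyzer = new StandardAnalyzer(Version.LUCENE_43)
  val parser = new MultiFieldQueryParser(Version.LUCENE_43,
    Array("msg", "ctxb", "ctxa"), analyzer, boosts)

  val query = parser.parse("configuration file")
  println(s"parsed query: $query (search would return at most $maxHits hits)")
}
```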