Moved configuration out to an external file

master
Abhinav Sarkar 2013-05-22 23:28:40 +05:30
parent 72ac950374
commit 3d03db775d
5 changed files with 84 additions and 49 deletions

20
irc-search.conf Normal file
View File

@ -0,0 +1,20 @@
server {
port = 9090
}
indexing {
context {
size = 2
durationSecs = 20
}
runIntervalSecs = 10
flushIntervalSecs = 60
rateLimitPerSec = 1000
}
searching {
maxHits = 1000
messageFieldBoost = 2
}

11
pom.xml
View File

@ -84,6 +84,17 @@
<artifactId>guava</artifactId>
<version>14.0.1</version>
</dependency>
<dependency>
<groupId>org.streum</groupId>
<artifactId>configrity-core_${scala.majorversion}</artifactId>
<version>1.0.0</version>
<exclusions>
<exclusion>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.majorversion}</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>

View File

@ -9,6 +9,8 @@ import scala.concurrent.future
import com.typesafe.scalalogging.slf4j.Logging
import org.streum.configrity.Configuration
import au.com.bytecode.opencsv.CSVParser
import io.netty.bootstrap.ServerBootstrap
@ -31,11 +33,9 @@ import net.liftweb.json.Serialization
object Server extends App with Logging {
if (args.isEmpty) {
println("Please specify port to run the server on")
System.exit(1)
} else {
val port = args(0).toInt
val config = Configuration.loadResource("/irc-search.conf")
val port = config[Int]("server.port")
logger.info("Starting server at port {}", port: Integer)
val server = (new ServerBootstrap)
@ -68,7 +68,6 @@ object Server extends App with Logging {
cleanup()
}
}
}
private def stopServer(server : ServerBootstrap) {
logger.info("Stopping server")
@ -77,7 +76,6 @@ object Server extends App with Logging {
}
}
@Sharable
private object UnifiedHandler extends ChannelInboundByteHandlerAdapter {

View File

@ -20,6 +20,7 @@ import org.apache.lucene.document.{ Field, LongField, StringField, TextField }
import org.apache.lucene.index.{ IndexWriter, IndexWriterConfig }
import org.apache.lucene.store.FSDirectory
import org.apache.lucene.util.Version
import org.streum.configrity.Configuration
import com.google.common.util.concurrent.RateLimiter
import com.typesafe.scalalogging.slf4j.Logging
@ -45,12 +46,14 @@ object Indexer extends Logging {
}
}
val LUCENE_VERSION = Version.LUCENE_43
val ContextSize = 2
val ContextDurationSecs = 20
val IndexingDurationSecs = 10
val FlushDurationSecs = 60
val RateLimitPerSec = 1000
private val config = Configuration.loadResource("/irc-search.conf").detach("indexing")
val LuceneVersion = Version.LUCENE_43
private val ContextSize = config[Int]("context.size")
private val ContextDurationSecs = config[Int]("context.durationSecs")
private val RunIntervalSecs = config[Int]("runIntervalSecs")
private val FlushIntervalSecs = config[Int]("flushIntervalSecs")
private val RateLimitPerSec = config[Int]("rateLimitPerSec")
private val indexQueue = new PriorityBlockingQueue[IndexRecord]
private val scheduler = Executors.newScheduledThreadPool(2)
@ -72,12 +75,12 @@ object Indexer extends Logging {
}
def mkAnalyzer : Analyzer = {
val defAnalyzer = new StandardAnalyzer(LUCENE_VERSION)
val defAnalyzer = new StandardAnalyzer(LuceneVersion)
val fieldAnalyzers = Map(
ChatLine.USER -> new KeywordAnalyzer,
ChatLine.MSG -> new EnglishAnalyzer(LUCENE_VERSION),
ChatLine.CTXB -> new EnglishAnalyzer(LUCENE_VERSION),
ChatLine.CTXA -> new EnglishAnalyzer(LUCENE_VERSION))
ChatLine.MSG -> new EnglishAnalyzer(LuceneVersion),
ChatLine.CTXB -> new EnglishAnalyzer(LuceneVersion),
ChatLine.CTXA -> new EnglishAnalyzer(LuceneVersion))
new PerFieldAnalyzerWrapper(defAnalyzer, fieldAnalyzers)
}
@ -90,7 +93,7 @@ object Indexer extends Logging {
assert(indexDir.isDirectory)
}
val indexer = new IndexWriter(FSDirectory.open(indexDir),
new IndexWriterConfig(LUCENE_VERSION, mkAnalyzer))
new IndexWriterConfig(LuceneVersion, mkAnalyzer))
indexers += (dirPath -> indexer)
}
}
@ -141,7 +144,7 @@ object Indexer extends Logging {
def start {
logger.info("Starting indexer")
indexingFuture = schedule(0, IndexingDurationSecs, TimeUnit.SECONDS) {
indexingFuture = schedule(0, RunIntervalSecs, TimeUnit.SECONDS) {
if (!indexQueue.isEmpty) {
val indexRecs = new ArrayList[IndexRecord]
indexQueue drainTo indexRecs
@ -169,7 +172,7 @@ object Indexer extends Logging {
}
}
flushFuture = schedule(0, FlushDurationSecs, TimeUnit.SECONDS) {
flushFuture = schedule(0, FlushIntervalSecs, TimeUnit.SECONDS) {
doInLock(flush)
}
}

View File

@ -15,6 +15,7 @@ import org.apache.lucene.search.{ BooleanClause, BooleanQuery, Filter, FilteredQ
NumericRangeFilter, Query, QueryWrapperFilter, SearcherFactory,
SearcherManager, Sort, SortField, TermQuery }
import org.apache.lucene.store.FSDirectory
import org.streum.configrity.Configuration
import com.typesafe.scalalogging.slf4j.Logging
@ -22,8 +23,10 @@ import net.abhinavsarkar.ircsearch.model._
object Searcher extends Logging {
val MaxHits = 1000
val MessageFieldBoost = java.lang.Float.valueOf(2.0f)
private val config = Configuration.loadResource("/irc-search.conf").detach("searching")
private val MaxHits = config[Int]("maxHits")
private val MessageFieldBoost = java.lang.Float.valueOf(config[Float]("messageFieldBoost"))
private val searcherMgrs = mutable.Map[String, SearcherManager]()
@ -47,7 +50,7 @@ object Searcher extends Logging {
}
private def mkQueryParser(analyzer : Analyzer) =
new MultiFieldQueryParser(Indexer.LUCENE_VERSION,
new MultiFieldQueryParser(Indexer.LuceneVersion,
List(ChatLine.MSG, ChatLine.CTXB, ChatLine.CTXA).toArray, analyzer,
Map(ChatLine.MSG -> MessageFieldBoost))