Moved out configuration to external file
parent 72ac950374
commit 3d03db775d
irc-search.conf (new file, 20 additions)
@@ -0,0 +1,20 @@
+server {
+  port = 9090
+}
+
+indexing {
+  context {
+    size = 2
+    durationSecs = 20
+  }
+
+  runIntervalSecs = 10
+  flushIntervalSecs = 60
+  rateLimitPerSec = 1000
+}
+
+searching {
+  maxHits = 1000
+  messageFieldBoost = 2
+}
+
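Aside: a minimal sketch of how this file is read with Configrity, limited to the calls that appear in this commit. The demo object and the printed line are illustrative only; the dotted path "searching.maxHits" assumes nested blocks are addressable by dotted keys, which the detach-based reads later in this commit rely on as well.

import org.streum.configrity.Configuration

// Hypothetical demo, not part of this commit.
object ConfigDemo extends App {
  // Load the file from the classpath, as Server, Indexer and Searcher now do.
  val config = Configuration.loadResource("/irc-search.conf")

  // Values are read with a type parameter and a key path.
  val port    = config[Int]("server.port")        // 9090
  val maxHits = config[Int]("searching.maxHits")  // 1000

  println(s"port=$port maxHits=$maxHits")
}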
pom.xml (11 additions)
@@ -84,6 +84,17 @@
       <artifactId>guava</artifactId>
       <version>14.0.1</version>
     </dependency>
+    <dependency>
+      <groupId>org.streum</groupId>
+      <artifactId>configrity-core_${scala.majorversion}</artifactId>
+      <version>1.0.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.scalatest</groupId>
+          <artifactId>scalatest_${scala.majorversion}</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <build>
@@ -9,6 +9,8 @@ import scala.concurrent.future
 
 import com.typesafe.scalalogging.slf4j.Logging
 
+import org.streum.configrity.Configuration
+
 import au.com.bytecode.opencsv.CSVParser
 
 import io.netty.bootstrap.ServerBootstrap
@@ -31,42 +33,39 @@ import net.liftweb.json.Serialization
 object Server extends App with Logging {
 
-  if (args.isEmpty) {
-    println("Please specify port to run the server on")
-    System.exit(1)
-  } else {
-    val port = args(0).toInt
-    logger.info("Starting server at port {}", port: Integer)
+  val config = Configuration.loadResource("/irc-search.conf")
+  val port = config[Int]("server.port")
 
-    val server = (new ServerBootstrap)
-      .group(new NioEventLoopGroup(1), new NioEventLoopGroup(1))
-      .channel(classOf[NioServerSocketChannel])
-      .childHandler(new ChannelInitializer[SocketChannel] {
-        def initChannel(ch: SocketChannel) {
-          val p = ch.pipeline
-            .addLast("unihandler", UnifiedHandler)
-        }})
-      .localAddress(new InetSocketAddress(port))
+  logger.info("Starting server at port {}", port: Integer)
 
-    val cleanup = { () =>
-      stopServer(server)
-      Indexer.stop
-      Searcher.close
-    }
+  val server = (new ServerBootstrap)
+    .group(new NioEventLoopGroup(1), new NioEventLoopGroup(1))
+    .channel(classOf[NioServerSocketChannel])
+    .childHandler(new ChannelInitializer[SocketChannel] {
+      def initChannel(ch: SocketChannel) {
+        val p = ch.pipeline
+          .addLast("unihandler", UnifiedHandler)
+      }})
+    .localAddress(new InetSocketAddress(port))
 
-    Runtime.getRuntime.addShutdownHook(
-      new Thread("ShutdownHook") {
-        override def run = cleanup()
-      })
+  val cleanup = { () =>
+    stopServer(server)
+    Indexer.stop
+    Searcher.close
+  }
 
-    try {
-      Indexer.start
-      server.bind().sync.channel.closeFuture.sync
-    } catch {
-      case e : Exception => {
-        logger.error("Exception while running server. Stopping server", e)
-        cleanup()
-      }
+  Runtime.getRuntime.addShutdownHook(
+    new Thread("ShutdownHook") {
+      override def run = cleanup()
+    })
 
+  try {
+    Indexer.start
+    server.bind().sync.channel.closeFuture.sync
+  } catch {
+    case e : Exception => {
+      logger.error("Exception while running server. Stopping server", e)
+      cleanup()
+    }
   }
 
@@ -77,7 +76,6 @@ object Server extends App with Logging {
     }
 
-  }
 
   @Sharable
   private object UnifiedHandler extends ChannelInboundByteHandlerAdapter {
 
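Note: dropping the args.isEmpty guard means a missing or unreadable /irc-search.conf now surfaces as an exception at startup rather than the old "Please specify port" message. A hedged sketch of one way to keep a fallback, assuming Configuration.loadResource throws when the resource is absent; the 9090 default simply mirrors the shipped irc-search.conf:

import scala.util.{ Failure, Success, Try }
import org.streum.configrity.Configuration

// Hypothetical helper, not part of this commit.
object ConfigFallback {
  // Prefer the classpath resource; fall back to a default port if loading fails.
  def portOrDefault(default: Int = 9090): Int =
    Try(Configuration.loadResource("/irc-search.conf")) match {
      case Success(config) => config[Int]("server.port")
      case Failure(_)      => default
    }
}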
@@ -20,6 +20,7 @@ import org.apache.lucene.document.{ Field, LongField, StringField, TextField }
 import org.apache.lucene.index.{ IndexWriter, IndexWriterConfig }
 import org.apache.lucene.store.FSDirectory
 import org.apache.lucene.util.Version
+import org.streum.configrity.Configuration
 
 import com.google.common.util.concurrent.RateLimiter
 import com.typesafe.scalalogging.slf4j.Logging
@@ -45,12 +46,14 @@ object Indexer extends Logging {
     }
   }
 
-  val LUCENE_VERSION = Version.LUCENE_43
-  val ContextSize = 2
-  val ContextDurationSecs = 20
-  val IndexingDurationSecs = 10
-  val FlushDurationSecs = 60
-  val RateLimitPerSec = 1000
+  private val config = Configuration.loadResource("/irc-search.conf").detach("indexing")
+
+  val LuceneVersion = Version.LUCENE_43
+  private val ContextSize = config[Int]("context.size")
+  private val ContextDurationSecs = config[Int]("context.durationSecs")
+  private val RunIntervalSecs = config[Int]("runIntervalSecs")
+  private val FlushIntervalSecs = config[Int]("flushIntervalSecs")
+  private val RateLimitPerSec = config[Int]("rateLimitPerSec")
 
   private val indexQueue = new PriorityBlockingQueue[IndexRecord]
   private val scheduler = Executors.newScheduledThreadPool(2)
@@ -72,12 +75,12 @@ object Indexer extends Logging {
   }
 
   def mkAnalyzer : Analyzer = {
-    val defAnalyzer = new StandardAnalyzer(LUCENE_VERSION)
+    val defAnalyzer = new StandardAnalyzer(LuceneVersion)
     val fieldAnalyzers = Map(
       ChatLine.USER -> new KeywordAnalyzer,
-      ChatLine.MSG -> new EnglishAnalyzer(LUCENE_VERSION),
-      ChatLine.CTXB -> new EnglishAnalyzer(LUCENE_VERSION),
-      ChatLine.CTXA -> new EnglishAnalyzer(LUCENE_VERSION))
+      ChatLine.MSG -> new EnglishAnalyzer(LuceneVersion),
+      ChatLine.CTXB -> new EnglishAnalyzer(LuceneVersion),
+      ChatLine.CTXA -> new EnglishAnalyzer(LuceneVersion))
 
     new PerFieldAnalyzerWrapper(defAnalyzer, fieldAnalyzers)
   }
@@ -90,7 +93,7 @@ object Indexer extends Logging {
         assert(indexDir.isDirectory)
       }
       val indexer = new IndexWriter(FSDirectory.open(indexDir),
-        new IndexWriterConfig(LUCENE_VERSION, mkAnalyzer))
+        new IndexWriterConfig(LuceneVersion, mkAnalyzer))
       indexers += (dirPath -> indexer)
     }
   }
@@ -141,7 +144,7 @@ object Indexer extends Logging {
 
   def start {
     logger.info("Starting indexer")
-    indexingFuture = schedule(0, IndexingDurationSecs, TimeUnit.SECONDS) {
+    indexingFuture = schedule(0, RunIntervalSecs, TimeUnit.SECONDS) {
      if (!indexQueue.isEmpty) {
        val indexRecs = new ArrayList[IndexRecord]
        indexQueue drainTo indexRecs
@@ -169,7 +172,7 @@ object Indexer extends Logging {
      }
    }
 
-    flushFuture = schedule(0, FlushDurationSecs, TimeUnit.SECONDS) {
+    flushFuture = schedule(0, FlushIntervalSecs, TimeUnit.SECONDS) {
      doInLock(flush)
    }
  }
 
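Aside: detach("indexing") scopes later lookups to that block, so config[Int]("context.size") above reads the same value as indexing.context.size in irc-search.conf. A small illustrative sketch (the demo object is hypothetical):

import org.streum.configrity.Configuration

object DetachDemo extends App {
  val full     = Configuration.loadResource("/irc-search.conf")
  val indexing = full.detach("indexing")

  // Both reads should yield 2, per the shipped irc-search.conf.
  println(indexing[Int]("context.size"))
  println(full[Int]("indexing.context.size"))
}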
@@ -15,6 +15,7 @@ import org.apache.lucene.search.{ BooleanClause, BooleanQuery, Filter, FilteredQuery,
   NumericRangeFilter, Query, QueryWrapperFilter, SearcherFactory,
   SearcherManager, Sort, SortField, TermQuery }
 import org.apache.lucene.store.FSDirectory
+import org.streum.configrity.Configuration
 
 import com.typesafe.scalalogging.slf4j.Logging
 
@@ -22,8 +23,10 @@ import net.abhinavsarkar.ircsearch.model._
 
 object Searcher extends Logging {
 
-  val MaxHits = 1000
-  val MessageFieldBoost = java.lang.Float.valueOf(2.0f)
+  private val config = Configuration.loadResource("/irc-search.conf").detach("searching")
+
+  private val MaxHits = config[Int]("maxHits")
+  private val MessageFieldBoost = java.lang.Float.valueOf(config[Float]("messageFieldBoost"))
 
   private val searcherMgrs = mutable.Map[String, SearcherManager]()
 
@@ -47,7 +50,7 @@ object Searcher extends Logging {
   }
 
   private def mkQueryParser(analyzer : Analyzer) =
-    new MultiFieldQueryParser(Indexer.LUCENE_VERSION,
+    new MultiFieldQueryParser(Indexer.LuceneVersion,
       List(ChatLine.MSG, ChatLine.CTXB, ChatLine.CTXA).toArray, analyzer,
       Map(ChatLine.MSG -> MessageFieldBoost))
 