Merge branch 'master' into update/yamusca-circe-0.9.0

Scala Steward 2022-02-22 00:17:43 +01:00
commit 3cb293b29c
No known key found for this signature in database
GPG Key ID: 96BDF10FFAB8B6A6
157 changed files with 1426 additions and 764 deletions

build.sbt

@ -9,9 +9,6 @@ val elmCompileMode = settingKey[ElmCompileMode]("How to compile elm sources")
// --- Settings
def inTest(d0: Seq[ModuleID], ds: Seq[ModuleID]*) =
ds.fold(d0)(_ ++ _).map(_ % Test)
val scalafixSettings = Seq(
semanticdbEnabled := true, // enable SemanticDB
semanticdbVersion := scalafixSemanticdb.revision, // "4.4.0"
@ -58,14 +55,10 @@ val sharedSettings = Seq(
libraryDependencySchemes ++= Seq(
"com.github.eikek" %% "calev-core" % VersionScheme.Always,
"com.github.eikek" %% "calev-circe" % VersionScheme.Always
)
),
addCompilerPlugin(Dependencies.kindProjectorPlugin)
) ++ scalafixSettings
val testSettingsMUnit = Seq(
libraryDependencies ++= inTest(Dependencies.munit, Dependencies.logging),
testFrameworks += new TestFramework("munit.Framework")
)
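
Throughout the rest of this diff, `.settings(testSettingsMUnit)` is replaced by `.withTestSettings`, which suggests the block above moved into an sbt syntax extension under project/. A hedged sketch of how such an extension could look (an illustration under assumptions, not the project's actual code):

// Hypothetical sketch, e.g. project/TestSettingsPlugin.scala: an AutoPlugin
// whose autoImport adds `.withTestSettings` to Project, bundling the former
// munit test settings.
import sbt._
import sbt.Keys._

object TestSettingsPlugin extends AutoPlugin {
  override def trigger = allRequirements
  object autoImport {
    implicit class ProjectTestSyntax(private val p: Project) extends AnyVal {
      def withTestSettings: Project =
        p.settings(
          libraryDependencies ++= Dependencies.munit.map(_ % Test),
          testFrameworks += new TestFramework("munit.Framework")
        )
    }
  }
}
// A variant like `withTestSettingsDependsOn(...)`, seen below, presumably
// also adds a test-scoped dependency on the given module.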
lazy val noPublish = Seq(
publish := {},
publishLocal := {},
@ -294,6 +287,20 @@ val openapiScalaSettings = Seq(
// --- Modules
val loggingApi = project
.in(file("modules/logging/api"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.withTestSettings
.settings(
name := "docspell-logging-api",
libraryDependencies ++=
Dependencies.catsEffect ++
Dependencies.circeCore ++
Dependencies.fs2Core ++
Dependencies.sourcecode
)
// Base module, everything depends on this including the restapi and
// joexapi modules. This should aim to have the fewest possible
// dependencies
@ -301,31 +308,44 @@ val common = project
.in(file("modules/common"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-common",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.circe ++
Dependencies.loggingApi ++
Dependencies.calevCore ++
Dependencies.calevCirce
)
.dependsOn(loggingApi)
val config = project
.in(file("modules/config"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-config",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.pureconfig
)
.dependsOn(common)
.dependsOn(common, loggingApi)
val loggingScribe = project
.in(file("modules/logging/scribe"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.withTestSettings
.settings(
name := "docspell-logging-scribe",
libraryDependencies ++=
Dependencies.scribe ++
Dependencies.catsEffect ++
Dependencies.circeCore ++
Dependencies.fs2Core
)
.dependsOn(loggingApi)
// Some example files for testing
// https://file-examples.com/index.php/sample-documents-download/sample-doc-download/
@ -333,7 +353,7 @@ val files = project
.in(file("modules/files"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-files",
libraryDependencies ++=
@ -372,7 +392,7 @@ val query =
.in(file("modules/query"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-query",
libraryDependencies +=
@ -392,7 +412,7 @@ val totp = project
.in(file("modules/totp"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-totp",
libraryDependencies ++=
@ -406,7 +426,7 @@ val jsonminiq = project
.in(file("modules/jsonminiq"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-jsonminiq",
libraryDependencies ++=
@ -419,25 +439,23 @@ val notificationApi = project
.in(file("modules/notification/api"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-notification-api",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.emilCommon ++
Dependencies.circeGenericExtra
)
.dependsOn(common)
.dependsOn(common, loggingScribe)
val store = project
.in(file("modules/store"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettingsDependsOn(loggingScribe)
.settings(
name := "docspell-store",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.doobie ++
Dependencies.binny ++
@ -445,7 +463,6 @@ val store = project
Dependencies.fs2 ++
Dependencies.databases ++
Dependencies.flyway ++
Dependencies.loggingApi ++
Dependencies.emil ++
Dependencies.emilDoobie ++
Dependencies.calevCore ++
@ -453,16 +470,15 @@ val store = project
libraryDependencies ++=
Dependencies.testContainer.map(_ % Test)
)
.dependsOn(common, query.jvm, totp, files, notificationApi, jsonminiq)
.dependsOn(common, query.jvm, totp, files, notificationApi, jsonminiq, loggingScribe)
val notificationImpl = project
.in(file("modules/notification/impl"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-notification-impl",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.emil ++
@ -479,10 +495,9 @@ val pubsubApi = project
.in(file("modules/pubsub/api"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-pubsub-api",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2
)
@ -492,10 +507,9 @@ val pubsubNaive = project
.in(file("modules/pubsub/naive"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-pubsub-naive",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.http4sCirce ++
@ -509,7 +523,7 @@ val extract = project
.in(file("modules/extract"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettingsDependsOn(loggingScribe)
.settings(
name := "docspell-extract",
libraryDependencies ++=
@ -517,16 +531,15 @@ val extract = project
Dependencies.twelvemonkeys ++
Dependencies.pdfbox ++
Dependencies.poi ++
Dependencies.commonsIO ++
Dependencies.julOverSlf4j
Dependencies.commonsIO
)
.dependsOn(common, files % "compile->compile;test->test")
.dependsOn(common, loggingScribe, files % "compile->compile;test->test")
val convert = project
.in(file("modules/convert"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettingsDependsOn(loggingScribe)
.settings(
name := "docspell-convert",
libraryDependencies ++=
@ -541,7 +554,7 @@ val analysis = project
.disablePlugins(RevolverPlugin)
.enablePlugins(NerModelsPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettingsDependsOn(loggingScribe)
.settings(NerModelsPlugin.nerClassifierSettings)
.settings(
name := "docspell-analysis",
@ -549,24 +562,24 @@ val analysis = project
Dependencies.fs2 ++
Dependencies.stanfordNlpCore
)
.dependsOn(common, files % "test->test")
.dependsOn(common, files % "test->test", loggingScribe)
val ftsclient = project
.in(file("modules/fts-client"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-fts-client",
libraryDependencies ++= Seq.empty
)
.dependsOn(common)
.dependsOn(common, loggingScribe)
val ftssolr = project
.in(file("modules/fts-solr"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-fts-solr",
libraryDependencies ++=
@ -582,7 +595,7 @@ val restapi = project
.disablePlugins(RevolverPlugin)
.enablePlugins(OpenApiSchema)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(openapiScalaSettings)
.settings(
name := "docspell-restapi",
@ -600,7 +613,7 @@ val joexapi = project
.disablePlugins(RevolverPlugin)
.enablePlugins(OpenApiSchema)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(openapiScalaSettings)
.settings(
name := "docspell-joexapi",
@ -613,41 +626,39 @@ val joexapi = project
openapiSpec := (Compile / resourceDirectory).value / "joex-openapi.yml",
openapiStaticGen := OpenApiDocGenerator.Redoc
)
.dependsOn(common)
.dependsOn(common, loggingScribe)
val backend = project
.in(file("modules/backend"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-backend",
libraryDependencies ++=
Dependencies.loggingApi ++
Dependencies.fs2 ++
Dependencies.fs2 ++
Dependencies.bcrypt ++
Dependencies.http4sClient ++
Dependencies.emil
)
.dependsOn(store, notificationApi, joexapi, ftsclient, totp, pubsubApi)
.dependsOn(store, notificationApi, joexapi, ftsclient, totp, pubsubApi, loggingApi)
val oidc = project
.in(file("modules/oidc"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(
name := "docspell-oidc",
libraryDependencies ++=
Dependencies.loggingApi ++
Dependencies.fs2 ++
Dependencies.fs2 ++
Dependencies.http4sClient ++
Dependencies.http4sCirce ++
Dependencies.http4sDsl ++
Dependencies.circe ++
Dependencies.jwtScala
)
.dependsOn(common)
.dependsOn(common, loggingScribe)
val webapp = project
.in(file("modules/webapp"))
@ -678,7 +689,7 @@ val joex = project
ClasspathJarPlugin
)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(debianSettings("docspell-joex"))
.settings(buildInfoSettings)
.settings(
@ -698,10 +709,7 @@ val joex = project
Dependencies.emilMarkdown ++
Dependencies.emilJsoup ++
Dependencies.jsoup ++
Dependencies.yamusca ++
Dependencies.loggingApi ++
Dependencies.logging.map(_ % Runtime),
addCompilerPlugin(Dependencies.kindProjectorPlugin),
Dependencies.yamusca,
addCompilerPlugin(Dependencies.betterMonadicFor),
buildInfoPackage := "docspell.joex",
reStart / javaOptions ++= Seq(
@ -713,6 +721,8 @@ val joex = project
)
.dependsOn(
config,
loggingApi,
loggingScribe,
store,
backend,
extract,
@ -735,7 +745,7 @@ val restserver = project
ClasspathJarPlugin
)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.withTestSettings
.settings(debianSettings("docspell-server"))
.settings(buildInfoSettings)
.settings(
@ -751,10 +761,7 @@ val restserver = project
Dependencies.pureconfig ++
Dependencies.yamusca ++
Dependencies.kittens ++
Dependencies.webjars ++
Dependencies.loggingApi ++
Dependencies.logging.map(_ % Runtime),
addCompilerPlugin(Dependencies.kindProjectorPlugin),
Dependencies.webjars,
addCompilerPlugin(Dependencies.betterMonadicFor),
buildInfoPackage := "docspell.restserver",
Compile / sourceGenerators += Def.task {
@ -788,6 +795,8 @@ val restserver = project
)
.dependsOn(
config,
loggingApi,
loggingScribe,
restapi,
joexapi,
backend,
@ -869,6 +878,8 @@ val root = project
)
.aggregate(
common,
loggingApi,
loggingScribe,
config,
extract,
convert,

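The file diffs below replace org.log4s loggers with a new docspell-logging module. A hedged reconstruction of the API surface implied by the call sites (`docspell.logging.getLogger[F]`, `unsafeLogger`, `logger.stream`); names and signatures are inferred from this commit, not taken from the actual source:

// Sketch only: assumed shape of the new logging API.
import fs2.Stream

trait Logger[F[_]] { self =>
  def trace(msg: => String): F[Unit]
  def debug(msg: => String): F[Unit]
  def info(msg: => String): F[Unit]
  def warn(msg: => String): F[Unit]
  def error(msg: => String): F[Unit]

  // used below as `logger.stream.debug(...).drain`
  def stream: Logger[Stream[F, *]] = new Logger[Stream[F, *]] {
    def trace(msg: => String) = Stream.eval(self.trace(msg))
    def debug(msg: => String) = Stream.eval(self.debug(msg))
    def info(msg: => String) = Stream.eval(self.info(msg))
    def warn(msg: => String) = Stream.eval(self.warn(msg))
    def error(msg: => String) = Stream.eval(self.error(msg))
  }
}

// Assumed companions, inferred from call sites: `getLogger[F]` resolves a
// Logger[F] (optionally named, as in `getLogger[F]("joex-...")`), plausibly
// capturing the call site via the new sourcecode dependency; `unsafeLogger`
// is a side-effecting variant for non-F contexts; and the *With variants
// (infoWith, debugWith, ...) take a LogEvent => LogEvent to attach
// structured data or errors.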

@ -15,8 +15,7 @@ import docspell.analysis.contact.Contact
import docspell.analysis.date.DateFind
import docspell.analysis.nlp._
import docspell.common._
import org.log4s.getLogger
import docspell.logging.Logger
trait TextAnalyser[F[_]] {
@ -30,7 +29,6 @@ trait TextAnalyser[F[_]] {
def classifier: TextClassifier[F]
}
object TextAnalyser {
private[this] val logger = getLogger
case class Result(labels: Vector[NerLabel], dates: Vector[NerDateLabel]) {
@ -87,10 +85,11 @@ object TextAnalyser {
private object Nlp {
def apply[F[_]: Async](
cfg: TextAnalysisConfig.NlpConfig
): F[Input[F] => F[Vector[NerLabel]]] =
): F[Input[F] => F[Vector[NerLabel]]] = {
val log = docspell.logging.getLogger[F]
cfg.mode match {
case NlpMode.Disabled =>
Logger.log4s(logger).info("NLP is disabled as defined in config.") *>
log.info("NLP is disabled as defined in config.") *>
Applicative[F].pure(_ => Vector.empty[NerLabel].pure[F])
case _ =>
PipelineCache(cfg.clearInterval)(
@ -99,6 +98,7 @@ object TextAnalyser {
)
.map(annotate[F])
}
}
final case class Input[F[_]](
key: Ident,


@ -17,6 +17,7 @@ import docspell.analysis.classifier.TextClassifier._
import docspell.analysis.nlp.Properties
import docspell.common._
import docspell.common.syntax.FileSyntax._
import docspell.logging.Logger
import edu.stanford.nlp.classify.ColumnDataClassifier


@ -10,7 +10,7 @@ import cats.data.Kleisli
import fs2.Stream
import docspell.analysis.classifier.TextClassifier.Data
import docspell.common._
import docspell.logging.Logger
trait TextClassifier[F[_]] {


@ -12,6 +12,7 @@ import cats.{Applicative, FlatMap}
import docspell.analysis.NlpSettings
import docspell.common._
import docspell.logging.Logger
import edu.stanford.nlp.pipeline.StanfordCoreNLP


@ -19,14 +19,13 @@ import docspell.common._
import edu.stanford.nlp.ie.AbstractSequenceClassifier
import edu.stanford.nlp.ie.crf.CRFClassifier
import edu.stanford.nlp.ling.{CoreAnnotations, CoreLabel}
import org.log4s.getLogger
/** This is only using the CRFClassifier without building an analysis pipeline. The
* ner-classifier cannot use results from POS-tagging etc. and is therefore not as good
* as the [[StanfordNerAnnotator]]. But it uses less memory while still giving
* reasonable results.
*/
object BasicCRFAnnotator {
private[this] val logger = getLogger
private[this] val logger = docspell.logging.unsafeLogger
// assert correct resource names
NLPLanguage.all.toList.foreach(classifierResource)
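
A hedged illustration of the standalone-classifier approach described in the comment above (Stanford's CRFClassifier used directly, without a CoreNLP pipeline); the model path is an assumption based on the standard Stanford NER models artifact:

// Sketch: NER with a bare CRFClassifier.
import edu.stanford.nlp.ie.crf.CRFClassifier

object CrfNerExample {
  def main(args: Array[String]): Unit = {
    val classifier = CRFClassifier.getClassifierNoExceptions(
      "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
    )
    // prints the text with tokens tagged PERSON/ORGANIZATION/LOCATION or O
    println(classifier.classifyToString("Jane Doe works at Acme Corp in Berlin."))
  }
}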


@ -15,8 +15,6 @@ import cats.implicits._
import docspell.analysis.NlpSettings
import docspell.common._
import org.log4s.getLogger
/** Creating the StanfordCoreNLP pipeline is quite expensive as it involves IO and
* initializing large objects.
*
@ -31,17 +29,19 @@ trait PipelineCache[F[_]] {
}
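
A minimal sketch of the caching idea behind this trait, assuming a Ref-backed map keyed by the settings (a simplification; the Impl below also handles expiry via clearInterval):

// Sketch: memoize expensive pipeline creation with a cats-effect Ref.
import cats.effect._
import cats.implicits._

final class SimpleCache[F[_]: Sync, K, V](data: Ref[F, Map[K, V]], mk: K => F[V]) {
  def obtain(key: K): F[V] =
    data.get.flatMap {
      case m if m.contains(key) => m(key).pure[F]
      case _ =>
        // note: two concurrent first requests may both create; the real
        // implementation must guard against that
        mk(key).flatTap(v => data.update(_.updated(key, v)))
    }
}

object SimpleCache {
  def create[F[_]: Sync, K, V](mk: K => F[V]): F[SimpleCache[F, K, V]] =
    Ref.of[F, Map[K, V]](Map.empty).map(new SimpleCache(_, mk))
}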
object PipelineCache {
private[this] val logger = getLogger
private[this] val logger = docspell.logging.unsafeLogger
def apply[F[_]: Async](clearInterval: Duration)(
creator: NlpSettings => Annotator[F],
release: F[Unit]
): F[PipelineCache[F]] =
): F[PipelineCache[F]] = {
val log = docspell.logging.getLogger[F]
for {
data <- Ref.of(Map.empty[String, Entry[Annotator[F]]])
cacheClear <- CacheClearing.create(data, clearInterval, release)
_ <- Logger.log4s(logger).info("Creating nlp pipeline cache")
_ <- log.info("Creating nlp pipeline cache")
} yield new Impl[F](data, creator, cacheClear)
}
final private class Impl[F[_]: Async](
data: Ref[F, Map[String, Entry[Annotator[F]]]],
@ -116,7 +116,7 @@ object PipelineCache {
for {
counter <- Ref.of(0L)
cleaning <- Ref.of(None: Option[Fiber[F, Throwable, Unit]])
log = Logger.log4s(logger)
log = docspell.logging.getLogger[F]
result <-
if (interval.millis <= 0)
log
@ -145,7 +145,7 @@ object PipelineCache {
release: F[Unit]
)(implicit F: Async[F])
extends CacheClearing[F] {
private[this] val log = Logger.log4s[F](logger)
private[this] val log = docspell.logging.getLogger[F]
def withCache: Resource[F, Unit] =
Resource.make(counter.update(_ + 1) *> cancelClear)(_ =>


@ -14,10 +14,9 @@ import fs2.io.file.Path
import docspell.common._
import edu.stanford.nlp.pipeline.{CoreDocument, StanfordCoreNLP}
import org.log4s.getLogger
object StanfordNerAnnotator {
private[this] val logger = getLogger
private[this] val logger = docspell.logging.unsafeLogger
/** Runs named entity recognition on the given `text`.
*


@ -17,11 +17,12 @@ import fs2.io.file.Files
import docspell.analysis.classifier.TextClassifier.Data
import docspell.common._
import docspell.logging.TestLoggingConfig
import munit._
class StanfordTextClassifierSuite extends FunSuite {
val logger = Logger.log4s[IO](org.log4s.getLogger)
class StanfordTextClassifierSuite extends FunSuite with TestLoggingConfig {
val logger = docspell.logging.getLogger[IO]
test("learn from data") {
val cfg = TextClassifierConfig(File.path(Paths.get("target")), NonEmptyList.of(Map()))


@ -10,10 +10,11 @@ import docspell.analysis.Env
import docspell.common.Language.NLPLanguage
import docspell.common._
import docspell.files.TestFiles
import docspell.logging.TestLoggingConfig
import munit._
class BaseCRFAnnotatorSuite extends FunSuite {
class BaseCRFAnnotatorSuite extends FunSuite with TestLoggingConfig {
def annotate(language: NLPLanguage): String => Vector[NerLabel] =
BasicCRFAnnotator.nerAnnotate(BasicCRFAnnotator.Cache.getAnnotator(language))


@ -14,11 +14,12 @@ import cats.effect.unsafe.implicits.global
import docspell.analysis.Env
import docspell.common._
import docspell.files.TestFiles
import docspell.logging.TestLoggingConfig
import edu.stanford.nlp.pipeline.StanfordCoreNLP
import munit._
class StanfordNerAnnotatorSuite extends FunSuite {
class StanfordNerAnnotatorSuite extends FunSuite with TestLoggingConfig {
lazy val germanClassifier =
new StanfordCoreNLP(Properties.nerGerman(None, false))
lazy val englishClassifier =


@ -17,7 +17,6 @@ import docspell.store.queries.QLogin
import docspell.store.records._
import docspell.totp.{OnetimePassword, Totp}
import org.log4s.getLogger
import org.mindrot.jbcrypt.BCrypt
import scodec.bits.ByteVector
@ -41,8 +40,6 @@ trait Login[F[_]] {
}
object Login {
private[this] val logger = getLogger
case class Config(
serverSecret: ByteVector,
sessionValid: Duration,
@ -93,7 +90,7 @@ object Login {
def apply[F[_]: Async](store: Store[F], totp: Totp): Resource[F, Login[F]] =
Resource.pure[F, Login[F]](new Login[F] {
private val logF = Logger.log4s(logger)
private val logF = docspell.logging.getLogger[F]
def loginExternal(config: Config)(accountId: AccountId): F[Result] =
for {
@ -124,7 +121,7 @@ object Login {
case Right(acc) =>
for {
data <- store.transact(QLogin.findUser(acc))
_ <- Sync[F].delay(logger.trace(s"Account lookup: $data"))
_ <- logF.trace(s"Account lookup: $data")
res <-
if (data.exists(check(up.pass))) doLogin(config, acc, up.rememberMe)
else Result.invalidAuth.pure[F]


@ -12,6 +12,7 @@ import cats.effect._
import docspell.common._
import docspell.ftsclient.FtsClient
import docspell.ftsclient.TextData
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.queries.QAttachment
import docspell.store.queries.QItem


@ -14,6 +14,7 @@ import cats.implicits._
import docspell.backend.fulltext.CreateIndex
import docspell.backend.ops.OItem
import docspell.common._
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.queries.QCustomField
import docspell.store.queries.QCustomField.FieldValue


@ -37,11 +37,9 @@ trait OClientSettings[F[_]] {
}
object OClientSettings {
private[this] val logger = org.log4s.getLogger
def apply[F[_]: Async](store: Store[F]): Resource[F, OClientSettings[F]] =
Resource.pure[F, OClientSettings[F]](new OClientSettings[F] {
val log = Logger.log4s[F](logger)
val log = docspell.logging.getLogger[F]
private def getUserId(account: AccountId): OptionT[F, Ident] =
OptionT(store.transact(RUser.findByAccount(account))).map(_.uid)


@ -31,7 +31,6 @@ import docspell.store.records.RCustomFieldValue
import docspell.store.records.RItem
import doobie._
import org.log4s.getLogger
trait OCustomFields[F[_]] {
@ -153,7 +152,7 @@ object OCustomFields {
): Resource[F, OCustomFields[F]] =
Resource.pure[F, OCustomFields[F]](new OCustomFields[F] {
private[this] val logger = Logger.log4s[ConnectionIO](getLogger)
private[this] val logger = docspell.logging.getLogger[ConnectionIO]
def findAllValues(itemIds: Nel[Ident]): F[List[FieldValue]] =
store.transact(QCustomField.findAllValues(itemIds))
@ -224,7 +223,7 @@ object OCustomFields {
.transact(RItem.existsByIdsAndCollective(items, value.collective))
.map(flag => if (flag) Right(()) else Left(SetValueResult.itemNotFound))
)
nu <- EitherT.right[SetValueResult](
_ <- EitherT.right[SetValueResult](
items
.traverse(item => store.transact(RCustomField.setValue(field, item, fval)))
.map(_.toList.sum)


@ -14,7 +14,6 @@ import fs2.Stream
import docspell.backend.JobFactory
import docspell.backend.ops.OItemSearch._
import docspell.common._
import docspell.common.syntax.all._
import docspell.ftsclient._
import docspell.query.ItemQuery._
import docspell.query.ItemQueryDsl._
@ -23,8 +22,6 @@ import docspell.store.queue.JobQueue
import docspell.store.records.RJob
import docspell.store.{Store, qb}
import org.log4s.getLogger
trait OFulltext[F[_]] {
def findItems(maxNoteLen: Int)(
@ -59,7 +56,6 @@ trait OFulltext[F[_]] {
}
object OFulltext {
private[this] val logger = getLogger
case class FtsInput(
query: String,
@ -89,16 +85,17 @@ object OFulltext {
joex: OJoex[F]
): Resource[F, OFulltext[F]] =
Resource.pure[F, OFulltext[F]](new OFulltext[F] {
val logger = docspell.logging.getLogger[F]
def reindexAll: F[Unit] =
for {
_ <- logger.finfo(s"Re-index all.")
_ <- logger.info(s"Re-index all.")
job <- JobFactory.reIndexAll[F]
_ <- queue.insertIfNew(job) *> joex.notifyAllNodes
} yield ()
def reindexCollective(account: AccountId): F[Unit] =
for {
_ <- logger.fdebug(s"Re-index collective: $account")
_ <- logger.debug(s"Re-index collective: $account")
exist <- store.transact(
RJob.findNonFinalByTracker(DocspellSystem.migrationTaskTracker)
)
@ -123,7 +120,7 @@ object OFulltext {
FtsQuery.HighlightSetting(ftsQ.highlightPre, ftsQ.highlightPost)
)
for {
_ <- logger.ftrace(s"Find index only: ${ftsQ.query}/$batch")
_ <- logger.trace(s"Find index only: ${ftsQ.query}/$batch")
folders <- store.transact(QFolder.getMemberFolders(account))
ftsR <- fts.search(fq.withFolders(folders))
ftsItems = ftsR.results.groupBy(_.itemId)


@ -16,6 +16,7 @@ import docspell.backend.fulltext.CreateIndex
import docspell.backend.item.Merge
import docspell.common._
import docspell.ftsclient.FtsClient
import docspell.logging.Logger
import docspell.notification.api.Event
import docspell.store.queries.{QAttachment, QItem, QMoveAttachment}
import docspell.store.queue.JobQueue
@ -23,7 +24,6 @@ import docspell.store.records._
import docspell.store.{AddResult, Store, UpdateResult}
import doobie.implicits._
import org.log4s.getLogger
trait OItem[F[_]] {
@ -235,7 +235,7 @@ object OItem {
otag <- OTag(store)
oorg <- OOrganization(store)
oequip <- OEquipment(store)
logger <- Resource.pure[F, Logger[F]](Logger.log4s(getLogger))
logger <- Resource.pure[F, Logger[F]](docspell.logging.getLogger[F])
oitem <- Resource.pure[F, OItem[F]](new OItem[F] {
def merge(


@ -59,7 +59,7 @@ object OJob {
pubsub: PubSubT[F]
): Resource[F, OJob[F]] =
Resource.pure[F, OJob[F]](new OJob[F] {
private[this] val logger = Logger.log4s(org.log4s.getLogger(OJob.getClass))
private[this] val logger = docspell.logging.getLogger[F]
def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] =
store


@ -9,13 +9,10 @@ package docspell.backend.ops
import cats.effect.{Async, Resource}
import cats.implicits._
import docspell.common.syntax.all._
import docspell.common.{Ident, LenientUri, NodeType}
import docspell.store.Store
import docspell.store.records.RNode
import org.log4s._
trait ONode[F[_]] {
def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit]
@ -24,20 +21,19 @@ trait ONode[F[_]] {
}
object ONode {
private[this] val logger = getLogger
def apply[F[_]: Async](store: Store[F]): Resource[F, ONode[F]] =
Resource.pure[F, ONode[F]](new ONode[F] {
val logger = docspell.logging.getLogger[F]
def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit] =
for {
node <- RNode(appId, nodeType, uri)
_ <- logger.finfo(s"Registering node ${node.id.id}")
_ <- logger.info(s"Registering node ${node.id.id}")
_ <- store.transact(RNode.set(node))
} yield ()
def unregister(appId: Ident): F[Unit] =
logger.finfo(s"Unregister app ${appId.id}") *>
logger.info(s"Unregister app ${appId.id}") *>
store.transact(RNode.delete(appId)).map(_ => ())
})


@ -6,9 +6,6 @@
package docspell.backend.ops
import java.io.PrintWriter
import java.io.StringWriter
import cats.data.OptionT
import cats.data.{NonEmptyList => Nel}
import cats.effect._
@ -17,6 +14,7 @@ import cats.implicits._
import docspell.backend.ops.ONotification.Hook
import docspell.common._
import docspell.jsonminiq.JsonMiniQuery
import docspell.logging.{Level, LogEvent, Logger}
import docspell.notification.api._
import docspell.store.AddResult
import docspell.store.Store
@ -75,14 +73,13 @@ trait ONotification[F[_]] {
}
object ONotification {
private[this] val logger = org.log4s.getLogger
def apply[F[_]: Async](
store: Store[F],
notMod: NotificationModule[F]
): Resource[F, ONotification[F]] =
Resource.pure[F, ONotification[F]](new ONotification[F] {
val log = Logger.log4s[F](logger)
val log = docspell.logging.getLogger[F]
def withUserId[A](
account: AccountId
@ -129,9 +126,9 @@ object ONotification {
.map {
case Right(res) => res
case Left(ex) =>
val ps = new StringWriter()
ex.printStackTrace(new PrintWriter(ps))
SendTestResult(false, Vector(s"${ex.getMessage}\n$ps"))
val ev =
LogEvent.of(Level.Error, "Failed sending sample event").addError(ex)
SendTestResult(false, Vector(ev))
}
def listChannels(account: AccountId): F[Vector[Channel]] =
@ -316,5 +313,5 @@ object ONotification {
} yield h
}
final case class SendTestResult(success: Boolean, logMessages: Vector[String])
final case class SendTestResult(success: Boolean, logEvents: Vector[LogEvent])
}
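
The SendTestResult change above replaces formatted stack-trace strings with structured log events. A hedged sketch of the LogEvent shape implied by `LogEvent.of(Level.Error, ...).addError(ex)` (inferred from the call sites, not the actual source):

// Sketch of the assumed docspell.logging.LogEvent shape.
import docspell.logging.Level

final case class LogEvent(level: Level, msg: () => String, errors: List[Throwable]) {
  def addError(ex: Throwable): LogEvent = copy(errors = ex :: errors)
}
object LogEvent {
  def of(level: Level, msg: => String): LogEvent = LogEvent(level, () => msg, Nil)
}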


@ -152,7 +152,7 @@ object OShare {
emil: Emil[F]
): OShare[F] =
new OShare[F] {
private[this] val logger = Logger.log4s[F](org.log4s.getLogger)
private[this] val logger = docspell.logging.getLogger[F]
def findAll(
collective: Ident,


@ -5,6 +5,7 @@
*/
package docspell.backend.ops
import cats.effect._
import cats.implicits._
@ -14,8 +15,6 @@ import docspell.store.records.{RTotp, RUser}
import docspell.store.{AddResult, Store, UpdateResult}
import docspell.totp.{Key, OnetimePassword, Totp}
import org.log4s.getLogger
trait OTotp[F[_]] {
/** Return whether TOTP is enabled for this account or not. */
@ -38,8 +37,6 @@ trait OTotp[F[_]] {
}
object OTotp {
private[this] val logger = getLogger
sealed trait OtpState {
def isEnabled: Boolean
def isDisabled = !isEnabled
@ -86,7 +83,7 @@ object OTotp {
def apply[F[_]: Async](store: Store[F], totp: Totp): Resource[F, OTotp[F]] =
Resource.pure[F, OTotp[F]](new OTotp[F] {
val log = Logger.log4s[F](logger)
val log = docspell.logging.getLogger[F]
def initialize(accountId: AccountId): F[InitResult] =
for {


@ -14,13 +14,10 @@ import fs2.Stream
import docspell.backend.JobFactory
import docspell.common._
import docspell.common.syntax.all._
import docspell.store.Store
import docspell.store.queue.JobQueue
import docspell.store.records._
import org.log4s._
trait OUpload[F[_]] {
def submit(
@ -56,8 +53,6 @@ trait OUpload[F[_]] {
}
object OUpload {
private[this] val logger = getLogger
case class File[F[_]](
name: Option[String],
advertisedMime: Option[MimeType],
@ -117,7 +112,7 @@ object OUpload {
joex: OJoex[F]
): Resource[F, OUpload[F]] =
Resource.pure[F, OUpload[F]](new OUpload[F] {
private[this] val logger = docspell.logging.getLogger[F]
def submit(
data: OUpload.UploadData[F],
account: AccountId,
@ -155,7 +150,7 @@ object OUpload {
if (data.multiple) files.map(f => ProcessItemArgs(meta, List(f)))
else Vector(ProcessItemArgs(meta, files.toList))
jobs <- right(makeJobs(args, account, data.priority, data.tracker))
_ <- right(logger.fdebug(s"Storing jobs: $jobs"))
_ <- right(logger.debug(s"Storing jobs: $jobs"))
res <- right(submitJobs(notifyJoex)(jobs))
_ <- right(
store.transact(
@ -194,7 +189,7 @@ object OUpload {
notifyJoex: Boolean
)(jobs: Vector[RJob]): F[OUpload.UploadResult] =
for {
_ <- logger.fdebug(s"Storing jobs: $jobs")
_ <- logger.debug(s"Storing jobs: $jobs")
_ <- queue.insertAll(jobs)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UploadResult.Success
@ -203,7 +198,7 @@ object OUpload {
private def saveFile(
accountId: AccountId
)(file: File[F]): F[Option[ProcessItemArgs.File]] =
logger.finfo(s"Receiving file $file") *>
logger.info(s"Receiving file $file") *>
file.data
.through(
store.fileRepo.save(


@ -11,12 +11,10 @@ import cats.implicits._
import docspell.backend.PasswordCrypt
import docspell.common._
import docspell.common.syntax.all._
import docspell.store.records.{RCollective, RInvitation, RUser}
import docspell.store.{AddResult, Store}
import doobie.free.connection.ConnectionIO
import org.log4s.getLogger
trait OSignup[F[_]] {
@ -29,10 +27,10 @@ trait OSignup[F[_]] {
}
object OSignup {
private[this] val logger = getLogger
def apply[F[_]: Async](store: Store[F]): Resource[F, OSignup[F]] =
Resource.pure[F, OSignup[F]](new OSignup[F] {
private[this] val logger = docspell.logging.getLogger[F]
def newInvite(cfg: Config)(password: Password): F[NewInviteResult] =
if (cfg.mode == Config.Mode.Invite)
@ -66,7 +64,7 @@ object OSignup {
_ <-
if (retryInvite(res))
logger
.fdebug(
.debug(
s"Adding account failed ($res). Allow retry with invite."
) *> store
.transact(


@ -6,6 +6,8 @@
package docspell.common
import docspell.logging.Level
import io.circe.{Decoder, Encoder}
sealed trait LogLevel { self: Product =>
@ -40,6 +42,16 @@ object LogLevel {
case _ => Left(s"Invalid log-level: $str")
}
def fromLevel(level: Level): LogLevel =
level match {
case Level.Fatal => LogLevel.Error
case Level.Error => LogLevel.Error
case Level.Warn => LogLevel.Warn
case Level.Info => LogLevel.Info
case Level.Debug => LogLevel.Debug
case Level.Trace => LogLevel.Debug
}
def unsafeString(str: String): LogLevel =
fromString(str).fold(sys.error, identity)


@ -1,143 +0,0 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common
import java.io.{PrintWriter, StringWriter}
import cats.Applicative
import cats.effect.{Ref, Sync}
import cats.implicits._
import fs2.Stream
import docspell.common.syntax.all._
import org.log4s.{Logger => Log4sLogger}
trait Logger[F[_]] { self =>
def trace(msg: => String): F[Unit]
def debug(msg: => String): F[Unit]
def info(msg: => String): F[Unit]
def warn(msg: => String): F[Unit]
def error(ex: Throwable)(msg: => String): F[Unit]
def error(msg: => String): F[Unit]
final def s: Logger[Stream[F, *]] = new Logger[Stream[F, *]] {
def trace(msg: => String): Stream[F, Unit] =
Stream.eval(self.trace(msg))
def debug(msg: => String): Stream[F, Unit] =
Stream.eval(self.debug(msg))
def info(msg: => String): Stream[F, Unit] =
Stream.eval(self.info(msg))
def warn(msg: => String): Stream[F, Unit] =
Stream.eval(self.warn(msg))
def error(msg: => String): Stream[F, Unit] =
Stream.eval(self.error(msg))
def error(ex: Throwable)(msg: => String): Stream[F, Unit] =
Stream.eval(self.error(ex)(msg))
}
def andThen(other: Logger[F])(implicit F: Sync[F]): Logger[F] = {
val self = this
new Logger[F] {
def trace(msg: => String) =
self.trace(msg) >> other.trace(msg)
override def debug(msg: => String) =
self.debug(msg) >> other.debug(msg)
override def info(msg: => String) =
self.info(msg) >> other.info(msg)
override def warn(msg: => String) =
self.warn(msg) >> other.warn(msg)
override def error(ex: Throwable)(msg: => String) =
self.error(ex)(msg) >> other.error(ex)(msg)
override def error(msg: => String) =
self.error(msg) >> other.error(msg)
}
}
}
object Logger {
def off[F[_]: Applicative]: Logger[F] =
new Logger[F] {
def trace(msg: => String): F[Unit] =
Applicative[F].pure(())
def debug(msg: => String): F[Unit] =
Applicative[F].pure(())
def info(msg: => String): F[Unit] =
Applicative[F].pure(())
def warn(msg: => String): F[Unit] =
Applicative[F].pure(())
def error(ex: Throwable)(msg: => String): F[Unit] =
Applicative[F].pure(())
def error(msg: => String): F[Unit] =
Applicative[F].pure(())
}
def log4s[F[_]: Sync](log: Log4sLogger): Logger[F] =
new Logger[F] {
def trace(msg: => String): F[Unit] =
log.ftrace(msg)
def debug(msg: => String): F[Unit] =
log.fdebug(msg)
def info(msg: => String): F[Unit] =
log.finfo(msg)
def warn(msg: => String): F[Unit] =
log.fwarn(msg)
def error(ex: Throwable)(msg: => String): F[Unit] =
log.ferror(ex)(msg)
def error(msg: => String): F[Unit] =
log.ferror(msg)
}
def buffer[F[_]: Sync](): F[(Ref[F, Vector[String]], Logger[F])] =
for {
buffer <- Ref.of[F, Vector[String]](Vector.empty[String])
logger = new Logger[F] {
def trace(msg: => String) =
buffer.update(_.appended(s"TRACE $msg"))
def debug(msg: => String) =
buffer.update(_.appended(s"DEBUG $msg"))
def info(msg: => String) =
buffer.update(_.appended(s"INFO $msg"))
def warn(msg: => String) =
buffer.update(_.appended(s"WARN $msg"))
def error(ex: Throwable)(msg: => String) = {
val ps = new StringWriter()
ex.printStackTrace(new PrintWriter(ps))
buffer.update(_.appended(s"ERROR $msg:\n$ps"))
}
def error(msg: => String) =
buffer.update(_.appended(s"ERROR $msg"))
}
} yield (buffer, logger)
}


@ -17,6 +17,8 @@ import cats.implicits._
import fs2.io.file.Path
import fs2.{Stream, io, text}
import docspell.logging.Logger
object SystemCommand {
final case class Config(program: String, args: Seq[String], timeout: Duration) {


@ -1,42 +0,0 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common.syntax
import cats.effect.Sync
import fs2.Stream
import org.log4s.Logger
trait LoggerSyntax {
implicit final class LoggerOps(logger: Logger) {
def ftrace[F[_]: Sync](msg: => String): F[Unit] =
Sync[F].delay(logger.trace(msg))
def fdebug[F[_]: Sync](msg: => String): F[Unit] =
Sync[F].delay(logger.debug(msg))
def sdebug[F[_]: Sync](msg: => String): Stream[F, Nothing] =
Stream.eval(fdebug(msg)).drain
def finfo[F[_]: Sync](msg: => String): F[Unit] =
Sync[F].delay(logger.info(msg))
def sinfo[F[_]: Sync](msg: => String): Stream[F, Nothing] =
Stream.eval(finfo(msg)).drain
def fwarn[F[_]: Sync](msg: => String): F[Unit] =
Sync[F].delay(logger.warn(msg))
def ferror[F[_]: Sync](msg: => String): F[Unit] =
Sync[F].delay(logger.error(msg))
def ferror[F[_]: Sync](ex: Throwable)(msg: => String): F[Unit] =
Sync[F].delay(logger.error(ex)(msg))
}
}


@ -8,11 +8,6 @@ package docspell.common
package object syntax {
object all
extends EitherSyntax
with StreamSyntax
with StringSyntax
with LoggerSyntax
with FileSyntax
object all extends EitherSyntax with StreamSyntax with StringSyntax with FileSyntax
}


@ -13,7 +13,7 @@ import cats.effect._
import cats.implicits._
import fs2.io.file.{Files, Path}
import docspell.common.Logger
import docspell.logging.Logger
import pureconfig.{ConfigReader, ConfigSource}


@ -13,6 +13,7 @@ import scala.reflect.ClassTag
import fs2.io.file.Path
import docspell.common._
import docspell.logging.{Level, LogConfig}
import com.github.eikek.calev.CalEvent
import pureconfig.ConfigReader
@ -63,6 +64,12 @@ object Implicits {
implicit val nlpModeReader: ConfigReader[NlpMode] =
ConfigReader[String].emap(reason(NlpMode.fromString))
implicit val logFormatReader: ConfigReader[LogConfig.Format] =
ConfigReader[String].emap(reason(LogConfig.Format.fromString))
implicit val logLevelReader: ConfigReader[Level] =
ConfigReader[String].emap(reason(Level.fromString))
def reason[A: ClassTag](
f: String => Either[String, A]
): String => Either[FailureReason, A] =
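
A hedged usage sketch for the new Level reader, using pureconfig's standard ConfigSource API (the level name "Info" is an assumption consistent with this commit's config defaults):

// Sketch: parsing a minimum log level through the reader defined above.
import docspell.config.Implicits._
import docspell.logging.Level
import pureconfig.ConfigSource

object LevelReaderExample {
  // Right(Level.Info) on success, Left(ConfigReaderFailures) otherwise
  val parsed = ConfigSource.string("level = Info").at("level").load[Level]
}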


@ -17,6 +17,7 @@ import docspell.convert.ConversionResult.Handler
import docspell.convert.extern._
import docspell.convert.flexmark.Markdown
import docspell.files.{ImageSize, TikaMimetype}
import docspell.logging.Logger
import scodec.bits.ByteVector
@ -46,7 +47,7 @@ object Conversion {
val allPass = cfg.decryptPdf.passwords ++ additionalPasswords
val pdfStream =
if (cfg.decryptPdf.enabled) {
logger.s
logger.stream
.debug(s"Trying to read the PDF using ${allPass.size} passwords")
.drain ++
in.through(RemovePdfEncryption(logger, allPass))


@ -12,6 +12,7 @@ import cats.effect._
import fs2.{Chunk, Pipe, Stream}
import docspell.common._
import docspell.logging.Logger
import org.apache.pdfbox.pdmodel.PDDocument
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException
@ -36,7 +37,7 @@ object RemovePdfEncryption {
.head
.flatMap { doc =>
if (doc.isEncrypted) {
logger.s.debug("Removing protection/encryption from PDF").drain ++
logger.stream.debug("Removing protection/encryption from PDF").drain ++
Stream.eval(Sync[F].delay(doc.setAllSecurityToBeRemoved(true))).drain ++
toStream[F](doc)
} else {
@ -44,7 +45,7 @@ object RemovePdfEncryption {
}
}
.ifEmpty(
logger.s
logger.stream
.info(
s"None of the passwords helped to read the given PDF!"
)
@ -64,7 +65,8 @@ object RemovePdfEncryption {
val log =
if (pw.isEmpty) Stream.empty
else logger.s.debug(s"Try opening PDF with password: ${pw.pass.take(2)}***").drain
else
logger.stream.debug(s"Try opening PDF with password: ${pw.pass.take(2)}***").drain
in =>
Stream


@ -14,6 +14,7 @@ import fs2.{Pipe, Stream}
import docspell.common._
import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.{Handler, successPdf, successPdfTxt}
import docspell.logging.Logger
private[extern] object ExternConv {


@ -13,6 +13,7 @@ import fs2.io.file.Path
import docspell.common._
import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.Handler
import docspell.logging.Logger
object OcrMyPdf {


@ -13,6 +13,7 @@ import fs2.io.file.Path
import docspell.common._
import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.Handler
import docspell.logging.Logger
object Tesseract {


@ -13,6 +13,7 @@ import fs2.io.file.Path
import docspell.common._
import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.Handler
import docspell.logging.Logger
object Unoconv {


@ -16,6 +16,7 @@ import fs2.{Chunk, Stream}
import docspell.common._
import docspell.convert.ConversionResult.Handler
import docspell.convert.{ConversionResult, SanitizeHtml}
import docspell.logging.Logger
object WkHtmlPdf {


@ -20,12 +20,13 @@ import docspell.convert.extern.OcrMyPdfConfig
import docspell.convert.extern.{TesseractConfig, UnoconvConfig, WkHtmlPdfConfig}
import docspell.convert.flexmark.MarkdownConfig
import docspell.files.ExampleFiles
import docspell.logging.TestLoggingConfig
import munit._
class ConversionTest extends FunSuite with FileChecks {
class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
val logger = Logger.log4s[IO](org.log4s.getLogger)
val logger = docspell.logging.getLogger[IO]
val target = File.path(Paths.get("target"))
val convertConfig = ConvertConfig(


@ -11,11 +11,15 @@ import fs2.Stream
import docspell.common._
import docspell.files.ExampleFiles
import docspell.logging.{Logger, TestLoggingConfig}
import munit.CatsEffectSuite
class RemovePdfEncryptionTest extends CatsEffectSuite with FileChecks {
val logger: Logger[IO] = Logger.log4s(org.log4s.getLogger)
class RemovePdfEncryptionTest
extends CatsEffectSuite
with FileChecks
with TestLoggingConfig {
val logger: Logger[IO] = docspell.logging.getLogger[IO]
private val protectedPdf =
ExampleFiles.secured_protected_test123_pdf.readURL[IO](16 * 1024)


@ -16,12 +16,13 @@ import fs2.io.file.Path
import docspell.common._
import docspell.convert._
import docspell.files.ExampleFiles
import docspell.logging.TestLoggingConfig
import munit._
class ExternConvTest extends FunSuite with FileChecks {
class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
val utf8 = StandardCharsets.UTF_8
val logger = Logger.log4s[IO](org.log4s.getLogger)
val logger = docspell.logging.getLogger[IO]
val target = File.path(Paths.get("target"))
test("convert html to pdf") {


@ -18,6 +18,7 @@ import docspell.extract.poi.{PoiExtract, PoiType}
import docspell.extract.rtf.RtfExtract
import docspell.files.ImageSize
import docspell.files.TikaMimetype
import docspell.logging.Logger
trait Extraction[F[_]] {


@ -10,11 +10,12 @@ import cats.effect._
import cats.implicits._
import fs2.Stream
import docspell.common.{Language, Logger}
import docspell.common.Language
import docspell.extract.internal.Text
import docspell.extract.ocr.{OcrConfig, TextExtract}
import docspell.extract.pdfbox.PdfMetaData
import docspell.extract.pdfbox.PdfboxExtract
import docspell.logging.Logger
object PdfExtract {
final case class Result(txt: Text, meta: Option[PdfMetaData])


@ -11,6 +11,7 @@ import fs2.Stream
import fs2.io.file.Path
import docspell.common._
import docspell.logging.Logger
object Ocr {


@ -12,6 +12,7 @@ import fs2.Stream
import docspell.common._
import docspell.extract.internal.Text
import docspell.files._
import docspell.logging.Logger
object TextExtract {


@ -32,7 +32,7 @@ trait PdfboxPreview[F[_]] {
}
object PdfboxPreview {
private[this] val logger = org.log4s.getLogger
private[this] val logger = docspell.logging.unsafeLogger
def apply[F[_]: Sync](cfg: PreviewConfig): F[PdfboxPreview[F]] =
Sync[F].pure(new PdfboxPreview[F] {


@ -9,15 +9,15 @@ package docspell.extract.ocr
import cats.effect.IO
import cats.effect.unsafe.implicits.global
import docspell.common.Logger
import docspell.files.TestFiles
import docspell.logging.TestLoggingConfig
import munit._
class TextExtractionSuite extends FunSuite {
class TextExtractionSuite extends FunSuite with TestLoggingConfig {
import TestFiles._
val logger = Logger.log4s[IO](org.log4s.getLogger)
val logger = docspell.logging.getLogger[IO]
test("extract english pdf".ignore) {
val text = TextExtract


@ -10,10 +10,11 @@ import cats.effect._
import cats.effect.unsafe.implicits.global
import docspell.files.ExampleFiles
import docspell.logging.TestLoggingConfig
import munit._
class OdfExtractTest extends FunSuite {
class OdfExtractTest extends FunSuite with TestLoggingConfig {
val files = List(
ExampleFiles.examples_sample_odt -> 6367,


@ -6,9 +6,11 @@
package docspell.extract.pdfbox
import docspell.logging.TestLoggingConfig
import munit._
class PdfMetaDataTest extends FunSuite {
class PdfMetaDataTest extends FunSuite with TestLoggingConfig {
test("split keywords on comma") {
val md = PdfMetaData.empty.copy(keywords = Some("a,b, c"))


@ -10,10 +10,11 @@ import cats.effect._
import cats.effect.unsafe.implicits.global
import docspell.files.{ExampleFiles, TestFiles}
import docspell.logging.TestLoggingConfig
import munit._
class PdfboxExtractTest extends FunSuite {
class PdfboxExtractTest extends FunSuite with TestLoggingConfig {
val textPDFs = List(
ExampleFiles.letter_de_pdf -> TestFiles.letterDEText,


@ -13,10 +13,11 @@ import fs2.io.file.Files
import fs2.io.file.Path
import docspell.files.ExampleFiles
import docspell.logging.TestLoggingConfig
import munit._
class PdfboxPreviewTest extends FunSuite {
class PdfboxPreviewTest extends FunSuite with TestLoggingConfig {
val testPDFs = List(
ExampleFiles.letter_de_pdf -> "7d98be75b239816d6c751b3f3c56118ebf1a4632c43baf35a68a662f9d595ab8",


@ -11,10 +11,11 @@ import cats.effect.unsafe.implicits.global
import docspell.common.MimeTypeHint
import docspell.files.ExampleFiles
import docspell.logging.TestLoggingConfig
import munit._
class PoiExtractTest extends FunSuite {
class PoiExtractTest extends FunSuite with TestLoggingConfig {
val officeFiles = List(
ExampleFiles.examples_sample_doc -> 6241,


@ -7,10 +7,11 @@
package docspell.extract.rtf
import docspell.files.ExampleFiles
import docspell.logging.TestLoggingConfig
import munit._
class RtfExtractTest extends FunSuite {
class RtfExtractTest extends FunSuite with TestLoggingConfig {
test("extract text from rtf using java input-stream") {
val file = ExampleFiles.examples_sample_rtf


@ -1,14 +0,0 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<withJansi>true</withJansi>
<encoder>
<pattern>level=%-5level thread=%thread logger=%logger{15} message="%replace(%msg){'"', '\\"'}"%n</pattern>
</encoder>
</appender>
<logger name="docspell" level="debug" />
<root level="error">
<appender-ref ref="STDOUT" />
</root>
</configuration>


@ -11,8 +11,7 @@ import cats.implicits._
import fs2.Stream
import docspell.common._
import org.log4s.getLogger
import docspell.logging.Logger
/** The fts client is the interface for docspell to a fulltext search engine.
*
@ -127,7 +126,7 @@ object FtsClient {
def none[F[_]: Sync] =
new FtsClient[F] {
private[this] val logger = Logger.log4s[F](getLogger)
private[this] val logger = docspell.logging.getLogger[F]
def initialize: F[List[FtsMigration[F]]] =
Sync[F].pure(Nil)


@ -12,10 +12,10 @@ import fs2.Stream
import docspell.common._
import docspell.ftsclient._
import docspell.logging.Logger
import org.http4s.client.Client
import org.http4s.client.middleware.Logger
import org.log4s.getLogger
import org.http4s.client.middleware.{Logger => Http4sLogger}
final class SolrFtsClient[F[_]: Async](
solrUpdate: SolrUpdate[F],
@ -81,7 +81,6 @@ final class SolrFtsClient[F[_]: Async](
}
object SolrFtsClient {
private[this] val logger = getLogger
def apply[F[_]: Async](
cfg: SolrConfig,
@ -100,11 +99,13 @@ object SolrFtsClient {
private def loggingMiddleware[F[_]: Async](
cfg: SolrConfig,
client: Client[F]
): Client[F] =
Logger(
): Client[F] = {
val delegate = docspell.logging.getLogger[F]
Http4sLogger(
logHeaders = true,
logBody = cfg.logVerbose,
logAction = Some((msg: String) => Sync[F].delay(logger.trace(msg)))
logAction = Some((msg: String) => delegate.trace(msg))
)(client)
}
}


@ -1,14 +0,0 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<withJansi>true</withJansi>
<encoder>
<pattern>level=%-5level thread=%thread logger=%logger{15} message="%replace(%msg){'"', '\\"'}"%n</pattern>
</encoder>
</appender>
<logger name="docspell" level="debug" />
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>
</configuration>


@ -18,6 +18,17 @@ docspell.joex {
port = 7878
}
# Configures logging
logging {
# The format for the log messages. Can be one of:
# Json, Logfmt, Fancy or Plain
format = "Plain"
# The minimum level to log. From lowest to highest:
# Trace, Debug, Info, Warn, Error
minimum-level = "Info"
}
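# As a usage illustration (a hypothetical override, not a shipped default),
# the same keys could be set to emit structured debug logs:
#
#   logging {
#     format = "Json"
#     minimum-level = "Debug"
#   }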
# The database connection.
#
# It must be the same connection as the rest server is using.


@ -21,12 +21,14 @@ import docspell.joex.hk.HouseKeepingConfig
import docspell.joex.routes.InternalHeader
import docspell.joex.scheduler.{PeriodicSchedulerConfig, SchedulerConfig}
import docspell.joex.updatecheck.UpdateCheckConfig
import docspell.logging.LogConfig
import docspell.pubsub.naive.PubSubConfig
import docspell.store.JdbcConfig
case class Config(
appId: Ident,
baseUrl: LenientUri,
logging: LogConfig,
bind: Config.Bind,
jdbc: JdbcConfig,
scheduler: SchedulerConfig,


@ -8,7 +8,6 @@ package docspell.joex
import cats.effect.Async
import docspell.common.Logger
import docspell.config.Implicits._
import docspell.config.{ConfigFactory, Validation}
import docspell.joex.scheduler.CountingScheme
@ -23,7 +22,7 @@ object ConfigFile {
import Implicits._
def loadConfig[F[_]: Async](args: List[String]): F[Config] = {
val logger = Logger.log4s[F](org.log4s.getLogger)
val logger = docspell.logging.getLogger[F]
ConfigFactory
.default[F, Config](logger, "docspell.joex")(args, validate)
}


@ -132,10 +132,8 @@ object JoexAppImpl extends MailAddressCodec {
): Resource[F, JoexApp[F]] =
for {
pstore <- PeriodicTaskStore.create(store)
pubSubT = PubSubT(
pubSub,
Logger.log4s(org.log4s.getLogger(s"joex-${cfg.appId.id}"))
)
joexLogger = docspell.logging.getLogger[F](s"joex-${cfg.appId.id}")
pubSubT = PubSubT(pubSub, joexLogger)
javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
notificationMod <- Resource.eval(


@ -9,12 +9,12 @@ package docspell.joex
import cats.effect._
import docspell.common._
import org.log4s.getLogger
import docspell.logging.Logger
import docspell.logging.impl.ScribeConfigure
object Main extends IOApp {
private val logger: Logger[IO] = Logger.log4s[IO](getLogger)
private val logger: Logger[IO] = docspell.logging.getLogger[IO]
private val connectEC =
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
@ -22,6 +22,7 @@ object Main extends IOApp {
def run(args: List[String]): IO[ExitCode] =
for {
cfg <- ConfigFile.loadConfig[IO](args)
_ <- ScribeConfigure.configure[IO](cfg.logging)
banner = Banner(
"JOEX",
BuildInfo.version,


@ -12,7 +12,6 @@ import cats.implicits._
import fs2.io.file.Path
import docspell.common._
import docspell.common.syntax.all._
import docspell.store.Store
import docspell.store.queries.QCollective
import docspell.store.records.REquipment
@ -20,7 +19,6 @@ import docspell.store.records.ROrganization
import docspell.store.records.RPerson
import io.circe.syntax._
import org.log4s.getLogger
/** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */
trait RegexNerFile[F[_]] {
@ -30,7 +28,6 @@ trait RegexNerFile[F[_]] {
}
object RegexNerFile {
private[this] val logger = getLogger
case class Config(maxEntries: Int, directory: Path, minTime: Duration)
@ -49,6 +46,8 @@ object RegexNerFile {
writer: Semaphore[F] // TODO allow parallelism per collective
) extends RegexNerFile[F] {
private[this] val logger = docspell.logging.getLogger[F]
def makeFile(collective: Ident): F[Option[Path]] =
if (cfg.maxEntries > 0) doMakeFile(collective)
else (None: Option[Path]).pure[F]
@ -61,7 +60,7 @@ object RegexNerFile {
case Some(nf) =>
val dur = Duration.between(nf.creation, now)
if (dur > cfg.minTime)
logger.fdebug(
logger.debug(
s"Cache time elapsed ($dur > ${cfg.minTime}). Check for new state."
) *> updateFile(
collective,
@ -89,12 +88,12 @@ object RegexNerFile {
case Some(cur) =>
val nerf =
if (cur.updated == lup)
logger.fdebug(s"No state change detected.") *> updateTimestamp(
logger.debug(s"No state change detected.") *> updateTimestamp(
cur,
now
) *> cur.pure[F]
else
logger.fdebug(
logger.debug(
s"There have been state changes for collective '${collective.id}'. Reload NER file."
) *> createFile(lup, collective, now)
nerf.map(_.nerFilePath(cfg.directory).some)
@ -126,7 +125,7 @@ object RegexNerFile {
writer.permit.use(_ =>
for {
jsonFile <- Sync[F].pure(nf.jsonFilePath(cfg.directory))
_ <- logger.fdebug(
_ <- logger.debug(
s"Writing custom NER file for collective '${collective.id}'"
)
_ <- jsonFile.parent match {
@ -139,7 +138,7 @@ object RegexNerFile {
)
for {
_ <- logger.finfo(s"Generating custom NER file for collective '${collective.id}'")
_ <- logger.info(s"Generating custom NER file for collective '${collective.id}'")
names <- store.transact(QCollective.allNames(collective, cfg.maxEntries))
nerFile = NerFile(collective, lastUpdate, now)
_ <- update(nerFile, NerFile.mkNerConfig(names))


@ -7,10 +7,10 @@
package docspell.joex.fts
import docspell.backend.fulltext.CreateIndex
import docspell.common.Logger
import docspell.ftsclient.FtsClient
import docspell.joex.Config
import docspell.joex.scheduler.Context
import docspell.logging.Logger
import docspell.store.Store
case class FtsContext[F[_]](


@ -15,6 +15,7 @@ import docspell.common._
import docspell.ftsclient._
import docspell.joex.Config
import docspell.joex.scheduler.Context
import docspell.logging.Logger
object FtsWork {
import syntax._


@ -15,6 +15,7 @@ import docspell.backend.fulltext.CreateIndex
import docspell.common._
import docspell.ftsclient._
import docspell.joex.Config
import docspell.logging.Logger
import docspell.store.Store
/** Migrating the index from the previous version to this version.


@ -11,6 +11,7 @@ import cats.implicits._
import docspell.common._
import docspell.joex.scheduler.{Context, Task}
import docspell.logging.Logger
import docspell.store.records._
import org.http4s.blaze.client.BlazeClientBuilder


@ -14,6 +14,7 @@ import fs2.io.file.Path
import docspell.analysis.classifier.{ClassifierModel, TextClassifier}
import docspell.common._
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.records.RClassifierModel


@ -15,6 +15,7 @@ import docspell.backend.ops.OCollective
import docspell.common._
import docspell.joex.Config
import docspell.joex.scheduler._
import docspell.logging.Logger
import docspell.store.records.{RClassifierModel, RClassifierSetting}
object LearnClassifierTask {


@ -13,6 +13,7 @@ import fs2.io.file.Files
import docspell.analysis.classifier.ClassifierModel
import docspell.common._
import docspell.joex.scheduler._
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.records.RClassifierModel


@ -11,6 +11,7 @@ import cats.implicits._
import fs2.{Pipe, Stream}
import docspell.common._
import docspell.logging.Logger
import docspell.store.syntax.MimeTypes._
import emil.javamail.syntax._


@ -12,6 +12,7 @@ import cats.implicits._
import docspell.backend.ops.ONotification
import docspell.common._
import docspell.logging.Logger
import docspell.notification.api.ChannelRef
import docspell.notification.api.Event
import docspell.notification.api.EventContext


@ -13,6 +13,7 @@ import cats.implicits._
import docspell.common._
import docspell.joex.scheduler.Task
import docspell.logging.Logger
/** After candidates have been determined, the set is reduced by doing some cross checks.
* For example: if an organization is suggested as correspondent, the correspondent person


@ -93,7 +93,7 @@ object ExtractArchive {
archive: Option[RAttachmentArchive]
)(ra: RAttachment, pos: Int, mime: MimeType): F[Extracted] =
mime match {
case MimeType.ZipMatch(_) if ra.name.exists(_.endsWith(".zip")) =>
case MimeType.ZipMatch(_) if ra.name.exists(_.toLowerCase.endsWith(".zip")) =>
ctx.logger.info(s"Extracting zip archive ${ra.name.getOrElse("<noname>")}.") *>
extractZip(ctx, archive)(ra, pos)
.flatMap(cleanupParents(ctx, ra, archive))


@ -49,7 +49,7 @@ object ReProcessItem {
private def contains[F[_]](ctx: Context[F, Args]): RAttachment => Boolean = {
val selection = ctx.args.attachments.toSet
if (selection.isEmpty) (_ => true)
if (selection.isEmpty) _ => true
else ra => selection.contains(ra.id)
}


@ -1,45 +0,0 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.joex.process
import cats.effect.Sync
import cats.implicits._
import docspell.common.ProcessItemArgs
import docspell.common.syntax.all._
import docspell.joex.scheduler.Task
import org.log4s._
object TestTasks {
private[this] val logger = getLogger
def success[F[_]]: Task[F, ProcessItemArgs, Unit] =
Task(ctx => ctx.logger.info(s"Running task now: ${ctx.args}"))
def failing[F[_]: Sync]: Task[F, ProcessItemArgs, Unit] =
Task { ctx =>
ctx.logger
.info(s"Failing the task run :(")
.map(_ => sys.error("Oh, cannot extract gold from this document"))
}
def longRunning[F[_]: Sync]: Task[F, ProcessItemArgs, Unit] =
Task { ctx =>
logger.fwarn(s"${Thread.currentThread()} From executing long running task") >>
ctx.logger.info(s"${Thread.currentThread()} Running task now: ${ctx.args}") >>
sleep(2400) >>
ctx.logger.debug("doing things") >>
sleep(2400) >>
ctx.logger.debug("doing more things") >>
sleep(2400) >>
ctx.logger.info("doing more things")
}
private def sleep[F[_]: Sync](ms: Long): F[Unit] =
Sync[F].delay(Thread.sleep(ms))
}


@ -17,6 +17,7 @@ import docspell.backend.ops.{OJoex, OUpload}
import docspell.common._
import docspell.joex.Config
import docspell.joex.scheduler.{Context, Task}
import docspell.logging.Logger
import docspell.store.queries.QOrganization
import docspell.store.records._


@ -11,12 +11,10 @@ import cats.implicits._
import cats.{Applicative, Functor}
import docspell.common._
import docspell.common.syntax.all._
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.records.RJob
import org.log4s.{Logger => _, _}
trait Context[F[_], A] { self =>
def jobId: Ident
@ -42,7 +40,6 @@ trait Context[F[_], A] { self =>
}
object Context {
private[this] val log = getLogger
def create[F[_]: Async, A](
jobId: Ident,
@ -59,13 +56,15 @@ object Context {
config: SchedulerConfig,
logSink: LogSink[F],
store: Store[F]
): F[Context[F, A]] =
): F[Context[F, A]] = {
val log = docspell.logging.getLogger[F]
for {
_ <- log.ftrace("Creating logger for task run")
_ <- log.trace("Creating logger for task run")
logger <- QueueLogger(job.id, job.info, config.logBufferSize, logSink)
_ <- log.ftrace("Logger created, instantiating context")
_ <- log.trace("Logger created, instantiating context")
ctx = create[F, A](job.id, arg, config, logger, store)
} yield ctx
}
final private class ContextImpl[F[_]: Functor, A](
val args: A,


@ -11,12 +11,10 @@ import cats.implicits._
import fs2.Pipe
import docspell.common._
import docspell.common.syntax.all._
import docspell.logging
import docspell.store.Store
import docspell.store.records.RJobLog
import org.log4s.{LogLevel => _, _}
trait LogSink[F[_]] {
def receive: Pipe[F, LogEvent, Unit]
@ -24,29 +22,33 @@ trait LogSink[F[_]] {
}
object LogSink {
private[this] val logger = getLogger
def apply[F[_]](sink: Pipe[F, LogEvent, Unit]): LogSink[F] =
new LogSink[F] {
val receive = sink
}
def logInternal[F[_]: Sync](e: LogEvent): F[Unit] =
def logInternal[F[_]: Sync](e: LogEvent): F[Unit] = {
val logger = docspell.logging.getLogger[F]
val addData: logging.LogEvent => logging.LogEvent =
_.data("jobId", e.jobId).data("jobInfo", e.jobInfo)
e.level match {
case LogLevel.Info =>
logger.finfo(e.logLine)
logger.infoWith(e.logLine)(addData)
case LogLevel.Debug =>
logger.fdebug(e.logLine)
logger.debugWith(e.logLine)(addData)
case LogLevel.Warn =>
logger.fwarn(e.logLine)
logger.warnWith(e.logLine)(addData)
case LogLevel.Error =>
e.ex match {
case Some(exc) =>
logger.ferror(exc)(e.logLine)
logger.errorWith(e.logLine)(addData.andThen(_.addError(exc)))
case None =>
logger.ferror(e.logLine)
logger.errorWith(e.logLine)(addData)
}
}
}
def printer[F[_]: Sync]: LogSink[F] =
LogSink(_.evalMap(e => logInternal(e)))

View File

@ -13,13 +13,11 @@ import fs2.concurrent.SignallingRef
import docspell.backend.ops.OJoex
import docspell.common._
import docspell.common.syntax.all._
import docspell.joex.scheduler.PeriodicSchedulerImpl.State
import docspell.store.queue._
import docspell.store.records.RPeriodicTask
import eu.timepit.fs2cron.calev.CalevScheduler
import org.log4s.getLogger
final class PeriodicSchedulerImpl[F[_]: Async](
val config: PeriodicSchedulerConfig,
@ -30,10 +28,10 @@ final class PeriodicSchedulerImpl[F[_]: Async](
waiter: SignallingRef[F, Boolean],
state: SignallingRef[F, State[F]]
) extends PeriodicScheduler[F] {
private[this] val logger = getLogger
private[this] val logger = docspell.logging.getLogger[F]
def start: Stream[F, Nothing] =
logger.sinfo("Starting periodic scheduler") ++
logger.stream.info("Starting periodic scheduler").drain ++
mainLoop
def shutdown: F[Unit] =
@ -43,7 +41,7 @@ final class PeriodicSchedulerImpl[F[_]: Async](
Async[F].start(
Stream
.awakeEvery[F](config.wakeupPeriod.toScala)
.evalMap(_ => logger.fdebug("Periodic awake reached") *> notifyChange)
.evalMap(_ => logger.debug("Periodic awake reached") *> notifyChange)
.compile
.drain
)
@ -62,22 +60,22 @@ final class PeriodicSchedulerImpl[F[_]: Async](
def mainLoop: Stream[F, Nothing] = {
val body: F[Boolean] =
for {
_ <- logger.fdebug(s"Going into main loop")
_ <- logger.debug(s"Going into main loop")
now <- Timestamp.current[F]
_ <- logger.fdebug(s"Looking for next periodic task")
_ <- logger.debug(s"Looking for next periodic task")
go <- logThrow("Error getting next task")(
store
.takeNext(config.name, None)
.use {
case Marked.Found(pj) =>
logger
.fdebug(s"Found periodic task '${pj.subject}/${pj.timer.asString}'") *>
.debug(s"Found periodic task '${pj.subject}/${pj.timer.asString}'") *>
(if (isTriggered(pj, now)) submitJob(pj)
else scheduleNotify(pj).map(_ => false))
case Marked.NotFound =>
logger.fdebug("No periodic task found") *> false.pure[F]
logger.debug("No periodic task found") *> false.pure[F]
case Marked.NotMarkable =>
logger.fdebug("Periodic job cannot be marked. Trying again.") *> true
logger.debug("Periodic job cannot be marked. Trying again.") *> true
.pure[F]
}
)
@ -86,7 +84,7 @@ final class PeriodicSchedulerImpl[F[_]: Async](
Stream
.eval(state.get.map(_.shutdownRequest))
.evalTap(
if (_) logger.finfo[F]("Stopping main loop due to shutdown request.")
if (_) logger.info("Stopping main loop due to shutdown request.")
else ().pure[F]
)
.flatMap(if (_) Stream.empty else Stream.eval(cancelNotify *> body))
@ -94,9 +92,9 @@ final class PeriodicSchedulerImpl[F[_]: Async](
case true =>
mainLoop
case false =>
logger.sdebug(s"Waiting for notify") ++
logger.stream.debug(s"Waiting for notify").drain ++
waiter.discrete.take(2).drain ++
logger.sdebug(s"Notify signal, going into main loop") ++
logger.stream.debug(s"Notify signal, going into main loop").drain ++
mainLoop
}
}
@ -109,12 +107,12 @@ final class PeriodicSchedulerImpl[F[_]: Async](
.findNonFinalJob(pj.id)
.flatMap {
case Some(job) =>
logger.finfo[F](
logger.info(
s"There is already a job with non-final state '${job.state}' in the queue"
) *> scheduleNotify(pj) *> false.pure[F]
case None =>
logger.finfo[F](s"Submitting job for periodic task '${pj.task.id}'") *>
logger.info(s"Submitting job for periodic task '${pj.task.id}'") *>
pj.toJob.flatMap(queue.insert) *> notifyJoex *> true.pure[F]
}
@ -125,7 +123,7 @@ final class PeriodicSchedulerImpl[F[_]: Async](
Timestamp
.current[F]
.flatMap(now =>
logger.fdebug(
logger.debug(
s"Scheduling next notify for timer ${pj.timer.asString} -> ${pj.timer.nextElapse(now.toUtcDateTime)}"
)
) *>
@ -153,13 +151,13 @@ final class PeriodicSchedulerImpl[F[_]: Async](
private def logError(msg: => String)(fa: F[Unit]): F[Unit] =
fa.attempt.flatMap {
case Right(_) => ().pure[F]
case Left(ex) => logger.ferror(ex)(msg).map(_ => ())
case Left(ex) => logger.error(ex)(msg).map(_ => ())
}
private def logThrow[A](msg: => String)(fa: F[A]): F[A] =
fa.attempt.flatMap {
case r @ Right(_) => (r: Either[Throwable, A]).pure[F]
case l @ Left(ex) => logger.ferror(ex)(msg).map(_ => (l: Either[Throwable, A]))
case l @ Left(ex) => logger.error(ex)(msg).map(_ => (l: Either[Throwable, A]))
}.rethrow
}
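
The two helpers above share a reusable shape: run an effect, log on failure, then either swallow the error (logError) or raise it again (logThrow). A standalone sketch of that shape with an injected Logger, outside the scheduler (the object and parameter names are illustrative, not part of this commit):

import cats.effect.IO
import cats.syntax.all._

import docspell.logging.Logger

object ErrorLoggingSketch {
  // swallow the failure after logging it
  def logError(logger: Logger[IO], msg: => String)(fa: IO[Unit]): IO[Unit] =
    fa.attempt.flatMap {
      case Right(_) => IO.unit
      case Left(ex) => logger.error(ex)(msg)
    }

  // log the failure, then rethrow it to the caller
  def logThrow[A](logger: Logger[IO], msg: => String)(fa: IO[A]): IO[A] =
    fa.attempt.flatTap {
      case Left(ex) => logger.error(ex)(msg)
      case Right(_) => IO.unit
    }.rethrow
}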

View File

@ -12,6 +12,8 @@ import cats.implicits._
import fs2.Stream
import docspell.common._
import docspell.logging
import docspell.logging.{Level, Logger}
object QueueLogger {
@ -21,26 +23,20 @@ object QueueLogger {
q: Queue[F, LogEvent]
): Logger[F] =
new Logger[F] {
def trace(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Debug, msg).flatMap(q.offer)
def debug(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Debug, msg).flatMap(q.offer)
def info(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Info, msg).flatMap(q.offer)
def warn(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Warn, msg).flatMap(q.offer)
def error(ex: Throwable)(msg: => String): F[Unit] =
def log(logEvent: logging.LogEvent) =
LogEvent
.create[F](jobId, jobInfo, LogLevel.Error, msg)
.map(le => le.copy(ex = Some(ex)))
.flatMap(q.offer)
.create[F](jobId, jobInfo, level2Level(logEvent.level), logEvent.msg())
.flatMap { ev =>
val event =
logEvent.findErrors.headOption
.map(ex => ev.copy(ex = Some(ex)))
.getOrElse(ev)
def error(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Error, msg).flatMap(q.offer)
q.offer(event)
}
def asUnsafe = Logger.off
}
def apply[F[_]: Async](
@ -57,4 +53,6 @@ object QueueLogger {
)
} yield log
private def level2Level(level: Level): LogLevel =
LogLevel.fromLevel(level)
}
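
Stripped of the job-specific fields, the pattern above is a Logger[F] whose log method only offers events to a queue, so a consumer on another fiber can drain and persist them. A minimal sketch (the queued helper is hypothetical, not part of this commit):

import cats.effect.{IO, IOApp}
import cats.effect.std.Queue

import docspell.logging.{LogEvent, Logger}

object QueuedLoggerSketch extends IOApp.Simple {
  // hypothetical helper: forward every event into the queue
  def queued(q: Queue[IO, LogEvent]): Logger[IO] =
    new Logger[IO] {
      def log(ev: LogEvent) = q.offer(ev)
      def asUnsafe = Logger.off
    }

  val run =
    for {
      q <- Queue.bounded[IO, LogEvent](64)
      log = queued(q)
      _ <- log.info("hello from the queue")
      ev <- q.take // a consumer would normally run concurrently
      _ <- IO.println(ev.asString)
    } yield ()
}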

View File

@ -15,7 +15,6 @@ import fs2.concurrent.SignallingRef
import docspell.backend.msg.JobDone
import docspell.common._
import docspell.common.syntax.all._
import docspell.joex.scheduler.SchedulerImpl._
import docspell.notification.api.Event
import docspell.notification.api.EventSink
@ -26,7 +25,6 @@ import docspell.store.queue.JobQueue
import docspell.store.records.RJob
import io.circe.Json
import org.log4s.getLogger
final class SchedulerImpl[F[_]: Async](
val config: SchedulerConfig,
@ -41,7 +39,7 @@ final class SchedulerImpl[F[_]: Async](
permits: Semaphore[F]
) extends Scheduler[F] {
private[this] val logger = getLogger
private[this] val logger = docspell.logging.getLogger[F]
/** On startup, get all jobs in state running from this scheduler and put them into
* waiting state, so they get picked up again.
@ -53,7 +51,7 @@ final class SchedulerImpl[F[_]: Async](
Async[F].start(
Stream
.awakeEvery[F](config.wakeupPeriod.toScala)
.evalMap(_ => logger.fdebug("Periodic awake reached") *> notifyChange)
.evalMap(_ => logger.debug("Periodic awake reached") *> notifyChange)
.compile
.drain
)
@ -62,7 +60,7 @@ final class SchedulerImpl[F[_]: Async](
state.get.flatMap(s => QJob.findAll(s.getRunning, store))
def requestCancel(jobId: Ident): F[Boolean] =
logger.finfo(s"Scheduler requested to cancel job: ${jobId.id}") *>
logger.info(s"Scheduler requested to cancel job: ${jobId.id}") *>
state.get.flatMap(_.cancelRequest(jobId) match {
case Some(ct) => ct.map(_ => true)
case None =>
@ -74,7 +72,7 @@ final class SchedulerImpl[F[_]: Async](
)
} yield true)
.getOrElseF(
logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
logger.warn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
)
})
@ -90,16 +88,16 @@ final class SchedulerImpl[F[_]: Async](
val wait = Stream
.eval(runShutdown)
.evalMap(_ => logger.finfo("Scheduler is shutting down now."))
.evalMap(_ => logger.info("Scheduler is shutting down now."))
.flatMap(_ =>
Stream.eval(state.get) ++ Stream
.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty))
)
.flatMap { state =>
if (state.getRunning.isEmpty) Stream.eval(logger.finfo("No jobs running."))
if (state.getRunning.isEmpty) Stream.eval(logger.info("No jobs running."))
else
Stream.eval(
logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")
logger.info(s"Waiting for ${state.getRunning.size} jobs to finish.")
) ++
Stream.emit(state)
}
@ -108,35 +106,35 @@ final class SchedulerImpl[F[_]: Async](
}
def start: Stream[F, Nothing] =
logger.sinfo("Starting scheduler") ++
logger.stream.info("Starting scheduler").drain ++
mainLoop
def mainLoop: Stream[F, Nothing] = {
val body: F[Boolean] =
for {
_ <- permits.available.flatMap(a =>
logger.fdebug(s"Try to acquire permit ($a free)")
logger.debug(s"Try to acquire permit ($a free)")
)
_ <- permits.acquire
_ <- logger.fdebug("New permit acquired")
_ <- logger.debug("New permit acquired")
down <- state.get.map(_.shutdownRequest)
rjob <-
if (down)
logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F]
logger.info("") *> permits.release *> (None: Option[RJob]).pure[F]
else
queue.nextJob(
group => state.modify(_.nextPrio(group, config.countingScheme)),
config.name,
config.retryDelay
)
_ <- logger.fdebug(s"Next job found: ${rjob.map(_.info)}")
_ <- logger.debug(s"Next job found: ${rjob.map(_.info)}")
_ <- rjob.map(execute).getOrElse(permits.release)
} yield rjob.isDefined
Stream
.eval(state.get.map(_.shutdownRequest))
.evalTap(
if (_) logger.finfo[F]("Stopping main loop due to shutdown request.")
if (_) logger.info("Stopping main loop due to shutdown request.")
else ().pure[F]
)
.flatMap(if (_) Stream.empty else Stream.eval(body))
@ -144,9 +142,9 @@ final class SchedulerImpl[F[_]: Async](
case true =>
mainLoop
case false =>
logger.sdebug(s"Waiting for notify") ++
logger.stream.debug(s"Waiting for notify").drain ++
waiter.discrete.take(2).drain ++
logger.sdebug(s"Notify signal, going into main loop") ++
logger.stream.debug(s"Notify signal, going into main loop").drain ++
mainLoop
}
}
@ -161,17 +159,17 @@ final class SchedulerImpl[F[_]: Async](
task match {
case Left(err) =>
logger.ferror(s"Unable to run cancellation task for job ${job.info}: $err")
logger.error(s"Unable to run cancellation task for job ${job.info}: $err")
case Right(t) =>
for {
_ <-
logger.fdebug(s"Creating context for job ${job.info} to run cancellation $t")
logger.debug(s"Creating context for job ${job.info} to run cancellation $t")
ctx <- Context[F, String](job, job.args, config, logSink, store)
_ <- t.onCancel.run(ctx)
_ <- state.modify(_.markCancelled(job))
_ <- onFinish(job, Json.Null, JobState.Cancelled)
_ <- ctx.logger.warn("Job has been cancelled.")
_ <- logger.fdebug(s"Job ${job.info} has been cancelled.")
_ <- logger.debug(s"Job ${job.info} has been cancelled.")
} yield ()
}
}
@ -186,10 +184,10 @@ final class SchedulerImpl[F[_]: Async](
task match {
case Left(err) =>
logger.ferror(s"Unable to start a task for job ${job.info}: $err")
logger.error(s"Unable to start a task for job ${job.info}: $err")
case Right(t) =>
for {
_ <- logger.fdebug(s"Creating context for job ${job.info} to run $t")
_ <- logger.debug(s"Creating context for job ${job.info} to run $t")
ctx <- Context[F, String](job, job.args, config, logSink, store)
jot = wrapTask(job, t.task, ctx)
tok <- forkRun(job, jot.run(ctx), t.onCancel.run(ctx), ctx)
@ -200,9 +198,9 @@ final class SchedulerImpl[F[_]: Async](
def onFinish(job: RJob, result: Json, finishState: JobState): F[Unit] =
for {
_ <- logger.fdebug(s"Job ${job.info} done $finishState. Releasing resources.")
_ <- logger.debug(s"Job ${job.info} done $finishState. Releasing resources.")
_ <- permits.release *> permits.available.flatMap(a =>
logger.fdebug(s"Permit released ($a free)")
logger.debug(s"Permit released ($a free)")
)
_ <- state.modify(_.removeRunning(job))
_ <- QJob.setFinalState(job.id, finishState, store)
@ -241,7 +239,7 @@ final class SchedulerImpl[F[_]: Async](
ctx: Context[F, String]
): Task[F, String, Unit] =
task
.mapF(fa => onStart(job) *> logger.fdebug("Starting task now") *> fa)
.mapF(fa => onStart(job) *> logger.debug("Starting task now") *> fa)
.mapF(_.attempt.flatMap {
case Right(result) =>
logger.info(s"Job execution successful: ${job.info}")
@ -284,11 +282,11 @@ final class SchedulerImpl[F[_]: Async](
onCancel: F[Unit],
ctx: Context[F, String]
): F[F[Unit]] =
logger.fdebug(s"Forking job ${job.info}") *>
logger.debug(s"Forking job ${job.info}") *>
Async[F]
.start(code)
.map(fiber =>
logger.fdebug(s"Cancelling job ${job.info}") *>
logger.debug(s"Cancelling job ${job.info}") *>
fiber.cancel *>
onCancel.attempt.map {
case Right(_) => ()
@ -299,7 +297,7 @@ final class SchedulerImpl[F[_]: Async](
state.modify(_.markCancelled(job)) *>
onFinish(job, Json.Null, JobState.Cancelled) *>
ctx.logger.warn("Job has been cancelled.") *>
logger.fdebug(s"Job ${job.info} has been cancelled.")
logger.debug(s"Job ${job.info} has been cancelled.")
)
}

View File

@ -11,7 +11,7 @@ import cats.data.Kleisli
import cats.effect.Sync
import cats.implicits._
import docspell.common.Logger
import docspell.logging.Logger
/** The code that is executed by the scheduler */
trait Task[F[_], A, B] {

View File

@ -9,7 +9,6 @@ package docspell.joexapi.client
import cats.effect._
import cats.implicits._
import docspell.common.syntax.all._
import docspell.common.{Ident, LenientUri}
import docspell.joexapi.model.BasicResult
@ -17,7 +16,6 @@ import org.http4s.blaze.client.BlazeClientBuilder
import org.http4s.circe.CirceEntityDecoder
import org.http4s.client.Client
import org.http4s.{Method, Request, Uri}
import org.log4s.getLogger
trait JoexClient[F[_]] {
@ -31,22 +29,21 @@ trait JoexClient[F[_]] {
object JoexClient {
private[this] val logger = getLogger
def apply[F[_]: Async](client: Client[F]): JoexClient[F] =
new JoexClient[F] with CirceEntityDecoder {
private[this] val logger = docspell.logging.getLogger[F]
def notifyJoex(base: LenientUri): F[BasicResult] = {
val notifyUrl = base / "api" / "v1" / "notify"
val req = Request[F](Method.POST, uri(notifyUrl))
logger.fdebug(s"Notify joex at ${notifyUrl.asString}") *>
logger.debug(s"Notify joex at ${notifyUrl.asString}") *>
client.expect[BasicResult](req)
}
def notifyJoexIgnoreErrors(base: LenientUri): F[Unit] =
notifyJoex(base).attempt.map {
notifyJoex(base).attempt.flatMap {
case Right(BasicResult(succ, msg)) =>
if (succ) ()
if (succ) ().pure[F]
else
logger.warn(
s"Notifying Joex instance '${base.asString}' returned with failure: $msg"

View File

@ -0,0 +1,39 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import cats.data.NonEmptyList
import cats.syntax.all._
import cats.{Applicative, Id}
final private[logging] class AndThenLogger[F[_]: Applicative](
val loggers: NonEmptyList[Logger[F]]
) extends Logger[F] {
def log(ev: LogEvent): F[Unit] =
loggers.traverse(_.log(ev)).as(())
def asUnsafe: Logger[Id] =
new Logger[Id] { self =>
def log(ev: LogEvent): Unit =
loggers.toList.foreach(_.asUnsafe.log(ev))
def asUnsafe = self
}
}
private[logging] object AndThenLogger {
def combine[F[_]: Applicative](a: Logger[F], b: Logger[F]): Logger[F] =
(a, b) match {
case (aa: AndThenLogger[F], bb: AndThenLogger[F]) =>
new AndThenLogger[F](aa.loggers ++ bb.loggers.toList)
case (aa: AndThenLogger[F], _) =>
new AndThenLogger[F](aa.loggers.prepend(b))
case (_, bb: AndThenLogger[F]) =>
new AndThenLogger[F](bb.loggers.prepend(a))
case _ =>
new AndThenLogger[F](NonEmptyList.of(a, b))
}
}
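
AndThenLogger itself is private to the logging package; it is reached through andThen/>> from LoggerExtension. A small sketch of the resulting fan-out, assuming only the constructors introduced in this commit:

import cats.effect.{IO, IOApp}

import docspell.logging.{Level, Logger}

object AndThenSketch extends IOApp.Simple {
  val run =
    for {
      bl <- Logger.buffer[IO]()
      // one logger that forwards each event to stderr and to the buffer
      both = Logger.simpleDefault[IO](Level.Trace) >> bl._2
      _ <- both.info("goes to stderr and into the in-memory buffer")
      evs <- bl._1.get
      _ <- IO.println(s"buffered ${evs.size} event(s)")
    } yield ()
}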

View File

@ -0,0 +1,57 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import cats.Order
import cats.data.NonEmptyList
import io.circe.{Decoder, Encoder}
sealed trait Level { self: Product =>
val name: String =
productPrefix.toUpperCase
val value: Double
}
object Level {
case object Fatal extends Level {
val value = 600.0
}
case object Error extends Level {
val value = 500.0
}
case object Warn extends Level {
val value = 400.0
}
case object Info extends Level {
val value = 300.0
}
case object Debug extends Level {
val value = 200.0
}
case object Trace extends Level {
val value = 100.0
}
val all: NonEmptyList[Level] =
NonEmptyList.of(Fatal, Error, Warn, Info, Debug, Trace)
def fromString(str: String): Either[String, Level] = {
val s = str.toUpperCase
all.find(_.name == s).toRight(s"Invalid level name: $str")
}
implicit val order: Order[Level] =
Order.by(_.value)
implicit val jsonEncoder: Encoder[Level] =
Encoder.encodeString.contramap(_.name)
implicit val jsonDecoder: Decoder[Level] =
Decoder.decodeString.emap(fromString)
}
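
A quick illustration of this API: fromString parses case-insensitively, and the Order instance compares the numeric value field, so levels sort from Trace up to Fatal.

import docspell.logging.Level

object LevelSketch extends App {
  println(Level.fromString("warn"))    // Right(Warn)
  println(Level.fromString("verbose")) // Left(Invalid level name: verbose)
  println(Level.order.gt(Level.Error, Level.Warn)) // true: 500.0 > 400.0
  println(Level.all.sorted.map(_.name).toList)
  // List(TRACE, DEBUG, INFO, WARN, ERROR, FATAL)
}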

View File

@ -0,0 +1,45 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import cats.data.NonEmptyList
import io.circe.{Decoder, Encoder}
final case class LogConfig(minimumLevel: Level, format: LogConfig.Format) {}
object LogConfig {
sealed trait Format { self: Product =>
def name: String =
productPrefix.toLowerCase
}
object Format {
case object Plain extends Format
case object Fancy extends Format
case object Json extends Format
case object Logfmt extends Format
val all: NonEmptyList[Format] =
NonEmptyList.of(Plain, Fancy, Json, Logfmt)
def fromString(str: String): Either[String, Format] =
all.find(_.name.equalsIgnoreCase(str)).toRight(s"Invalid format name: $str")
implicit val jsonDecoder: Decoder[Format] =
Decoder.decodeString.emap(fromString)
implicit val jsonEncoder: Encoder[Format] =
Encoder.encodeString.contramap(_.name)
}
implicit val jsonDecoder: Decoder[LogConfig] =
Decoder.forProduct2("minimumLevel", "format")(LogConfig.apply)
implicit val jsonEncoder: Encoder[LogConfig] =
Encoder.forProduct2("minimumLevel", "format")(r => (r.minimumLevel, r.format))
}
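
Since both codecs above only need circe-core, a round trip can be checked without pulling in a parser. A minimal sketch:

import docspell.logging.{Level, LogConfig}
import io.circe.syntax._

object LogConfigSketch extends App {
  val cfg = LogConfig(Level.Info, LogConfig.Format.Json)
  val json = cfg.asJson
  println(json.noSpaces)      // {"minimumLevel":"INFO","format":"json"}
  println(json.as[LogConfig]) // Right(LogConfig(Info,Json))
}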

View File

@ -0,0 +1,51 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import io.circe.{Encoder, Json}
import sourcecode._
final case class LogEvent(
level: Level,
msg: () => String,
additional: List[() => LogEvent.AdditionalMsg],
data: Map[String, () => Json],
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
) {
def asString =
s"${level.name} ${name.value}/${fileName}:${line.value} - ${msg()}"
def data[A: Encoder](key: String, value: => A): LogEvent =
copy(data = data.updated(key, () => Encoder[A].apply(value)))
def addMessage(msg: => String): LogEvent =
copy(additional = (() => Left(msg)) :: additional)
def addError(ex: Throwable): LogEvent =
copy(additional = (() => Right(ex)) :: additional)
def findErrors: List[Throwable] =
additional.map(a => a()).collect { case Right(ex) =>
ex
}
}
object LogEvent {
type AdditionalMsg = Either[String, Throwable]
def of(l: Level, m: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): LogEvent = LogEvent(l, () => m, Nil, Map.empty, pkg, fileName, name, line)
}
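
A sketch of building an event directly: the sourcecode implicits (Pkg, FileName, Name, Line) are filled in automatically at the call site, and the message and data values stay unevaluated thunks until the event is rendered.

import docspell.logging.{Level, LogEvent}

object LogEventSketch extends App {
  val ev = LogEvent
    .of(Level.Info, "processing started")
    .data("itemCount", 42)       // lazily encoded via circe
    .addMessage("first attempt")

  // prints something like: INFO ev/LogEventSketch.scala:6 - processing started
  println(ev.asString)
  println(ev.findErrors) // List() -- addError was never called
}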

View File

@ -0,0 +1,169 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import java.io.PrintStream
import java.time.Instant
import cats.effect.{Ref, Sync}
import cats.syntax.applicative._
import cats.syntax.functor._
import cats.syntax.order._
import cats.{Applicative, Id}
import sourcecode._
trait Logger[F[_]] extends LoggerExtension[F] {
def log(ev: LogEvent): F[Unit]
def asUnsafe: Logger[Id]
def trace(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Trace, msg))
def traceWith(msg: => String)(modify: LogEvent => LogEvent)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(modify(LogEvent.of(Level.Trace, msg)))
def debug(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Debug, msg))
def debugWith(msg: => String)(modify: LogEvent => LogEvent)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(modify(LogEvent.of(Level.Debug, msg)))
def info(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Info, msg))
def infoWith(msg: => String)(modify: LogEvent => LogEvent)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(modify(LogEvent.of(Level.Info, msg)))
def warn(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Warn, msg))
def warnWith(msg: => String)(modify: LogEvent => LogEvent)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(modify(LogEvent.of(Level.Warn, msg)))
def warn(ex: Throwable)(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Warn, msg).addError(ex))
def error(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Error, msg))
def errorWith(msg: => String)(modify: LogEvent => LogEvent)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(modify(LogEvent.of(Level.Error, msg)))
def error(ex: Throwable)(msg: => String)(implicit
pkg: Pkg,
fileName: FileName,
name: Name,
line: Line
): F[Unit] =
log(LogEvent.of(Level.Error, msg).addError(ex))
}
object Logger {
def off: Logger[Id] =
new Logger[Id] {
def log(ev: LogEvent): Unit = ()
def asUnsafe = this
}
def offF[F[_]: Applicative]: Logger[F] =
new Logger[F] {
def log(ev: LogEvent) = ().pure[F]
def asUnsafe = off
}
def buffer[F[_]: Sync](): F[(Ref[F, Vector[LogEvent]], Logger[F])] =
for {
buffer <- Ref.of[F, Vector[LogEvent]](Vector.empty[LogEvent])
logger =
new Logger[F] {
def log(ev: LogEvent) =
buffer.update(_.appended(ev))
def asUnsafe = off
}
} yield (buffer, logger)
/** Just prints to the given print stream. Useful for testing. */
def simple(ps: PrintStream, minimumLevel: Level): Logger[Id] =
new Logger[Id] {
def log(ev: LogEvent): Unit =
if (ev.level >= minimumLevel)
ps.println(s"${Instant.now()} [${Thread.currentThread()}] ${ev.asString}")
else
()
def asUnsafe = this
}
def simpleF[F[_]: Sync](ps: PrintStream, minimumLevel: Level): Logger[F] =
new Logger[F] {
def log(ev: LogEvent) =
Sync[F].delay(asUnsafe.log(ev))
val asUnsafe = simple(ps, minimumLevel)
}
def simpleDefault[F[_]: Sync](minimumLevel: Level = Level.Info): Logger[F] =
simpleF[F](System.err, minimumLevel)
}
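
Sketch of the print-stream constructors above: simple logs synchronously through a Logger[Id], simpleF suspends the same logger in F, and events below the minimum level are dropped.

import cats.effect.IO
import cats.effect.unsafe.implicits.global

import docspell.logging.{Level, Logger}

object SimpleLoggerSketch extends App {
  val unsafe = Logger.simple(System.out, Level.Info)
  unsafe.info("printed immediately") // Id effect: runs right here
  unsafe.debug("dropped")            // below the Info threshold

  Logger
    .simpleF[IO](System.out, Level.Info)
    .info("printed when the IO runs")
    .unsafeRunSync()
}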

View File

@ -0,0 +1,27 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import cats.Applicative
import fs2.Stream
trait LoggerExtension[F[_]] { self: Logger[F] =>
def stream: Logger[Stream[F, *]] =
new Logger[Stream[F, *]] {
def log(ev: LogEvent) =
Stream.eval(self.log(ev))
def asUnsafe = self.asUnsafe
}
def andThen(other: Logger[F])(implicit F: Applicative[F]): Logger[F] =
AndThenLogger.combine(self, other)
def >>(other: Logger[F])(implicit F: Applicative[F]): Logger[F] =
AndThenLogger.combine(self, other)
}
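
The stream view is what the scheduler diffs above rely on (logger.stream.info(...).drain ++ ...). A compact sketch sequencing log statements inside an fs2 stream:

import cats.effect.{IO, IOApp}
import fs2.Stream

import docspell.logging.{Level, Logger}

object StreamLoggingSketch extends IOApp.Simple {
  private val logger = Logger.simpleDefault[IO](Level.Trace)

  val run =
    (logger.stream.info("starting").drain ++
      Stream.emits(List(1, 2, 3)).covary[IO].evalMap(n => logger.debug(s"n=$n")) ++
      logger.stream.info("done").drain).compile.drain
}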

View File

@ -0,0 +1,26 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging.impl
import io.circe.syntax._
import scribe._
import scribe.output._
import scribe.output.format.OutputFormat
import scribe.writer._
final case class JsonWriter(writer: Writer, compact: Boolean = true) extends Writer {
override def write[M](
record: LogRecord[M],
output: LogOutput,
outputFormat: OutputFormat
): Unit = {
val r = Record.fromLogRecord(record)
val json = r.asJson
val jsonString = if (compact) json.noSpaces else json.spaces2
writer.write(record, new TextOutput(jsonString), outputFormat)
}
}
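
A hedged wiring sketch, mirroring what ScribeConfigure does further down; the sketch object is hypothetical and has to live in the docspell.logging.impl package only because StdoutWriter is private[impl]:

package docspell.logging.impl

object JsonWriterSketch extends App {
  // route the "docspell" scribe logger through the JSON writer
  private val log = scribe
    .Logger("docspell")
    .clearHandlers()
    .withHandler(writer = JsonWriter(StdoutWriter))
    .replace()

  log.info("rendered as a single JSON object per line")
}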

View File

@ -0,0 +1,32 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging.impl
import io.circe.syntax._
import scribe._
import scribe.output._
import scribe.output.format.OutputFormat
import scribe.writer._
// https://brandur.org/logfmt
final case class LogfmtWriter(writer: Writer) extends Writer {
override def write[M](
record: LogRecord[M],
output: LogOutput,
outputFormat: OutputFormat
): Unit = {
val r = Record.fromLogRecord(record)
val data = r.data
.map { case (k, v) =>
s"$k=${v.noSpaces}"
}
.mkString(" ")
val logfmtStr =
s"""level=${r.level.asJson.noSpaces} levelValue=${r.levelValue} message=${r.message.asJson.noSpaces} fileName=${r.fileName.asJson.noSpaces} className=${r.className.asJson.noSpaces} methodName=${r.methodName.asJson.noSpaces} line=${r.line.asJson.noSpaces} column=${r.column.asJson.noSpaces} $data timestamp=${r.timeStamp} date=${r.date} time=${r.time}"""
writer.write(record, new TextOutput(logfmtStr), outputFormat)
}
}

View File

@ -0,0 +1,120 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging.impl
import docspell.logging.impl.Record._
import io.circe.syntax._
import io.circe.{Encoder, Json}
import perfolation._
import scribe.LogRecord
import scribe.data.MDC
import scribe.message.Message
// From: https://github.com/outr/scribe/blob/8e99521e1ee1f0c421629764dd96e4eb193d84bd/json/shared/src/main/scala/scribe/json/JsonWriter.scala
// which would introduce jackson and other dependencies. Modified to work with circe.
// Original licensed under MIT.
private[impl] case class Record(
level: String,
levelValue: Double,
message: String,
additionalMessages: List[String],
fileName: String,
className: String,
methodName: Option[String],
line: Option[Int],
column: Option[Int],
data: Map[String, Json],
traces: List[Trace],
timeStamp: Long,
date: String,
time: String
)
private[impl] object Record {
def fromLogRecord[M](record: LogRecord[M]): Record = {
val l = record.timeStamp
val traces = record.additionalMessages.collect {
case message: Message[_] if message.value.isInstanceOf[Throwable] =>
throwable2Trace(message.value.asInstanceOf[Throwable])
}
val additionalMessages = record.additionalMessages.map(_.logOutput.plainText)
Record(
level = record.level.name,
levelValue = record.levelValue,
message = record.logOutput.plainText,
additionalMessages = additionalMessages,
fileName = record.fileName,
className = record.className,
methodName = record.methodName,
line = record.line,
column = record.column,
data = (record.data ++ MDC.map).map { case (key, value) =>
value() match {
case value: Json => key -> value
case value: Int => key -> value.asJson
case value: Long => key -> value.asJson
case value: Double => key -> value.asJson
case any => key -> Json.fromString(any.toString)
}
},
traces = traces,
timeStamp = l,
date = l.t.F,
time = s"${l.t.T}.${l.t.L}${l.t.z}"
)
}
private def throwable2Trace(throwable: Throwable): Trace = {
val elements = throwable.getStackTrace.toList.map { e =>
TraceElement(e.getClassName, e.getMethodName, e.getLineNumber)
}
Trace(
throwable.getLocalizedMessage,
elements,
Option(throwable.getCause).map(throwable2Trace)
)
}
implicit val jsonEncoder: Encoder[Record] =
Encoder.forProduct14(
"level",
"levelValue",
"message",
"additionalMessages",
"fileName",
"className",
"methodName",
"line",
"column",
"data",
"traces",
"timestamp",
"date",
"time"
)(r => Record.unapply(r).get)
case class Trace(message: String, elements: List[TraceElement], cause: Option[Trace])
object Trace {
implicit def jsonEncoder: Encoder[Trace] =
Encoder.forProduct3("message", "elements", "cause")(r => Trace.unapply(r).get)
implicit def openEncoder: Encoder[Option[Trace]] =
Encoder.instance(opt => opt.map(jsonEncoder.apply).getOrElse(Json.Null))
}
case class TraceElement(`class`: String, method: String, line: Int)
object TraceElement {
implicit val jsonEncoder: Encoder[TraceElement] =
Encoder.forProduct3("class", "method", "line")(r => TraceElement.unapply(r).get)
}
}

View File

@ -0,0 +1,69 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging.impl
import cats.effect.Sync
import docspell.logging.LogConfig.Format
import docspell.logging.{Level, LogConfig}
import scribe.format.Formatter
import scribe.jul.JULHandler
object ScribeConfigure {
private[this] val docspellRootVerbose = "DOCSPELL_ROOT_LOGGER_LEVEL"
def configure[F[_]: Sync](cfg: LogConfig): F[Unit] =
Sync[F].delay {
replaceJUL()
val docspellLogger = scribe.Logger("docspell")
unsafeConfigure(scribe.Logger.root, cfg.copy(minimumLevel = getRootMinimumLevel))
unsafeConfigure(docspellLogger, cfg)
}
private[this] def getRootMinimumLevel: Level =
Option(System.getenv(docspellRootVerbose))
.map(Level.fromString)
.flatMap {
case Right(level) => Some(level)
case Left(err) =>
scribe.warn(
s"Environment variable '$docspellRootVerbose' has invalid value: $err"
)
None
}
.getOrElse(Level.Error)
def unsafeConfigure(logger: scribe.Logger, cfg: LogConfig): Unit = {
val mods = List[scribe.Logger => scribe.Logger](
_.clearHandlers(),
_.withMinimumLevel(ScribeWrapper.convertLevel(cfg.minimumLevel)),
l =>
cfg.format match {
case Format.Fancy =>
l.withHandler(formatter = Formatter.enhanced, writer = StdoutWriter)
case Format.Plain =>
l.withHandler(formatter = Formatter.classic, writer = StdoutWriter)
case Format.Json =>
l.withHandler(writer = JsonWriter(StdoutWriter))
case Format.Logfmt =>
l.withHandler(writer = LogfmtWriter(StdoutWriter))
},
_.replace()
)
mods.foldLeft(logger)((l, mod) => mod(l))
()
}
private def replaceJUL(): Unit = {
scribe.Logger.system // just to load effects in Logger singleton
val julRoot = java.util.logging.LogManager.getLogManager.getLogger("")
julRoot.getHandlers.foreach(julRoot.removeHandler)
julRoot.addHandler(JULHandler)
}
}
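
What an application entry point might do with this: configure scribe exactly once, before the first logger is requested. The config values and logger name below are illustrative only.

import cats.effect.{IO, IOApp}

import docspell.logging.{Level, LogConfig}
import docspell.logging.impl.ScribeConfigure

object MainSketch extends IOApp.Simple {
  val run =
    ScribeConfigure.configure[IO](LogConfig(Level.Info, LogConfig.Format.Logfmt)) *>
      docspell.logging.getLogger[IO]("docspell.main").info("logging configured")
}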

View File

@ -0,0 +1,52 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging.impl
import cats.Id
import cats.effect.Sync
import docspell.logging.{Level, LogEvent, Logger}
import scribe.LoggerSupport
import scribe.message.{LoggableMessage, Message}
private[logging] object ScribeWrapper {
final class ImplUnsafe(log: scribe.Logger) extends Logger[Id] {
override def asUnsafe = this
override def log(ev: LogEvent): Unit =
log.log(convert(ev))
}
final class Impl[F[_]: Sync](log: scribe.Logger) extends Logger[F] {
override def asUnsafe = new ImplUnsafe(log)
override def log(ev: LogEvent) =
Sync[F].delay(log.log(convert(ev)))
}
private[impl] def convertLevel(l: Level): scribe.Level =
l match {
case Level.Fatal => scribe.Level.Fatal
case Level.Error => scribe.Level.Error
case Level.Warn => scribe.Level.Warn
case Level.Info => scribe.Level.Info
case Level.Debug => scribe.Level.Debug
case Level.Trace => scribe.Level.Trace
}
private[this] def convert(ev: LogEvent) = {
val level = convertLevel(ev.level)
val additional: List[LoggableMessage] = ev.additional.map { x =>
x() match {
case Right(ex) => Message.static(ex)
case Left(msg) => Message.static(msg)
}
}
LoggerSupport(level, ev.msg(), additional, ev.pkg, ev.fileName, ev.name, ev.line)
.copy(data = ev.data)
}
}

View File

@ -0,0 +1,56 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging.impl
import scribe._
import scribe.output.LogOutput
import scribe.output.format.OutputFormat
import scribe.writer.Writer
// From: https://github.com/outr/scribe/blob/8e99521e1ee1f0c421629764dd96e4eb193d84bd/core/shared/src/main/scala/scribe/writer/SystemOutputWriter.scala
// Modified to always log to stdout. The original code was logging to stdout and stderr
// depending on the log level.
// Original code licensed under MIT
private[impl] object StdoutWriter extends Writer {
/** If true, will always synchronize writing to the console to avoid interleaved text.
* Most native consoles will handle this automatically, but IntelliJ and Eclipse are
 * notorious for not handling this properly. Defaults to true.

*/
val synchronizeWriting: Boolean = true
/** Workaround for some consoles that don't play nicely with asynchronous calls */
val alwaysFlush: Boolean = false
private val stringBuilders = new ThreadLocal[StringBuilder] {
override def initialValue(): StringBuilder = new StringBuilder(512)
}
@annotation.nowarn
override def write[M](
record: LogRecord[M],
output: LogOutput,
outputFormat: OutputFormat
): Unit = {
val stream = Logger.system.out
val sb = stringBuilders.get()
outputFormat.begin(sb.append(_))
outputFormat(output, s => sb.append(s))
outputFormat.end(sb.append(_))
if (synchronizeWriting) {
synchronized {
stream.println(sb.toString())
if (alwaysFlush) stream.flush()
}
} else {
stream.println(sb.toString())
if (alwaysFlush) stream.flush()
}
sb.clear()
}
}

View File

@ -0,0 +1,33 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell
import cats.Id
import cats.effect._
import docspell.logging.impl.ScribeWrapper
import sourcecode.Enclosing
package object logging {
def unsafeLogger(name: String): Logger[Id] =
new ScribeWrapper.ImplUnsafe(scribe.Logger(name))
def unsafeLogger(implicit e: Enclosing): Logger[Id] =
unsafeLogger(e.value)
def getLogger[F[_]: Sync](implicit e: Enclosing): Logger[F] =
getLogger(e.value)
def getLogger[F[_]: Sync](name: String): Logger[F] =
new ScribeWrapper.Impl[F](scribe.Logger(name))
def getLogger[F[_]: Sync](clazz: Class[_]): Logger[F] =
new ScribeWrapper.Impl[F](scribe.Logger(clazz.getName))
}
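
Usage sketch for these constructors: the Enclosing-based variants derive the logger name from the call site, while the string variant names it explicitly (the names below are hypothetical).

import cats.effect.IO

import docspell.logging.{getLogger, unsafeLogger}

object NamingSketch {
  private val log = getLogger[IO] // name derived via sourcecode.Enclosing
  private val console = unsafeLogger("docspell.naming") // explicit name

  val touch: IO[Unit] = log.info("named after the enclosing scope")
  def touchUnsafe(): Unit = console.info("logged synchronously via Logger[Id]")
}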

View File

@ -0,0 +1,26 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.logging
import docspell.logging.impl.ScribeConfigure
import munit.Suite
trait TestLoggingConfig extends Suite {
def docspellLogConfig: LogConfig = LogConfig(Level.Warn, LogConfig.Format.Fancy)
def rootMinimumLevel: Level = Level.Error
override def beforeAll(): Unit = {
super.beforeAll()
val docspellLogger = scribe.Logger("docspell")
ScribeConfigure.unsafeConfigure(docspellLogger, docspellLogConfig)
val rootCfg = docspellLogConfig.copy(minimumLevel = rootMinimumLevel)
ScribeConfigure.unsafeConfigure(scribe.Logger.root, rootCfg)
()
}
}
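
A suite mixing in the trait keeps docspell logging at Warn for the run; overriding docspellLogConfig turns the output back on for one suite. A minimal munit sketch (the suite name is hypothetical):

import docspell.logging.TestLoggingConfig
import munit.FunSuite

class QuietSpec extends FunSuite with TestLoggingConfig {
  // to see debug output from docspell loggers in this one suite:
  // override def docspellLogConfig =
  //   docspell.logging.LogConfig(
  //     docspell.logging.Level.Debug,
  //     docspell.logging.LogConfig.Format.Fancy
  //   )

  test("stays quiet below Warn") {
    assertEquals(1 + 1, 2)
  }
}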

View File

@ -13,7 +13,7 @@ import cats.effect.std.Queue
import cats.implicits._
import fs2.Stream
import docspell.common.Logger
import docspell.logging.Logger
/** Combines a sink and reader to a place where events can be submitted and processed in a
* producer-consumer manner.
@ -21,8 +21,6 @@ import docspell.common.Logger
trait EventExchange[F[_]] extends EventSink[F] with EventReader[F] {}
object EventExchange {
private[this] val logger = org.log4s.getLogger
def silent[F[_]: Applicative]: EventExchange[F] =
new EventExchange[F] {
def offer(event: Event): F[Unit] =
@ -36,7 +34,7 @@ object EventExchange {
Queue.circularBuffer[F, Event](queueSize).map(q => new Impl(q))
final class Impl[F[_]: Async](queue: Queue[F, Event]) extends EventExchange[F] {
private[this] val log = Logger.log4s[F](logger)
private[this] val log: Logger[F] = docspell.logging.getLogger[F]
def offer(event: Event): F[Unit] =
log.debug(s"Pushing event to queue: $event") *>
@ -47,7 +45,7 @@ object EventExchange {
def consume(maxConcurrent: Int)(run: Kleisli[F, Event, Unit]): Stream[F, Nothing] = {
val stream = Stream.repeatEval(queue.take).evalMap((logEvent >> run).run)
log.s.info(s"Starting up $maxConcurrent notification event consumers").drain ++
log.stream.info(s"Starting up $maxConcurrent notification event consumers").drain ++
Stream(stream).repeat.take(maxConcurrent.toLong).parJoin(maxConcurrent).drain
}
}

Some files were not shown because too many files have changed in this diff.