mirror of
				https://github.com/TheAnachronism/docspell.git
				synced 2025-10-31 17:50:11 +00:00 
			
		
		
		
	add solr text_kh field type with ICU tokenizer, and a content_kh field using it, for Khmer content
				
					
				
			This commit is contained in:
		| @@ -30,6 +30,7 @@ object Field { | ||||
|   val content_de = contentField(Language.German) | ||||
|   val content_en = contentField(Language.English) | ||||
|   val content_fr = contentField(Language.French) | ||||
|   val content_kh = contentField(Language.Khmer) | ||||
|   val itemName = Field("itemName") | ||||
|   val itemNotes = Field("itemNotes") | ||||
|   val folderId = Field("folder") | ||||
|   | ||||
| @@ -172,7 +172,18 @@ object SolrSetup { | ||||
|             "Add Ukrainian", | ||||
|             addContentField(Language.Ukrainian) | ||||
|           ), | ||||
|           SolrMigration.reIndexAll(31, "Re-Index after adding Estonian and Ukrainian") | ||||
|           SolrMigration.reIndexAll(31, "Re-Index after adding Estonian and Ukrainian"), | ||||
|           SolrMigration[F]( | ||||
|             32, | ||||
|             "Add new field type for khmer content", | ||||
|             addFieldType(AddFieldType.textKhm) | ||||
|           ), | ||||
|           SolrMigration[F]( | ||||
|             33, | ||||
|             "Add Khmer", | ||||
|             addContentField(Language.Khmer) | ||||
|           ), | ||||
|           SolrMigration.reIndexAll(34, "Re-Index after adding Khmer") | ||||
|         ) | ||||
|  | ||||
|       def addFolderField: F[Unit] = | ||||
| @@ -347,6 +358,16 @@ object SolrSetup { | ||||
|       ) | ||||
|     ) | ||||
|  | ||||
|     val textKhm = AddFieldType( | ||||
|       "text_kh", | ||||
|       "solr.TextField", | ||||
|       Analyzer( | ||||
|         Tokenizer("solr.ICUTokenizerFactory", Map.empty), | ||||
|         List( | ||||
|         ) | ||||
|       ) | ||||
|     ) | ||||
|  | ||||
|     final case class Filter(`class`: String, attr: Map[String, String]) | ||||
|     final case class Tokenizer(`class`: String, attr: Map[String, String]) | ||||
|     final case class Analyzer(tokenizer: Tokenizer, filter: List[Filter]) | ||||
|   | ||||
		Reference in New Issue
	
	Block a user