Add JO validation and add category option to import scripts

This commit is contained in:
parent 126aebd7b1
commit bf3cda1a09

8 changed files with 1010 additions and 449 deletions. Each hunk below is shown in its post-commit form.
@@ -23,6 +23,22 @@ import {
  allJorfArticleTypesMutable,
} from "$lib/legal"

export const jorfArticleStats: {
  // countByEtat: { [etat: string]: number }
  // countByLienArtEtat: { [etat: string]: number }
  // countByLienNature: { [nature: string]: number }
  // countByLienType: { [type: string]: number }
  // countByTexteNature: { [nature: string]: number }
  // countByVersionEtat: { [etat: string]: number }
} = {
  // countByEtat: {},
  // countByLienArtEtat: {},
  // countByLienNature: {},
  // countByLienType: {},
  // countByTexteNature: {},
  // countByVersionEtat: {},
}

function auditBlocTextuel(
  audit: Audit,
  dataUnknown: unknown,
@@ -1 +1,2 @@
export { auditJo, joStats } from "./jo"
export { auditJorfArticle, jorfArticleStats } from "./articles"
src/lib/auditors/jorf/jo.ts (new file, 376 lines)
@@ -0,0 +1,376 @@
import {
  type Audit,
  auditRequire,
  auditDateIso8601String,
  auditTrimString,
  auditInteger,
  auditFunction,
  auditEmptyToNull,
  auditNullish,
  auditSwitch,
  auditNumber,
  auditCleanArray,
  auditOptions,
  auditHttpUrl,
  auditStringToNumber,
} from "@auditors/core"

import { allJoNaturesMutable, allJoOriginesMutable } from "$lib/legal"

export const joStats: {
  countByNature: { [nature: string]: number }
  countByOrigine: { [origine: string]: number }
} = {
  countByNature: {},
  countByOrigine: {},
}

export function auditJo(
  audit: Audit,
  dataUnknown: unknown,
): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))
  audit.attribute(
    data,
    "META",
    true,
    errors,
    remainingKeys,
    auditMeta,
    auditRequire,
  )
  audit.attribute(
    data,
    "STRUCTURE_TXT",
    true,
    errors,
    remainingKeys,
    auditSwitch(
      [auditTrimString, auditEmptyToNull, auditNullish],
      auditStructureTxt,
    ),
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}

function auditLienTxt(audit: Audit, dataUnknown: unknown): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  for (const key of ["@idtxt", "@titretxt"]) {
    audit.attribute(
      data,
      key,
      true,
      errors,
      remainingKeys,
      auditTrimString,
      auditEmptyToNull,
      auditRequire,
    )
  }

  return audit.reduceRemaining(data, errors, remainingKeys)
}

function auditMeta(audit: Audit, dataUnknown: unknown): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  audit.attribute(
    data,
    "META_COMMUN",
    true,
    errors,
    remainingKeys,
    auditMetaCommun,
    auditRequire,
  )
  audit.attribute(
    data,
    "META_SPEC",
    true,
    errors,
    remainingKeys,
    auditMetaSpec,
    auditRequire,
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}

function auditMetaCommun(
  audit: Audit,
  dataUnknown: unknown,
): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  audit.attribute(
    data,
    "ANCIEN_ID",
    true,
    errors,
    remainingKeys,
    auditSwitch(
      [auditNumber, auditInteger, auditFunction((id) => id.toString())],
      [auditTrimString, auditEmptyToNull],
    ),
    auditNullish,
  )
  for (const key of ["ID", "URL"]) {
    audit.attribute(
      data,
      key,
      true,
      errors,
      remainingKeys,
      auditTrimString,
      auditEmptyToNull,
      auditRequire,
    )
  }
  audit.attribute(
    data,
    "ID_ELI",
    true,
    errors,
    remainingKeys,
    auditTrimString,
    auditEmptyToNull,
    auditHttpUrl,
  )
  audit.attribute(
    data,
    "NATURE",
    true,
    errors,
    remainingKeys,
    auditTrimString,
    auditEmptyToNull,
    // auditFunction((nature) => {
    //   joStats.countByNature[nature] = (joStats.countByNature[nature] ?? 0) + 1
    //   return nature
    // }),
    auditOptions(allJoNaturesMutable),
    auditRequire,
  )
  audit.attribute(
    data,
    "ORIGINE",
    true,
    errors,
    remainingKeys,
    auditTrimString,
    auditEmptyToNull,
    // auditFunction((origine) => {
    //   joStats.countByOrigine[origine] =
    //     (joStats.countByOrigine[origine] ?? 0) + 1
    //   return origine
    // }),
    auditOptions(allJoOriginesMutable),
    auditRequire,
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}

function auditMetaConteneur(
  audit: Audit,
  dataUnknown: unknown,
): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  audit.attribute(
    data,
    "DATE_PUBLI",
    true,
    errors,
    remainingKeys,
    auditDateIso8601String,
    auditRequire,
  )
  audit.attribute(
    data,
    "NUM",
    true,
    errors,
    remainingKeys,
    auditSwitch(
      [auditNumber, auditFunction((num) => num.toString())],
      [auditTrimString, auditEmptyToNull],
    ),
  )
  audit.attribute(
    data,
    "TITRE",
    true,
    errors,
    remainingKeys,
    auditTrimString,
    auditEmptyToNull,
    auditRequire,
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}

function auditMetaSpec(audit: Audit, dataUnknown: unknown): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  audit.attribute(
    data,
    "META_CONTENEUR",
    true,
    errors,
    remainingKeys,
    auditMetaConteneur,
    auditRequire,
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}

function auditStructureTxt(
  audit: Audit,
  dataUnknown: unknown,
): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  audit.attribute(
    data,
    "LIEN_TXT",
    true,
    errors,
    remainingKeys,
    auditFunction((lien) => (Array.isArray(lien) ? lien : [lien])),
    auditCleanArray(auditLienTxt, auditRequire),
  )
  audit.attribute(
    data,
    "TM",
    true,
    errors,
    remainingKeys,
    auditFunction((tm) => (Array.isArray(tm) ? tm : [tm])),
    auditCleanArray(auditTm, auditRequire),
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}

/// Table des matières (table of contents)
function auditTm(audit: Audit, dataUnknown: unknown): [unknown, unknown] {
  if (dataUnknown == null) {
    return [dataUnknown, null]
  }
  if (typeof dataUnknown !== "object") {
    return audit.unexpectedType(dataUnknown, "object")
  }

  const data = { ...dataUnknown }
  const errors: { [key: string]: unknown } = {}
  const remainingKeys = new Set(Object.keys(data))

  audit.attribute(
    data,
    "@niv",
    true,
    errors,
    remainingKeys,
    auditStringToNumber,
    auditNumber,
    auditRequire,
  )
  audit.attribute(
    data,
    "LIEN_TXT",
    true,
    errors,
    remainingKeys,
    auditFunction((lien) => (Array.isArray(lien) ? lien : [lien])),
    auditCleanArray(auditLienTxt, auditRequire),
  )
  audit.attribute(
    data,
    "TITRE_TM",
    true,
    errors,
    remainingKeys,
    auditTrimString,
    auditEmptyToNull,
    auditRequire,
  )
  audit.attribute(
    data,
    "TM",
    true,
    errors,
    remainingKeys,
    auditFunction((tm) => (Array.isArray(tm) ? tm : [tm])),
    auditCleanArray(auditTm, auditRequire),
  )

  return audit.reduceRemaining(data, errors, remainingKeys)
}
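The import script further below wires auditJo into the same auditChain/auditRequire/strictAudit pipeline as the other validators. As a quick illustration of the call shape, here is a hedged sketch: only the call pattern comes from this commit, while the sample payload and its field values are made up.

import { auditChain, auditRequire, strictAudit } from "@auditors/core"
import { auditJo } from "$lib/auditors/jorf"

// Hypothetical JO payload, shaped like the parsed XML this commit validates.
const someParsedJoXml: unknown = {
  META: {
    META_COMMUN: {
      ID: "JORFCONT000000000001",
      URL: "texte/jorf/JORF/CONT/00/00/00/00/00/JORFCONT000000000001.xml",
      NATURE: "JO",
      ORIGINE: "JORF",
    },
    META_SPEC: {
      META_CONTENEUR: {
        DATE_PUBLI: "2023-01-01",
        NUM: "1",
        TITRE: "JORF n°0001 du 1 janvier 2023",
      },
    },
  },
}

// Same pattern as the ARTICLE and JO cases in the import script:
// a null error means the document passed validation.
const [jo, error] = auditChain(auditJo, auditRequire)(strictAudit, someParsedJoXml)
if (error !== null) {
  console.error("Invalid JO:", JSON.stringify(error, null, 2))
}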
@@ -20,6 +20,10 @@ export { default as TextelrView } from "./components/TextelrView.svelte"
export { default as TexteVersionView } from "./components/TexteVersionView.svelte"

export {
  allJoNatures,
  allJoNaturesMutable,
  allJoOrigines,
  allJoOriginesMutable,
  allJorfArticleEtats,
  allJorfArticleEtatsMutable,
  allJorfArticleLienArticleOrigines,
@@ -49,6 +53,8 @@ export {
  allLegiArticleTypes,
  allLegiArticleTypesMutable,
  type DossierLegislatif,
  type JoNature,
  type JoOrigine,
  type JorfArticle,
  type JorfArticleEtat,
  type JorfArticleLienArticleOrigine,
@@ -4,6 +4,10 @@ import type { DossierLegislatif } from "./dole"

export type { DossierLegislatif } from "./dole"
export {
  allJoNatures,
  allJoNaturesMutable,
  allJoOrigines,
  allJoOriginesMutable,
  allJorfArticleEtats,
  allJorfArticleEtatsMutable,
  allJorfArticleLienArticleOrigines,
@@ -16,6 +20,8 @@ export {
  allJorfArticleTexteNaturesMutable,
  allJorfArticleTypes,
  allJorfArticleTypesMutable,
  type JoNature,
  type JoOrigine,
  type JorfArticle,
  type JorfArticleEtat,
  type JorfArticleLienArticleOrigine,
@@ -59,6 +59,10 @@ export interface JorfArticle {
  }
}

export type JoNature = (typeof allJoNatures)[number]

export type JoOrigine = (typeof allJoOrigines)[number]

export type JorfArticleEtat = (typeof allJorfArticleEtats)[number]

export type JorfArticleLienArticleOrigine =
@@ -83,6 +87,12 @@ export interface JorfArticleTm {
  TM?: JorfArticleTm
}

export const allJoNatures = ["JO"] as const
export const allJoNaturesMutable = [...allJoNatures]

export const allJoOrigines = ["JORF"] as const
export const allJoOriginesMutable = [...allJoOrigines]

export const allJorfArticleEtats = [
  "ABROGE",
  "ABROGE_DIFF",
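JoNature and JoOrigine are literal unions derived from the const arrays added above, so auditOptions(allJoNaturesMutable) and the TypeScript types stay in sync from a single source of truth. A minimal sketch of that derivation; the helper and variable names below are illustrative only, not part of the commit.

import { allJoNatures, type JoNature } from "$lib/legal"

// JoNature is the literal union derived from allJoNatures, i.e. "JO".
const nature: JoNature = "JO"

// Runtime check against the same list used by auditOptions(allJoNaturesMutable).
const isJoNature = (value: string): value is JoNature =>
  (allJoNatures as readonly string[]).includes(value)

console.log(nature, isJoNature("JO"), isJoNature("JORF")) // "JO" true false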
import_jorf script:
@@ -1,4 +1,9 @@
import {
  auditChain,
  auditOptions,
  auditRequire,
  strictAudit,
} from "@auditors/core"
import assert from "assert"
import { XMLParser } from "fast-xml-parser"
import fs from "fs-extra"
@@ -8,7 +13,11 @@ import type { JSONValue } from "postgres"
import sade from "sade"

import { auditId, auditVersions } from "$lib/auditors/legal"
import {
  auditJo,
  auditJorfArticle,
  joStats,
} from "$lib/auditors/jorf"
import type {
  Jo,
  JorfArticle,
@@ -21,6 +30,18 @@ import type {
import { db } from "$lib/server/database"
import { walkDir } from "$lib/server/file_systems"

type CategoryTag = (typeof allCategoriesCode)[number]

const allCategoriesCode = [
  "ARTICLE",
  "ID",
  "JO",
  "SECTION_TA",
  "TEXTE_VERSION",
  "TEXTELR",
  "VERSIONS",
] as const

const xmlParser = new XMLParser({
  attributeNamePrefix: "@",
  ignoreAttributes: false,
@@ -42,71 +63,105 @@ const xmlParser = new XMLParser({

async function importJorf(
  dilaDir: string,
  { category, resume }: { category?: string; resume?: string } = {},
): Promise<void> {
  const [categoryTag, categoryError] = auditOptions([
    ...[...allCategoriesCode],
  ])(strictAudit, category) as [CategoryTag | undefined, unknown]
  assert.strictEqual(
    categoryError,
    null,
    `Error for category ${JSON.stringify(categoryTag)}:\n${JSON.stringify(
      categoryError,
      null,
      2,
    )}`,
  )
  let skip = resume !== undefined

  const deleteRemainingIds = !skip

  const articleRemainingIds =
    categoryTag === undefined || categoryTag === "ARTICLE"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM article
              WHERE id LIKE 'JORF%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const idRemainingElis =
    categoryTag === undefined || categoryTag === "ID"
      ? new Set(
          (
            await db<{ eli: string }[]>`
              SELECT eli
              FROM id
            `
          ).map(({ eli }) => eli),
        )
      : new Set<string>()
  const joRemainingIds =
    categoryTag === undefined || categoryTag === "JO"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM jo
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const sectionTaRemainingIds =
    categoryTag === undefined || categoryTag === "SECTION_TA"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM section_ta
              WHERE id LIKE 'JORF%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const textelrRemainingIds =
    categoryTag === undefined || categoryTag === "TEXTELR"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM textelr
              WHERE id LIKE 'JORF%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const texteVersionRemainingIds =
    categoryTag === undefined || categoryTag === "TEXTE_VERSION"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM texte_version
              WHERE id LIKE 'JORF%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const versionsRemainingElis =
    categoryTag === undefined || categoryTag === "VERSIONS"
      ? new Set(
          (
            await db<{ eli: string }[]>`
              SELECT eli
              FROM versions
            `
          ).map(({ eli }) => eli),
        )
      : new Set<string>()

  const dataDir = path.join(dilaDir, "jorf")
  assert(await fs.pathExists(dataDir))
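With this change each "remaining ids" set is only loaded when its category is selected (or when no category is given at all), and the category value itself is checked up front with auditOptions. A hedged sketch of what that up-front check is expected to yield, inferred from how its result is asserted above; the exact error shape is not shown in the diff.

import { auditOptions, strictAudit } from "@auditors/core"

const allCategoriesCode = ["ARTICLE", "ID", "JO", "SECTION_TA", "TEXTE_VERSION", "TEXTELR", "VERSIONS"] as const
const auditCategory = auditOptions([...allCategoriesCode])

// Assumed behaviour, inferred from how importJorf asserts the result:
// a listed option, or an omitted one, comes back with a null error;
// anything else comes back with a non-null error describing the rejection.
console.log(auditCategory(strictAudit, "JO"))      // e.g. ["JO", null]
console.log(auditCategory(strictAudit, undefined)) // e.g. [undefined, null]
console.log(auditCategory(strictAudit, "BOGUS"))   // e.g. ["BOGUS", <some error>]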
@@ -132,207 +187,238 @@ async function importJorf(
      encoding: "utf8",
    })
    const xmlData = xmlParser.parse(xmlString)
    for (const [tag, element] of Object.entries(xmlData) as [
      CategoryTag | "?xml",
      (
        | Jo
        | JorfArticle
        | JorfSectionTa
        | Textelr
        | TexteVersion
        | Versions
        | XmlHeader
      ),
    ][]) {
      switch (tag) {
        case "?xml": {
          const xmlHeader = element as XmlHeader
          assert.strictEqual(xmlHeader["@encoding"], "UTF-8", filePath)
          assert.strictEqual(xmlHeader["@version"], "1.0", filePath)
          break
        }
        case "ARTICLE":
          if (categoryTag === undefined || categoryTag === tag) {
            const [article, error] = auditChain(
              auditJorfArticle,
              auditRequire,
            )(strictAudit, element) as [JorfArticle, unknown]
            assert.strictEqual(
              error,
              null,
              `Unexpected format for ARTICLE:\n${JSON.stringify(
                article,
                null,
                2,
              )}\nError:\n${JSON.stringify(error, null, 2)}`,
            )
            await db`
              INSERT INTO article (
                id,
                data
              ) VALUES (
                ${article.META.META_COMMUN.ID},
                ${db.json(article as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(article as unknown as JSONValue)}
            `
            articleRemainingIds.delete(article.META.META_COMMUN.ID)
          }
          break
        case "ID":
          if (categoryTag === undefined || categoryTag === tag) {
            assert.strictEqual(relativeSplitPath[0], "global")
            assert.strictEqual(relativeSplitPath[1], "eli")
            const eli = relativeSplitPath.slice(2, -1).join("/")
            const [id, idError] = auditChain(auditId, auditRequire)(
              strictAudit,
              element,
            )
            assert.strictEqual(
              idError,
              null,
              `Unexpected format for ID:\n${JSON.stringify(
                id,
                null,
                2,
              )}\nError:\n${JSON.stringify(idError, null, 2)}`,
            )
            await db`
              INSERT INTO id (
                eli,
                id
              ) VALUES (
                ${eli},
                ${id}
              )
              ON CONFLICT (eli)
              DO UPDATE SET
                id = ${id}
            `
            idRemainingElis.delete(eli)
          }
          break
        case "JO":
          if (categoryTag === undefined || categoryTag === tag) {
            const [jo, error] = auditChain(auditJo, auditRequire)(
              strictAudit,
              element,
            ) as [Jo, unknown]
            assert.strictEqual(
              error,
              null,
              `Unexpected format for JO:\n${JSON.stringify(
                jo,
                null,
                2,
              )}\nError:\n${JSON.stringify(error, null, 2)}`,
            )
            await db`
              INSERT INTO jo (
                id,
                data
              ) VALUES (
                ${jo.META.META_COMMUN.ID},
                ${db.json(jo as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(jo as unknown as JSONValue)}
            `
            joRemainingIds.delete(jo.META.META_COMMUN.ID)
          }
          break
        case "SECTION_TA":
          if (categoryTag === undefined || categoryTag === tag) {
            const [section, error] = auditChain(
              auditJorfSectionTa,
              auditRequire,
            )(strictAudit, element) as [JorfSectionTa, unknown]
            assert.strictEqual(
              error,
              null,
              `Unexpected format for SECTION_TA:\n${JSON.stringify(
                section,
                null,
                2,
              )}\nError:\n${JSON.stringify(error, null, 2)}`,
            )
            await db`
              INSERT INTO section_ta (
                id,
                data
              ) VALUES (
                ${section.ID},
                ${db.json(section as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(section as unknown as JSONValue)}
            `
            sectionTaRemainingIds.delete(section.ID)
          }
          break
        case "TEXTE_VERSION":
          if (categoryTag === undefined || categoryTag === tag) {
            const texteVersion = element as TexteVersion
            const textAFragments = [
              texteVersion.META.META_SPEC.META_TEXTE_VERSION.TITRE,
              texteVersion.META.META_SPEC.META_TEXTE_VERSION.TITREFULL,
            ]
            await db`
              INSERT INTO texte_version (
                id,
                data,
                nature,
                text_search
              ) VALUES (
                ${texteVersion.META.META_COMMUN.ID},
                ${db.json(texteVersion as unknown as JSONValue)},
                ${texteVersion.META.META_COMMUN.NATURE},
                setweight(to_tsvector('french', ${textAFragments.join(
                  " ",
                )}), 'A')
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(texteVersion as unknown as JSONValue)},
                nature = ${texteVersion.META.META_COMMUN.NATURE},
                text_search = setweight(to_tsvector('french', ${textAFragments.join(
                  " ",
                )}), 'A')
            `
            texteVersionRemainingIds.delete(texteVersion.META.META_COMMUN.ID)
          }
          break
        case "TEXTELR":
          if (categoryTag === undefined || categoryTag === tag) {
            const textelr = element as Textelr
            await db`
              INSERT INTO textelr (
                id,
                data
              ) VALUES (
                ${textelr.META.META_COMMUN.ID},
                ${db.json(textelr as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(textelr as unknown as JSONValue)}
            `
            textelrRemainingIds.delete(textelr.META.META_COMMUN.ID)
          }
          break
        case "VERSIONS":
          if (categoryTag === undefined || categoryTag === tag) {
            assert.strictEqual(relativeSplitPath[0], "global")
            assert.strictEqual(relativeSplitPath[1], "eli")
            const eli = relativeSplitPath.slice(2, -1).join("/")
            const [versions, versionsError] = auditChain(
              auditVersions,
              auditRequire,
            )(strictAudit, element)
            assert.strictEqual(
              versionsError,
              null,
              `Unexpected format for VERSIONS:\n${JSON.stringify(
                versions,
                null,
                2,
              )}\nError:\n${JSON.stringify(versionsError, null, 2)}`,
            )
            const id = versions.VERSION["@id"]
            await db`
              INSERT INTO versions (
                eli,
                id,
                data
              ) VALUES (
                ${eli},
                ${id},
                ${db.json(versions as unknown as JSONValue)}
              )
              ON CONFLICT (eli)
              DO UPDATE SET
                id = ${id},
                data = ${db.json(versions as unknown as JSONValue)}
            `
            versionsRemainingElis.delete(id)
          }
          break
        default: {
          console.warn(
            `Unexpected root element "${tag}" in XML file: ${filePath}`,
          )
          break iterXmlFiles
        }
@@ -395,10 +481,17 @@ async function importJorf(
      `
    }
  }

  // console.log(
  //   "JORF articles stats =",
  //   JSON.stringify(jorfArticleStats, null, 2),
  // )
  console.log("JO stats =", JSON.stringify(joStats, null, 2))
}

sade("import_jorf <dilaDir>", true)
  .describe("Import Dila's JORF database")
  .option("-k, --category", "Import only given type of data")
  .option("-r, --resume", "Resume import at given relative file path")
  .example(
    "--resume global/eli/accord/2002/5/5/MESS0221690X/jo/article_1/versions.xml ../dila-data/",
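The new -k/--category flag is registered on both importers. A hedged sketch of how it plausibly reaches importJorf: the .action() wiring sits outside the hunks shown here, so the handler below is an assumption, while the option declarations themselves come from the diff.

import sade from "sade"

// sade passes parsed flags as the trailing options object, so running
// `import_jorf --category JO ../dila-data/` would arrive as opts.category === "JO".
sade("import_jorf <dilaDir>", true)
  .describe("Import Dila's JORF database")
  .option("-k, --category", "Import only given type of data")
  .option("-r, --resume", "Resume import at given relative file path")
  .action(async (dilaDir: string, opts: { category?: string; resume?: string }) => {
    // importJorf is the function defined above in this script.
    await importJorf(dilaDir, { category: opts.category, resume: opts.resume })
  })
  .parse(process.argv)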
import_legi script:
@@ -1,4 +1,9 @@
import {
  auditChain,
  auditOptions,
  auditRequire,
  strictAudit,
} from "@auditors/core"
import assert from "assert"
import { XMLParser } from "fast-xml-parser"
import fs from "fs-extra"
@@ -23,6 +28,17 @@ import type {
import { db } from "$lib/server/database"
import { walkDir } from "$lib/server/file_systems"

type CategoryTag = (typeof allCategoriesCode)[number]

const allCategoriesCode = [
  "ARTICLE",
  "ID",
  "SECTION_TA",
  "TEXTE_VERSION",
  "TEXTELR",
  "VERSIONS",
] as const

const xmlParser = new XMLParser({
  attributeNamePrefix: "@",
  ignoreAttributes: false,
@@ -44,63 +60,94 @@ const xmlParser = new XMLParser({

async function importLegi(
  dilaDir: string,
  { category, resume }: { category?: string; resume?: string } = {},
): Promise<void> {
  const [categoryTag, categoryError] = auditOptions([
    ...[...allCategoriesCode],
  ])(strictAudit, category) as [CategoryTag | undefined, unknown]
  assert.strictEqual(
    categoryError,
    null,
    `Error for category ${JSON.stringify(categoryTag)}:\n${JSON.stringify(
      categoryError,
      null,
      2,
    )}`,
  )
  let skip = resume !== undefined

  const deleteRemainingIds = !skip

  const articleRemainingIds =
    categoryTag === undefined || categoryTag === "ARTICLE"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM article
              WHERE id LIKE 'LEGI%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const idRemainingElis =
    categoryTag === undefined || categoryTag === "ID"
      ? new Set(
          (
            await db<{ eli: string }[]>`
              SELECT eli
              FROM id
            `
          ).map(({ eli }) => eli),
        )
      : new Set<string>()
  const sectionTaRemainingIds =
    categoryTag === undefined || categoryTag === "SECTION_TA"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM section_ta
              WHERE id LIKE 'LEGI%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const textelrRemainingIds =
    categoryTag === undefined || categoryTag === "TEXTELR"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM textelr
              WHERE id LIKE 'LEGI%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const texteVersionRemainingIds =
    categoryTag === undefined || categoryTag === "TEXTE_VERSION"
      ? new Set(
          (
            await db<{ id: string }[]>`
              SELECT id
              FROM texte_version
              WHERE id LIKE 'LEGI%'
            `
          ).map(({ id }) => id),
        )
      : new Set<string>()
  const versionsRemainingElis =
    categoryTag === undefined || categoryTag === "VERSIONS"
      ? new Set(
          (
            await db<{ eli: string }[]>`
              SELECT eli
              FROM versions
            `
          ).map(({ eli }) => eli),
        )
      : new Set<string>()

  const dataDir = path.join(dilaDir, "legi")
  assert(await fs.pathExists(dataDir))
@@ -126,183 +173,189 @@ async function importLegi(
      encoding: "utf8",
    })
    const xmlData = xmlParser.parse(xmlString)
    for (const [tag, element] of Object.entries(xmlData) as [
      CategoryTag | "?xml",
      LegiArticle | SectionTa | Textelr | TexteVersion | Versions | XmlHeader,
    ][]) {
      switch (tag) {
        case "?xml": {
          const xmlHeader = element as XmlHeader
          assert.strictEqual(xmlHeader["@encoding"], "UTF-8", filePath)
          assert.strictEqual(xmlHeader["@version"], "1.0", filePath)
          break
        }
        case "ARTICLE":
          if (categoryTag === undefined || categoryTag === tag) {
            const [article, error] = auditChain(
              auditLegiArticle,
              auditRequire,
            )(strictAudit, element) as [LegiArticle, unknown]
            assert.strictEqual(
              error,
              null,
              `Unexpected format for ARTICLE:\n${JSON.stringify(
                article,
                null,
                2,
              )}\nError:\n${JSON.stringify(error, null, 2)}`,
            )
            await db`
              INSERT INTO article (
                id,
                data
              ) VALUES (
                ${article.META.META_COMMUN.ID},
                ${db.json(article as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(article as unknown as JSONValue)}
            `
            articleRemainingIds.delete(article.META.META_COMMUN.ID)
          }
          break
        case "ID":
          if (categoryTag === undefined || categoryTag === tag) {
            assert.strictEqual(relativeSplitPath[0], "global")
            assert.strictEqual(relativeSplitPath[1], "eli")
            const eli = relativeSplitPath.slice(2, -1).join("/")
            const [id, idError] = auditChain(auditId, auditRequire)(
              strictAudit,
              element,
            )
            assert.strictEqual(
              idError,
              null,
              `Unexpected format for ID:\n${JSON.stringify(
                id,
                null,
                2,
              )}\nError:\n${JSON.stringify(idError, null, 2)}`,
            )
            assert
            await db`
              INSERT INTO id (
                eli,
                id
              ) VALUES (
                ${eli},
                ${id}
              )
              ON CONFLICT (eli)
              DO UPDATE SET
                id = ${id}
            `
            idRemainingElis.delete(eli)
          }
          break
        case "SECTION_TA":
          if (categoryTag === undefined || categoryTag === tag) {
            const section = element as SectionTa
            await db`
              INSERT INTO section_ta (
                id,
                data
              ) VALUES (
                ${section.ID},
                ${db.json(section as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(section as unknown as JSONValue)}
            `
            sectionTaRemainingIds.delete(section.ID)
          }
          break
        case "TEXTE_VERSION":
          if (categoryTag === undefined || categoryTag === tag) {
            const texteVersion = element as TexteVersion
            const textAFragments = [
              texteVersion.META.META_SPEC.META_TEXTE_VERSION.TITRE,
              texteVersion.META.META_SPEC.META_TEXTE_VERSION.TITREFULL,
            ]
            await db`
              INSERT INTO texte_version (
                id,
                data,
                nature,
                text_search
              ) VALUES (
                ${texteVersion.META.META_COMMUN.ID},
                ${db.json(texteVersion as unknown as JSONValue)},
                ${texteVersion.META.META_COMMUN.NATURE},
                setweight(to_tsvector('french', ${textAFragments.join(
                  " ",
                )}), 'A')
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(texteVersion as unknown as JSONValue)},
                nature = ${texteVersion.META.META_COMMUN.NATURE},
                text_search = setweight(to_tsvector('french', ${textAFragments.join(
                  " ",
                )}), 'A')
            `
            texteVersionRemainingIds.delete(texteVersion.META.META_COMMUN.ID)
          }
          break
        case "TEXTELR":
          if (categoryTag === undefined || categoryTag === tag) {
            const textelr = element as Textelr
            await db`
              INSERT INTO textelr (
                id,
                data
              ) VALUES (
                ${textelr.META.META_COMMUN.ID},
                ${db.json(textelr as unknown as JSONValue)}
              )
              ON CONFLICT (id)
              DO UPDATE SET
                data = ${db.json(textelr as unknown as JSONValue)}
            `
            textelrRemainingIds.delete(textelr.META.META_COMMUN.ID)
          }
          break
        case "VERSIONS":
          if (categoryTag === undefined || categoryTag === tag) {
            assert.strictEqual(relativeSplitPath[0], "global")
            assert.strictEqual(relativeSplitPath[1], "eli")
            const eli = relativeSplitPath.slice(2, -1).join("/")
            const [versions, versionsError] = auditChain(
              auditVersions,
              auditRequire,
            )(strictAudit, element)
            assert.strictEqual(
              versionsError,
              null,
              `Unexpected format for VERSIONS:\n${JSON.stringify(
                versions,
                null,
                2,
              )}\nError:\n${JSON.stringify(versionsError, null, 2)}`,
            )
            const id = versions.VERSION["@id"]
            await db`
              INSERT INTO versions (
                eli,
                id,
                data
              ) VALUES (
                ${eli},
                ${id},
                ${db.json(versions as unknown as JSONValue)}
              )
              ON CONFLICT (eli)
              DO UPDATE SET
                id = ${id},
                data = ${db.json(versions as unknown as JSONValue)}
            `
            versionsRemainingElis.delete(id)
          }
          break
        default: {
          console.warn(
            `Unexpected root element "${tag}" in XML file: ${filePath}`,
          )
          break iterXmlFiles
        }
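Both scripts now repeat the guard categoryTag === undefined || categoryTag === tag in every case of their switch statements. Purely as a sketch, not part of this commit, that filter could be centralised in a small helper:

// Hypothetical helper: centralises the category filter that every case
// of both import switches currently repeats inline.
const isSelected = <Tag extends string>(
  categoryTag: Tag | undefined,
  tag: Tag,
): boolean => categoryTag === undefined || categoryTag === tag

// e.g. inside a switch:
// case "ARTICLE":
//   if (isSelected(categoryTag, tag)) { /* audit and upsert as above */ }
//   break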