Add script to import LEGI database.

Emmanuel 2022-08-09 19:47:51 +02:00
parent 2bde3313b8
commit 7853ca4ee6
12 changed files with 968 additions and 1014 deletions

.gitignore vendored

@@ -2,5 +2,6 @@
/.svelte-kit
/build
/node_modules
/package
/*.env
!/example.env

README.md

@@ -9,9 +9,23 @@ _Tricoteuses Legal Explorer_ is free and open source software.
## Installation
### Create database
Using Debian GNU/Linux, install PostgreSQL, then:
```sh
sudo su - postgres
createuser legi -P # and enter the password
createdb -O legi legi
psql legi
CREATE EXTENSION IF NOT EXISTS pg_trgm;
\q
exit
```
### Install dependencies
```bash
```sh
npm install
```
@@ -19,22 +33,22 @@ npm install
Create a `.env` file to set configuration variables (you can use `example.env` as a template). Then:
```bash
npm run configure
```sh
npm run package
npm run configure
```
## Server Launch
In development mode:
```bash
```sh
npm run dev
```
In production mode:
```bash
```sh
npm run build
npm run preview
```

example.env Normal file

@@ -0,0 +1,7 @@
# PostgreSQL database configuration
DB_NAME="legi"
DB_HOST="localhost"
DB_PORT=5432
DB_USER="legi"
# Change the value of DB_PASSWORD!
DB_PASSWORD="legi"

package-lock.json generated

File diff suppressed because it is too large.

package.json

@@ -13,6 +13,7 @@
"build": "vite build",
"check": "svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-check --tsconfig ./tsconfig.json --watch",
"configure": "node --experimental-specifier-resolution=node package/scripts/configure.js",
"dev": "vite dev",
"format": "prettier --write --plugin-search-dir=. .",
"lint": "prettier --check --plugin-search-dir=. . && eslint .",
@@ -21,27 +22,37 @@
"test": "playwright test"
},
"devDependencies": {
"@auditors/core": "^0.3.0",
"@playwright/test": "^1.22.2",
"@sveltejs/adapter-auto": "next",
"@sveltejs/adapter-node": "^1.0.0-next.86",
"@sveltejs/kit": "next",
"@tailwindcss/typography": "^0.5.3",
"@types/fs-extra": "^9.0.13",
"@types/he": "^1.1.2",
"@typescript-eslint/eslint-plugin": "^5.27.0",
"@typescript-eslint/parser": "^5.27.0",
"autoprefixer": "^10.4.7",
"daisyui": "^2.18.1",
"dotenv": "^16.0.1",
"eslint": "^8.16.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-svelte3": "^4.0.0",
"fast-xml-parser": "^4.0.9",
"fs-extra": "^10.1.0",
"he": "^1.2.0",
"postcss": "^8.4.14",
"postcss-load-config": "^4.0.1",
"postgres": "^3.2.4",
"prettier": "^2.6.2",
"prettier-plugin-svelte": "^2.7.0",
"sade": "^1.8.1",
"svelte": "^3.44.0",
"svelte-check": "^2.7.1",
"svelte-preprocess": "^4.10.7",
"svelte2tsx": "^0.5.13",
"tailwindcss": "^3.1.5",
"tslib": "^2.3.1",
"typescript": "^4.7.4",
"vite": "^3.0.0",
"postcss": "^8.4.14",
"postcss-load-config": "^4.0.1",
"autoprefixer": "^10.4.7",
"tailwindcss": "^3.1.5",
"@tailwindcss/typography": "^0.5.3",
"daisyui": "^2.18.1"
"vite": "^3.0.0"
}
}


@@ -0,0 +1,12 @@
import { configureDatabase } from "$lib/server/database/configuration"
export async function configure(): Promise<void> {
await configureDatabase()
}
configure()
.then(() => process.exit(0))
.catch((error) => {
console.log(error.stack || error)
process.exit(1)
})


@@ -0,0 +1,318 @@
import assert from "assert"
import { XMLParser } from "fast-xml-parser"
import fs from "fs-extra"
import he from "he"
import path from "path"
import type { JSONValue } from "postgres"
import sade from "sade"
import { db } from "$lib/server/database"
import { walkDir } from "$lib/server/file_systems"
interface Article {
META: {
META_COMMUN: MetaCommun
}
BLOC_TEXTUEL: {
CONTENU: string // HTML
}
}
type EliId = string
interface EliVersions {}
interface MetaCommun {
ID: string
}
interface Section {
ID: string
}
interface Struct {
META: {
META_COMMUN: MetaCommun
}
}
interface Version {
META: {
META_COMMUN: MetaCommun
}
}
interface XmlHeader {
"@encoding": "UTF-8"
"@version": "1.0"
}
const xmlParser = new XMLParser({
attributeNamePrefix: "@",
ignoreAttributes: false,
stopNodes: ["ARTICLE.BLOC_TEXTUEL.CONTENU", "TEXTE_VERSION.CONTENU"],
tagValueProcessor: (_tagName, tagValue) => he.decode(tagValue),
})
async function importLegi({ resume }: { resume?: string } = {}): Promise<void> {
let skip = resume !== undefined
const deleteRemainingIds = !skip
const articlesRemainingIds = new Set(
(
await db<{ id: string }[]>`
SELECT id
FROM articles
`
).map(({ id }) => id),
)
const eliIdsRemainingIds = new Set(
(
await db<{ id: string }[]>`
SELECT id
FROM eli_ids
`
).map(({ id }) => id),
)
const eliVersionsRemainingIds = new Set(
(
await db<{ id: string }[]>`
SELECT id
FROM eli_versions
`
).map(({ id }) => id),
)
const sectionsRemainingIds = new Set(
(
await db<{ id: string }[]>`
SELECT id
FROM sections
`
).map(({ id }) => id),
)
const structsRemainingIds = new Set(
(
await db<{ id: string }[]>`
SELECT id
FROM structs
`
).map(({ id }) => id),
)
const versionsRemainingIds = new Set(
(
await db<{ id: string }[]>`
SELECT id
FROM versions
`
).map(({ id }) => id),
)
const dataDir = path.join("..", "dila-data", "legi")
assert(await fs.pathExists(dataDir))
iterXmlFiles: for (const relativeSplitPath of walkDir(dataDir)) {
const relativePath = path.join(...relativeSplitPath)
if (skip) {
if (relativePath.startsWith(resume!)) {
skip = false
console.log(`Resuming at file ${relativePath}...`)
} else {
continue
}
}
const filePath = path.join(dataDir, relativePath)
if (!filePath.endsWith(".xml")) {
console.info(`Skipping non-XML file at ${filePath}`)
continue
}
const xmlString: string = await fs.readFile(filePath, {
encoding: "utf8",
})
const xmlData = xmlParser.parse(xmlString)
for (const [key, element] of Object.entries(xmlData) as [
string,
Article | EliId | EliVersions | Section | Struct | Version | XmlHeader,
][]) {
switch (key) {
case "?xml":
const xmlHeader = element as XmlHeader
assert.strictEqual(xmlHeader["@encoding"], "UTF-8", filePath)
assert.strictEqual(xmlHeader["@version"], "1.0", filePath)
break
case "ARTICLE": {
const article = element as Article
await db`
INSERT INTO articles (
id,
data
) VALUES (
${article.META.META_COMMUN.ID},
${db.json(article as unknown as JSONValue)}
)
ON CONFLICT (id)
DO UPDATE SET
data = ${db.json(article as unknown as JSONValue)}
`
articlesRemainingIds.delete(article.META.META_COMMUN.ID)
break
}
case "ID": {
assert.strictEqual(relativeSplitPath[0], "global")
assert.strictEqual(relativeSplitPath[1], "eli")
const id = relativeSplitPath.slice(2, -1).join("/")
const eliId = element as EliId
await db`
INSERT INTO eli_ids (
id,
data
) VALUES (
${id},
${db.json(eliId as unknown as JSONValue)}
)
ON CONFLICT (id)
DO UPDATE SET
data = ${db.json(eliId as unknown as JSONValue)}
`
eliIdsRemainingIds.delete(id)
break
}
case "SECTION_TA": {
const section = element as Section
await db`
INSERT INTO sections (
id,
data
) VALUES (
${section.ID},
${db.json(section as unknown as JSONValue)}
)
ON CONFLICT (id)
DO UPDATE SET
data = ${db.json(section as unknown as JSONValue)}
`
sectionsRemainingIds.delete(section.ID)
break
}
case "TEXTELR": {
const struct = element as Struct
await db`
INSERT INTO structs (
id,
data
) VALUES (
${struct.META.META_COMMUN.ID},
${db.json(struct as unknown as JSONValue)}
)
ON CONFLICT (id)
DO UPDATE SET
data = ${db.json(struct as unknown as JSONValue)}
`
structsRemainingIds.delete(struct.META.META_COMMUN.ID)
break
}
case "TEXTE_VERSION": {
const version = element as Version
await db`
INSERT INTO versions (
id,
data
) VALUES (
${version.META.META_COMMUN.ID},
${db.json(version as unknown as JSONValue)}
)
ON CONFLICT (id)
DO UPDATE SET
data = ${db.json(version as unknown as JSONValue)}
`
versionsRemainingIds.delete(version.META.META_COMMUN.ID)
break
}
case "VERSIONS": {
assert.strictEqual(relativeSplitPath[0], "global")
assert.strictEqual(relativeSplitPath[1], "eli")
const id = relativeSplitPath.slice(2, -1).join("/")
const eliVersion = element as EliVersions
await db`
INSERT INTO eli_versions (
id,
data
) VALUES (
${id},
${db.json(eliVersion as unknown as JSONValue)}
)
ON CONFLICT (id)
DO UPDATE SET
data = ${db.json(eliVersion as unknown as JSONValue)}
`
eliVersionsRemainingIds.delete(id)
break
}
default: {
console.warn(
`Unexpected root element "${key}" in XML file: ${filePath}`,
)
break iterXmlFiles
}
}
}
// console.log(filePath)
// console.log(JSON.stringify(xmlData, null, 2))
}
if (deleteRemainingIds) {
for (const id of articlesRemainingIds) {
console.log(`Deleting article ${id}`)
await db`
DELETE FROM articles
WHERE id = ${id}
`
}
for (const id of eliIdsRemainingIds) {
console.log(`Deleting ELI ID ${id}`)
await db`
DELETE FROM eli_ids
WHERE id = ${id}
`
}
for (const id of eliVersionsRemainingIds) {
console.log(`Deleting ELI versions ${id}`)
await db`
DELETE FROM eli_versions
WHERE id = ${id}
`
}
for (const id of sectionsRemainingIds) {
console.log(`Deleting section ${id}`)
await db`
DELETE FROM sections
WHERE id = ${id}
`
}
for (const id of structsRemainingIds) {
console.log(`Deleting struct ${id}`)
await db`
DELETE FROM structs
WHERE id = ${id}
`
}
for (const id of versionsRemainingIds) {
console.log(`Deleting version ${id}`)
await db`
DELETE FROM versions
WHERE id = ${id}
`
}
}
}
sade("import_legi", true)
.describe("Import Dila's LEGI database")
.option("-r", "--resume", "Resume import at given relative file path")
.example(
"--resume global/eli/accord/2002/5/5/MESS0221690X/jo/article_1/versions.xml",
)
.action(async (options) => {
await importLegi(options)
process.exit(0)
})
.parse(process.argv)
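
A minimal sketch (the sample input is invented for illustration) of why `stopNodes` is used above: it keeps the HTML inside `BLOC_TEXTUEL.CONTENU` as a raw string instead of turning it into a nested object.

```ts
import { XMLParser } from "fast-xml-parser"

// Same parser options as the import script, reduced to the relevant ones.
const parser = new XMLParser({
  attributeNamePrefix: "@",
  ignoreAttributes: false,
  stopNodes: ["ARTICLE.BLOC_TEXTUEL.CONTENU"],
})

// Invented sample document, shaped like a LEGI article.
const sample = `<ARTICLE>
  <BLOC_TEXTUEL>
    <CONTENU><p>Texte <em>en vigueur</em>.</p></CONTENU>
  </BLOC_TEXTUEL>
</ARTICLE>`

const parsed = parser.parse(sample)
// The stop node is returned unparsed, so the HTML markup survives as-is.
console.log(typeof parsed.ARTICLE.BLOC_TEXTUEL.CONTENU) // "string"
```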

src/lib/server/auditors/config.ts Normal file

@@ -0,0 +1,86 @@
import {
type Audit,
auditInteger,
auditRequire,
auditStringToNumber,
auditTest,
auditTrimString,
cleanAudit,
} from "@auditors/core"
export function auditConfig(
audit: Audit,
dataUnknown: unknown,
): [unknown, unknown] {
if (dataUnknown == null) {
return [dataUnknown, null]
}
if (typeof dataUnknown !== "object") {
return audit.unexpectedType(dataUnknown, "object")
}
const data = { ...dataUnknown }
const errors: { [key: string]: unknown } = {}
const remainingKeys = new Set(Object.keys(data))
audit.attribute(
data,
"db",
true,
errors,
remainingKeys,
auditDb,
auditRequire,
)
return audit.reduceRemaining(data, errors, remainingKeys)
}
export function auditDb(
audit: Audit,
dataUnknown: unknown,
): [unknown, unknown] {
if (dataUnknown == null) {
return [dataUnknown, null]
}
if (typeof dataUnknown !== "object") {
return audit.unexpectedType(dataUnknown, "object")
}
const data = { ...dataUnknown }
const errors: { [key: string]: unknown } = {}
const remainingKeys = new Set(Object.keys(data))
for (const key of ["database", "host", "password", "user"]) {
audit.attribute(
data,
key,
true,
errors,
remainingKeys,
auditTrimString,
auditRequire,
)
}
audit.attribute(
data,
"port",
true,
errors,
remainingKeys,
auditTrimString,
auditStringToNumber,
auditInteger,
auditTest(
(value) => 0 <= value && value <= 65535,
"Must be an integer between 0 and 65535",
),
auditRequire,
)
return audit.reduceRemaining(data, errors, remainingKeys)
}
export function validateConfig(data: unknown): [unknown, unknown] {
return auditConfig(cleanAudit, data)
}
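
A hypothetical usage sketch (the input values are illustrative): environment variables arrive as strings, which is why `port` goes through `auditStringToNumber` and `auditInteger` above before being accepted.

```ts
import { validateConfig } from "$lib/server/auditors/config"

// All values are strings here, as they would be when read from process.env.
const [validData, error] = validateConfig({
  db: {
    database: "legi",
    host: "localhost",
    password: "legi",
    port: "5432",
    user: "legi",
  },
})

if (error === null) {
  // Assuming the auditors behave as their names suggest, port is now a number.
  const { port } = (validData as { db: { port: number } }).db
  console.log(port === 5432) // true
}
```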

src/lib/server/config.ts Normal file

@@ -0,0 +1,36 @@
import "dotenv/config"
import { validateConfig } from "$lib/server/auditors/config"
export interface Config {
db: {
host: string
port: number
database: string
user: string
password: string
}
}
const config = {
db: {
host: process.env.DB_HOST,
port: process.env.DB_PORT,
database: process.env.DB_NAME,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
},
}
const [validConfig, error] = validateConfig(config) as [Config, unknown]
if (error !== null) {
console.error(
`Error in server configuration:\n${JSON.stringify(
validConfig,
null,
2,
)}\nError:\n${JSON.stringify(error, null, 2)}`,
)
process.exit(-1)
}
export default validConfig

src/lib/server/database/configuration.ts Normal file

@@ -0,0 +1,126 @@
import assert from "assert"
import { db, type Version, versionNumber } from "$lib/server/database"
export async function configureDatabase() {
// Table: version
await db`
CREATE TABLE IF NOT EXISTS version(
number integer NOT NULL
)
`
let version = (await db<Version[]>`SELECT * FROM version`)[0]
if (version === undefined) {
version = (
await db<Version[]>`
INSERT INTO version (number)
VALUES (${versionNumber})
RETURNING *
`
)[0]
} else if (version.number === undefined) {
version = { number: 0 }
}
assert(
version.number <= versionNumber,
`Database is too recent for current version of application: ${version.number} > ${versionNumber}.`,
)
if (version.number < versionNumber) {
console.log(
`Upgrading database from version ${version.number} to ${versionNumber}`,
)
}
// Apply patches that must be executed before every table is created.
// Types
// Tables
// Table: articles
await db`
CREATE TABLE IF NOT EXISTS articles (
id char(20) PRIMARY KEY,
data jsonb NOT NULL
)
`
// // Table: articles_autocompletions
// await `
// CREATE TABLE IF NOT EXISTS articles_autocompletions (
// autocompletion text NOT NULL,
// id bigint NOT NULL REFERENCES articles(id) ON DELETE CASCADE,
// weight int NOT NULL,
// PRIMARY KEY (id, autocompletion)
// )
// `
// Table: eli_ids
await db`
CREATE TABLE IF NOT EXISTS eli_ids (
id text PRIMARY KEY,
data jsonb NOT NULL
)
`
// Table: eli_versions
await db`
CREATE TABLE IF NOT EXISTS eli_versions (
id text PRIMARY KEY,
data jsonb NOT NULL
)
`
// Table: sections
await db`
CREATE TABLE IF NOT EXISTS sections (
id char(20) PRIMARY KEY,
data jsonb NOT NULL
)
`
// Table: structs
await db`
CREATE TABLE IF NOT EXISTS structs (
id char(20) PRIMARY KEY,
data jsonb NOT NULL
)
`
// Table: versions
await db`
CREATE TABLE IF NOT EXISTS versions (
id char(20) PRIMARY KEY,
data jsonb NOT NULL
)
`
// Apply patches that must be executed after every table is created.
// Add indexes once every table and column exists.
// await db`
// CREATE INDEX IF NOT EXISTS articles_autocompletions_trigrams_idx
// ON articles_autocompletions
// USING GIST (autocompletion gist_trgm_ops)
// `
// Add comments once every table and column exists.
// Upgrade version number if needed.
const previousVersionNumber = version.number
version.number = versionNumber
assert(
version.number >= previousVersionNumber,
`Error in database upgrade script: Wrong version number: ${version.number} < ${previousVersionNumber}.`,
)
if (version.number !== previousVersionNumber) {
await db`UPDATE version SET number = ${version.number}`
console.log(
`Upgraded database from version ${previousVersionNumber} to ${version.number}.`,
)
}
}
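
For illustration only (nothing below is part of this commit): a later schema change would bump `versionNumber` in `$lib/server/database` and guard its patch on the stored version, in the "patches" sections marked by the comments above, along these lines.

```ts
import { db } from "$lib/server/database"

// Hypothetical patch for a future schema version 2 (the column name is illustrative).
// configureDatabase() would call this in its patches section, before updating the
// stored version number.
export async function applySchemaPatchesV2(currentNumber: number): Promise<void> {
  if (currentNumber < 2) {
    await db`ALTER TABLE articles ADD COLUMN IF NOT EXISTS nature text`
  }
}
```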

src/lib/server/database/index.ts Normal file

@@ -0,0 +1,41 @@
import assert from "assert"
import postgres from "postgres"
import config from "$lib/server/config"
export interface Version {
number: number
}
export const db = postgres({
database: config.db.database,
host: config.db.host,
password: config.db.password,
port: config.db.port,
user: config.db.user,
})
export const versionNumber = 1
/// Check that database exists and is up to date.
export async function checkDb(): Promise<void> {
assert(
(
await db`SELECT EXISTS (
SELECT * FROM information_schema.tables WHERE table_name='version'
)`
)[0]?.exists,
'Database is not initialized. Run "npm run configure" to do it.',
)
const version = (await db<Version[]>`SELECT * FROM version`)[0]
assert.notStrictEqual(
version,
undefined,
'Database has no version number. Run "npm run configure" to do it.',
)
assert(version.number <= versionNumber, "Database format is too recent.")
assert.strictEqual(
version.number,
versionNumber,
'Database must be upgraded. Run "npm run configure" to do it.',
)
}
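
A hypothetical call site (not included in this commit): awaiting `checkDb()` once at server startup, for example from a SvelteKit `src/hooks.server.ts`, makes a missing or outdated schema fail fast with the assertion messages above.

```ts
// src/hooks.server.ts — hypothetical file, not part of this commit.
import { checkDb } from "$lib/server/database"

// Throws one of the assertion messages above if the database is not ready.
await checkDb()
```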

src/lib/server/file_systems.ts Normal file

@@ -0,0 +1,21 @@
import fs from "fs-extra"
import path from "path"
export function* walkDir(
rootDir: string,
relativeSplitDir: string[] = [],
): Generator<string[]> {
const dir = path.join(rootDir, ...relativeSplitDir)
for (const filename of fs.readdirSync(dir)) {
if (filename[0] === ".") {
continue
}
const filePath = path.join(dir, filename)
const relativeSplitPath = [...relativeSplitDir, filename]
if (fs.statSync(filePath).isDirectory()) {
yield* walkDir(rootDir, relativeSplitPath)
} else {
yield relativeSplitPath
}
}
}
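
Usage sketch (the directory is the one the import script assumes): `walkDir` yields each regular file as a list of path segments relative to the root, skipping dot-files, so callers can rebuild either the relative or the absolute path.

```ts
import path from "path"
import { walkDir } from "$lib/server/file_systems"

const dataDir = path.join("..", "dila-data", "legi")
for (const relativeSplitPath of walkDir(dataDir)) {
  // e.g. "global/eli/accord/2002/5/5/MESS0221690X/jo/article_1/versions.xml"
  console.log(path.join(...relativeSplitPath))
}
```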