feat: introduce first version of object interface #756

Merged: 23 commits, Oct 1, 2024
9 changes: 9 additions & 0 deletions bin/build-db
@@ -62,6 +62,15 @@ const elmWithFixtures = elmTemplate
parseAndValidate(NODE_ENV === "test" ? dataFiles.foodDetailed : dataFiles.foodNoDetails, "id"),
)
.replace("%foodProductExamplesJson%", parseAndValidate("public/data/food/examples.json", "id"))
// Object JSON data
.replace("%objectExamplesJson%", parseAndValidate("public/data/object/examples.json", "id"))
.replace(
"%objectProcessesJson%",
parseAndValidate(
NODE_ENV === "test" ? dataFiles.objectDetailed : dataFiles.objectNoDetails,
"id",
),
)
// Textile JSON data
.replace("%textileMaterialsJson%", parseAndValidate("public/data/textile/materials.json", "id"))
.replace(
3 changes: 2 additions & 1 deletion check-db.js
@@ -15,8 +15,9 @@ try {

const elmApp = Elm.CheckDb.init({
flags: {
textileProcesses: fs.readFileSync(dataFiles.textileDetailed, "utf-8"),
foodProcesses: fs.readFileSync(dataFiles.foodDetailed, "utf-8"),
objectProcesses: fs.readFileSync(dataFiles.objectDetailed, "utf-8"),
textileProcesses: fs.readFileSync(dataFiles.textileDetailed, "utf-8"),
},
});

2 changes: 2 additions & 0 deletions lib/index.js
@@ -71,6 +71,8 @@ function getDataFiles(dataDir) {
return {
foodDetailed: checkFile(`${dataDir}/data/food/processes_impacts.json`),
foodNoDetails: "public/data/food/processes.json",
objectDetailed: checkFile(`${dataDir}/data/object/processes_impacts.json`),
objectNoDetails: "public/data/object/processes.json",
textileDetailed: checkFile(`${dataDir}/data/textile/processes_impacts.json`),
textileNoDetails: "public/data/textile/processes.json",
};
5 changes: 4 additions & 1 deletion package.json
@@ -89,5 +89,8 @@
"node_modules",
"elm-stuff",
".elm"
]
],
"jest": {
"testTimeout": 50
}
}
32 changes: 32 additions & 0 deletions public/data/object/examples.json
@@ -0,0 +1,32 @@
[
{
"id": "7d78d30e-7c35-451f-b8ab-e590f39ed0e8",
"name": "Chaise",
"category": "",
"query": {
"processes": [
{
"process_id": "07e9e916-e02b-45e2-a298-2b5084de6242",
"amount": 0.00088
},
{
"process_id": "3295b2a5-328a-4c00-b046-e2ddeb0da823",
"amount": 1.645313
}
]
}
},
{
"id": "4d14194a-8c62-4c23-8799-745c498d1f6b",
"name": "Table",
"category": "",
"query": {
"processes": [
{
"process_id": "07e9e916-e02b-45e2-a298-2b5084de6242",
"amount": 0.002
}
]
}
}
]
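Each example query is a list of process references: a process_id pointing at an entry in public/data/object/processes.json plus an amount, presumably expressed in that process's unit (m3 for the board, kg for the plastic component). Data.Object.Query itself is not part of this diff, so the following Elm shape is a purely hypothetical sketch of what the decoded query could look like:

module HypotheticalObjectQuery exposing (Item, Query)

-- Hypothetical sketch only: Data.Object.Query is referenced by
-- Data.Object.Db later in this diff but its definition is not included,
-- so both aliases below are assumptions made purely for illustration.

import Data.Object.Process as Process


type alias Item =
    { amount : Float
    , processId : Process.Id
    }


type alias Query =
    { processes : List Item }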
66 changes: 66 additions & 0 deletions public/data/object/processes.json
@@ -0,0 +1,66 @@
[
{
"id": "07e9e916-e02b-45e2-a298-2b5084de6242",
"name": "Sawnwood, board, hardwood, dried (u=10%), planed {Europe without Switzerland}| market for sawnwood, board, hardwood, dried (u=10%), planed | Cut-off, S",
"display_name": "Planche (bois de feuillus)",
"density": 600,
"unit": "m3",
"impacts": {
"acd": 0,
"cch": 0,
"etf": 0,
"etf-c": 0,
"fru": 0,
"fwe": 0,
"htc": 0,
"htc-c": 0,
"htn": 0,
"htn-c": 0,
"ior": 0,
"ldu": 0,
"mru": 0,
"ozd": 0,
"pco": 0,
"pma": 0,
"swe": 0,
"tre": 0,
"wtu": 0,
"ecs": 22733,
"pef": 0
},
"source": "Ecoinvent",
"comment": ""
},
{
"id": "3295b2a5-328a-4c00-b046-e2ddeb0da823",
"name": "Plastic frame (PP)",
"display_name": "Composant en plastique (PP)",
"density": 900,
"unit": "kg",
"impacts": {
"acd": 0,
"cch": 0,
"etf": 0,
"etf-c": 0,
"fru": 0,
"fwe": 0,
"htc": 0,
"htc-c": 0,
"htn": 0,
"htn-c": 0,
"ior": 0,
"ldu": 0,
"mru": 0,
"ozd": 0,
"pco": 0,
"pma": 0,
"swe": 0,
"tre": 0,
"wtu": 0,
"ecs": 382,
"pef": 0
},
"source": "Ecobalyse",
"comment": "modélisation d'un composant générique (voir documentation)"
}
]
51 changes: 27 additions & 24 deletions server.js
@@ -116,43 +116,44 @@ if (fs.existsSync(versionsDir)) {
const dirs = fs.readdirSync(versionsDir);
for (const dir of dirs) {
const currentVersionDir = path.join(versionsDir, dir);

const foodNoDetails = path.join(currentVersionDir, "data/food/processes.json");
const objectNoDetails = path.join(currentVersionDir, "data/object/processes.json");
const textileNoDetails = path.join(currentVersionDir, "data/textile/processes.json");

const foodDetailed = path.join(currentVersionDir, "data/food/processes_impacts.json");
const textileDetailed = path.join(currentVersionDir, "data/textile/processes_impacts.json");

const foodDetailedEnc = path.join(currentVersionDir, "processes_impacts_food.json.enc");
const objectDetailedEnc = path.join(currentVersionDir, "processes_impacts_object.json.enc");
const textileDetailedEnc = path.join(currentVersionDir, "processes_impacts_textile.json.enc");

// We should not check for the existence of objectNoDetails because old versions don't have it
// and it's expected
if (!fs.existsSync(foodNoDetails) || !fs.existsSync(textileNoDetails)) {
console.error(
`🚨 ERROR: processes files without details missing for version ${dir}. Skipping version.`,
);
continue;
}
let processesImpacts;

if (fs.existsSync(foodDetailedEnc) && fs.existsSync(textileDetailedEnc)) {
console.log(`Encrypted files found for ${dir}: ${foodDetailedEnc} && ${textileDetailedEnc}`);
// Encrypted files exist, use them
processesImpacts = {
foodProcesses: decrypt(JSON.parse(fs.readFileSync(foodDetailedEnc).toString("utf-8"))),
textileProcesses: decrypt(
JSON.parse(fs.readFileSync(textileDetailedEnc).toString("utf-8")),
),
};
} else if (fs.existsSync(foodDetailed) || fs.existsSync(textileDetailed)) {
// Or use old files
processesImpacts = {
foodProcesses: fs.readFileSync(foodDetailed, "utf8"),
textileProcesses: fs.readFileSync(textileDetailed, "utf8"),
};
}
// Encrypted files exist, use them
processesImpacts = {
foodProcesses: decrypt(JSON.parse(fs.readFileSync(foodDetailedEnc).toString("utf-8"))),
// Old versions don't have the object files
objectProcesses: fs.existsSync(objectDetailedEnc)
? decrypt(JSON.parse(fs.readFileSync(objectDetailedEnc).toString("utf-8")))
: null,
textileProcesses: decrypt(JSON.parse(fs.readFileSync(textileDetailedEnc).toString("utf-8"))),
};

availableVersions.push({
dir,
processes: {
foodProcesses: fs.readFileSync(foodNoDetails, "utf8"),
// Old versions don't have the object files
objectProcesses: fs.existsSync(objectNoDetails)
? fs.readFileSync(objectNoDetails, "utf8")
: null,

textileProcesses: fs.readFileSync(textileNoDetails, "utf8"),
},
processesImpacts,
@@ -172,11 +173,13 @@ const apiTracker = lib.setupTracker(openApiContents);

const processesImpacts = {
foodProcesses: fs.readFileSync(dataFiles.foodDetailed, "utf8"),
objectProcesses: fs.readFileSync(dataFiles.objectDetailed, "utf8"),
textileProcesses: fs.readFileSync(dataFiles.textileDetailed, "utf8"),
};

const processes = {
foodProcesses: fs.readFileSync(dataFiles.foodNoDetails, "utf8"),
objectProcesses: fs.readFileSync(dataFiles.objectNoDetails, "utf8"),
textileProcesses: fs.readFileSync(dataFiles.textileNoDetails, "utf8"),
};

@@ -275,14 +278,14 @@ version.all("/:versionNumber/api/*", checkVersionAndPath, bodyParser.json(), asy
const foodProcesses = fs
.readFileSync(path.join(req.staticDir, "data", "food", "processes_impacts.json"))
.toString();
const objectProcesses = fs
.readFileSync(path.join(req.staticDir, "data", "object", "processes_impacts.json"))
.toString();
const textileProcesses = fs
.readFileSync(path.join(req.staticDir, "data", "textile", "processes_impacts.json"))
.toString();

const processes = {
foodProcesses: foodProcesses,
textileProcesses: textileProcesses,
};
const processes = { foodProcesses, objectProcesses, textileProcesses };

const { Elm } = require(path.join(req.staticDir, "server-app"));

@@ -298,7 +301,7 @@ version.all("/:versionNumber/api/*", checkVersionAndPath, bodyParser.json(), asy
method: req.method,
url: urlWithoutPrefix,
body: req.body,
processes: processes,
processes,
jsResponseHandler: ({ status, body }) => {
res.status(status).send(body);
},
1 change: 1 addition & 0 deletions src/CheckDb.elm
@@ -6,6 +6,7 @@ import Static.Json as StaticJson

type alias Flags =
{ foodProcesses : String
, objectProcesses : String
, textileProcesses : String
}

5 changes: 5 additions & 0 deletions src/Data/Dataset.elm
@@ -50,6 +50,11 @@ datasets scope =
, FoodProcesses Nothing
]

Scope.Object ->
[ Impacts Nothing
, Countries Nothing
]

Scope.Textile ->
[ TextileExamples Nothing
, Impacts Nothing
33 changes: 33 additions & 0 deletions src/Data/Object/Db.elm
@@ -0,0 +1,33 @@
module Data.Object.Db exposing
( Db
, buildFromJson
)

import Data.Example as Example exposing (Example)
import Data.Impact as Impact
import Data.Object.Process as Process exposing (Process)
import Data.Object.Query as Query exposing (Query)
import Json.Decode as Decode
import Result.Extra as RE


type alias Db =
{ examples : List (Example Query)
, processes : List Process
}


buildFromJson : String -> String -> Result String Db
buildFromJson objectExamplesJson objectProcessesJson =
objectProcessesJson
|> Decode.decodeString (Process.decodeList Impact.decodeImpacts)
|> Result.mapError Decode.errorToString
|> Result.andThen
(\processes ->
Ok Db
|> RE.andMap
(objectExamplesJson
|> Example.decodeListFromJsonString Query.decode
)
|> RE.andMap (Ok processes)
)
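A minimal, hedged usage sketch of the new builder: only buildFromJson and the Db record come from this diff, while the module name and the origin of the JSON strings (the %objectExamplesJson% and %objectProcessesJson% placeholders filled by bin/build-db) are assumptions.

module ObjectDbUsage exposing (summary)

-- Hedged sketch: only Db.buildFromJson and the Db record are defined in
-- this PR; the JSON strings are assumed to come from the placeholders
-- injected by bin/build-db.

import Data.Object.Db as ObjectDb


summary : String -> String -> String
summary objectExamplesJson objectProcessesJson =
    case ObjectDb.buildFromJson objectExamplesJson objectProcessesJson of
        Ok db ->
            -- db.examples : List (Example Query), db.processes : List Process
            String.fromInt (List.length db.examples)
                ++ " examples, "
                ++ String.fromInt (List.length db.processes)
                ++ " processes"

        Err error ->
            "Object db decoding failed: " ++ error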
86 changes: 86 additions & 0 deletions src/Data/Object/Process.elm
@@ -0,0 +1,86 @@
module Data.Object.Process exposing
( Id
, Process
, decodeId
, decodeList
, encode
, encodeId
, findById
)

import Data.Impact as Impact exposing (Impacts)
import Data.Uuid as Uuid exposing (Uuid)
import Json.Decode as Decode exposing (Decoder)
import Json.Decode.Pipeline as Pipe
import Json.Encode as Encode


type Id
= Id Uuid


type alias Process =
{ comment : String
, density : Float
, displayName : String
, id : Id
, impacts : Impacts
, name : String
, source : String
, unit : String
}


decodeProcess : Decoder Impact.Impacts -> Decoder Process
decodeProcess impactsDecoder =
Decode.succeed Process
|> Pipe.required "comment" Decode.string
|> Pipe.required "density" Decode.float
|> Pipe.required "display_name" Decode.string
|> Pipe.required "id" decodeId
|> Pipe.required "impacts" impactsDecoder
|> Pipe.required "name" Decode.string
|> Pipe.required "source" Decode.string
|> Pipe.required "unit" Decode.string


decodeId : Decoder Id
decodeId =
Decode.map Id Uuid.decoder


decodeList : Decoder Impact.Impacts -> Decoder (List Process)
decodeList impactsDecoder =
Decode.list (decodeProcess impactsDecoder)


encode : Process -> Encode.Value
encode process =
Encode.object
[ ( "comment", Encode.string process.comment )
, ( "density", Encode.float process.density )
, ( "displayName", Encode.string process.displayName )
, ( "id", encodeId process.id )
, ( "impacts", Impact.encode process.impacts )
, ( "name", Encode.string process.name )
, ( "source", Encode.string process.source )
, ( "unit", Encode.string process.unit )
]


encodeId : Id -> Encode.Value
encodeId (Id uuid) =
Uuid.encoder uuid


findById : List Process -> Id -> Result String Process
findById processes id =
processes
|> List.filter (.id >> (==) id)
|> List.head
|> Result.fromMaybe ("Procédé introuvable par id : " ++ idToString id)


idToString : Id -> String
idToString (Id uuid) =
Uuid.toString uuid
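A hedged sketch of how the exposed API composes: decoding a processes JSON string with Impact.decodeImpacts (as Data.Object.Db does above) and looking a process back up with findById. The wrapper module and helper name are illustrative only.

module ObjectProcessUsage exposing (firstProcessOrError)

-- Hedged usage sketch: only decodeList, findById and the Process record
-- come from this diff; this wrapper is illustrative.

import Data.Impact as Impact
import Data.Object.Process as Process exposing (Process)
import Json.Decode as Decode


firstProcessOrError : String -> Result String Process
firstProcessOrError processesJson =
    processesJson
        |> Decode.decodeString (Process.decodeList Impact.decodeImpacts)
        |> Result.mapError Decode.errorToString
        |> Result.andThen
            (\processes ->
                case processes of
                    first :: _ ->
                        -- look the first decoded process back up by its own id
                        Process.findById processes first.id

                    [] ->
                        Err "Empty object process list"
            )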