Add new type of NodeCalc to compute Min/Max between timeseries (#2820)
Signed-off-by: Nicolas Rol <nicolas.rol@rte-france.com>
rolnico authored Dec 18, 2023
1 parent 85c7efb commit 3a3190f
Showing 39 changed files with 1,185 additions and 455 deletions.
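
The new Min/Max NodeCalc classes themselves are not among the hunks shown below. As a conceptual illustration only of what the commit title describes, element-wise min between two aligned series in plain Java (not the PowSyBl API):

// Conceptual sketch only, plain Java: the new node type combines two aligned series point by point.
public class MinMaxConcept {
    public static void main(String[] args) {
        double[] ts1 = {1.0, 5.0, 3.0};
        double[] ts2 = {4.0, 2.0, 3.0};
        double[] min = new double[ts1.length];
        for (int i = 0; i < ts1.length; i++) {
            min[i] = Math.min(ts1[i], ts2[i]);   // element-wise minimum; Math.max for the max variant
        }
        System.out.println(java.util.Arrays.toString(min));   // [1.0, 2.0, 3.0]
    }
}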
41 changes: 22 additions & 19 deletions commons/src/main/java/com/powsybl/commons/json/JsonUtil.java
@@ -6,11 +6,11 @@
*/
package com.powsybl.commons.json;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.core.json.JsonReadFeature;
import com.fasterxml.jackson.core.json.JsonWriteFeature;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
@@ -33,6 +33,8 @@
*/
public final class JsonUtil {

private static final String UNEXPECTED_TOKEN = "Unexpected token ";

enum ContextType {
OBJECT,
ARRAY
@@ -67,10 +69,11 @@ private JsonUtil() {
}

public static ObjectMapper createObjectMapper() {
return new ObjectMapper()
return JsonMapper.builder()
.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING)
.disable(JsonGenerator.Feature.QUOTE_NON_NUMERIC_NUMBERS)
.enable(JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS);
.disable(JsonWriteFeature.WRITE_NAN_AS_STRINGS)
.enable(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS)
.build();
}

public static void writeJson(Path jsonFile, Object object, ObjectMapper objectMapper) {
@@ -117,9 +120,10 @@ public static <T> T readJsonAndUpdate(Path jsonFile, T object, ObjectMapper obje
}

public static JsonFactory createJsonFactory() {
return new JsonFactory()
.disable(JsonGenerator.Feature.QUOTE_NON_NUMERIC_NUMBERS)
.enable(JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS);
return new JsonFactoryBuilder()
.disable(JsonWriteFeature.WRITE_NAN_AS_STRINGS)
.enable(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS)
.build();
}

public static void writeJson(Writer writer, Consumer<JsonGenerator> consumer) {
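
The two factory methods above (createObjectMapper and createJsonFactory) now go through the Jackson builder APIs, replacing the deprecated JsonGenerator.Feature.QUOTE_NON_NUMERIC_NUMBERS / JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS flags with JsonWriteFeature.WRITE_NAN_AS_STRINGS / JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS. A minimal standalone sketch of the behaviour these settings preserve, round-tripping NaN as a bare literal (the demo class itself is illustrative, assuming a Jackson 2.x classpath):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.powsybl.commons.json.JsonUtil;

public class NanRoundTripDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = JsonUtil.createObjectMapper();
        // WRITE_NAN_AS_STRINGS disabled: NaN is written as the unquoted literal NaN, not as "NaN".
        String json = mapper.writeValueAsString(Double.NaN);
        // ALLOW_NON_NUMERIC_NUMBERS enabled: the bare literal can be read back.
        double value = mapper.readValue(json, Double.class);
        System.out.println(json + " -> " + value);   // NaN -> NaN
    }
}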
@@ -512,7 +516,7 @@ public static void parseObject(JsonParser parser, boolean polymorphic, FieldHand
} else if (token == JsonToken.END_OBJECT) {
break;
} else {
throw new PowsyblException("Unexpected token " + token);
throw new PowsyblException(UNEXPECTED_TOKEN + token);
}
token = parser.nextToken();
}
@@ -530,13 +534,12 @@ public static <T> void parseObjectArray(JsonParser parser, Consumer<T> objectAdd
if (token != JsonToken.START_ARRAY) {
throw new PowsyblException("Start array token was expected");
}
while ((token = parser.nextToken()) != null) {
if (token == JsonToken.START_OBJECT) {
objectAdder.accept(objectParser.apply(parser));
} else if (token == JsonToken.END_ARRAY) {
break;
} else {
throw new PowsyblException("Unexpected token " + token);
boolean continueLoop = true;
while (continueLoop && (token = parser.nextToken()) != null) {
switch (token) {
case START_OBJECT -> objectAdder.accept(objectParser.apply(parser));
case END_ARRAY -> continueLoop = false;
default -> throw new PowsyblException(UNEXPECTED_TOKEN + token);
}
}
} catch (IOException e) {
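
The continueLoop flag is needed because a plain break inside a switch case cannot leave the enclosing while loop the way the previous if/else version did. A generic standalone sketch of the same pattern (not PowSyBl code; a labelled break would be the usual alternative):

public class LoopFlagDemo {
    public static void main(String[] args) {
        int[] tokens = {1, 1, 2, 1};          // 1 = payload, 2 = end marker
        boolean continueLoop = true;
        for (int i = 0; continueLoop && i < tokens.length; i++) {
            switch (tokens[i]) {
                case 1 -> System.out.println("payload at " + i);
                case 2 -> continueLoop = false;   // same role as END_ARRAY above
                default -> throw new IllegalStateException("Unexpected token " + tokens[i]);
            }
        }
    }
}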
@@ -564,7 +567,7 @@ private static <T> List<T> parseValueArray(JsonParser parser, JsonToken valueTok
} else if (token == JsonToken.END_ARRAY) {
break;
} else {
throw new PowsyblException("Unexpected token " + token);
throw new PowsyblException(UNEXPECTED_TOKEN + token);
}
}
} catch (IOException e) {
@@ -96,14 +96,9 @@ public static SensitivityVariableSet parseJson(JsonParser parser) {
if (token == JsonToken.FIELD_NAME) {
String fieldName = parser.getCurrentName();
switch (fieldName) {
case "id":
id = parser.nextTextValue();
break;
case "variables":
variables = WeightedSensitivityVariable.parseJson(parser);
break;
default:
throw new PowsyblException("Unexpected field: " + fieldName);
case "id" -> id = parser.nextTextValue();
case "variables" -> variables = WeightedSensitivityVariable.parseJson(parser);
default -> throw new PowsyblException("Unexpected field: " + fieldName);
}
} else if (token == JsonToken.END_OBJECT) {
return new SensitivityVariableSet(id, variables);
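
Together with the reference JSON in the test further down, the expected shape is an id plus a variables array of {id, weight} entries. A minimal happy-path sketch built on the same JsonUtil.parseJson(String, ...) overload the test uses (the demo class itself is illustrative only):

import com.powsybl.commons.json.JsonUtil;
import com.powsybl.sensitivity.SensitivityVariableSet;

public class VariableSetParseDemo {
    public static void main(String[] args) throws Exception {
        // Same layout as the test reference, but with the expected "variables" field.
        String json = String.join(System.lineSeparator(),
                "[ {",
                "  \"id\" : \"id\",",
                "  \"variables\" : [ {",
                "    \"id\" : \"v1\",",
                "    \"weight\" : 3.4",
                "  }, {",
                "    \"id\" : \"v2\",",
                "    \"weight\" : 2.1",
                "  } ]",
                "}]");
        SensitivityVariableSet set = JsonUtil.parseJson(json, SensitivityVariableSet::parseJson);
        System.out.println(set);   // parsed variable set with variables v1 and v2
    }
}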
@@ -81,15 +81,14 @@ public static List<WeightedSensitivityVariable> parseJson(JsonParser parser) {
if (token == JsonToken.FIELD_NAME) {
String fieldName = parser.getCurrentName();
switch (fieldName) {
case "id":
context.id = parser.nextTextValue();
break;
case "weight":
case "id" -> context.id = parser.nextTextValue();
case "weight" -> {
parser.nextToken();
context.weight = parser.getDoubleValue();
break;
default:
break;
}
default -> {
// Do nothing
}
}
} else if (token == JsonToken.END_ARRAY) {
break;
@@ -7,6 +7,7 @@
package com.powsybl.sensitivity;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.powsybl.commons.PowsyblException;
import com.powsybl.commons.test.AbstractSerDeTest;
import com.powsybl.commons.json.JsonUtil;
import com.powsybl.sensitivity.json.JsonSensitivityAnalysisParameters;
@@ -16,9 +17,7 @@
import java.util.ArrayList;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.*;

/**
* @author Geoffroy Jamgotchian {@literal <geoffroy.jamgotchian at rte-france.com>}
@@ -66,5 +65,19 @@ void testJson() throws IOException {
ObjectMapper objectMapper = JsonSensitivityAnalysisParameters.createObjectMapper();
roundTripTest(variableSet, (variableSet2, jsonFile) -> JsonUtil.writeJson(jsonFile, variableSet, objectMapper),
jsonFile -> JsonUtil.readJson(jsonFile, SensitivityVariableSet.class, objectMapper), "/variableSetRef.json");

String jsonRef = String.join(System.lineSeparator(),
"[ {",
" \"id\" : \"id\",",
" \"error\" : [ {",
" \"id\" : \"v1\",",
" \"weight\" : 3.4",
" }, {",
" \"id\" : \"v2\",",
" \"weight\" : 2.1",
" } ]",
"}]");
PowsyblException e0 = assertThrows(PowsyblException.class, () -> JsonUtil.parseJson(jsonRef, SensitivityVariableSet::parseJson));
assertEquals("Unexpected field: error", e0.getMessage());
}
}
@@ -204,24 +204,20 @@ void addStringValue(String value) {
static void parseFieldName(JsonParser parser, JsonParsingContext context) throws IOException {
String fieldName = parser.getCurrentName();
switch (fieldName) {
case "offset":
case "offset" -> {
context.offset = parser.nextIntValue(-1);
context.doubleValues = null;
context.stringValues = null;
break;
case "uncompressedLength":
context.uncompressedLength = parser.nextIntValue(-1);
break;
case "stepLengths":
}
case "uncompressedLength" -> context.uncompressedLength = parser.nextIntValue(-1);
case "stepLengths" -> {
context.stepLengths = new TIntArrayList();
context.valuesOrLengthArray = true;
break;
case "values":
case "stepValues":
context.valuesOrLengthArray = true;
break;
default:
break;
}
case "values", "stepValues" -> context.valuesOrLengthArray = true;
default -> {
// Do nothing
}
}
}

@@ -288,38 +284,27 @@ static void parseJson(JsonParser parser, List<DoubleDataChunk> doubleChunks,
JsonToken token;
while ((token = parser.nextToken()) != null) {
switch (token) {
case FIELD_NAME:
parseFieldName(parser, context);
break;
case END_OBJECT:
case FIELD_NAME -> parseFieldName(parser, context);
case END_OBJECT -> {
parseEndObject(context);
if (single) {
return;
} else {
break;
}
case END_ARRAY:
}
case END_ARRAY -> {
if (context.valuesOrLengthArray) {
context.valuesOrLengthArray = false;
} else {
return; // end of chunk parsing
}
break;
case VALUE_NUMBER_FLOAT:
context.addDoubleValue(parser.getDoubleValue());
break;
case VALUE_NUMBER_INT:
parseValueNumberInt(parser, context);
break;
case VALUE_STRING:
context.addStringValue(parser.getValueAsString());
break;
case VALUE_NULL:
context.addStringValue(null);
break;

default:
break;
}
case VALUE_NUMBER_FLOAT -> context.addDoubleValue(parser.getDoubleValue());
case VALUE_NUMBER_INT -> parseValueNumberInt(parser, context);
case VALUE_STRING -> context.addStringValue(parser.getValueAsString());
case VALUE_NULL -> context.addStringValue(null);
default -> {
// Do nothing
}
}
}
} catch (IOException e) {
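
For reference, the chunk layouts these branches decode: offset plus values for an uncompressed chunk, and offset plus uncompressedLength, stepValues and stepLengths for a compressed one. Only the field names come from parseFieldName above; the exact nesting below is an assumption, not taken from this diff:

public class ChunkJsonShapes {
    // Assumed layouts; only the field names are confirmed by the parser above.
    static final String UNCOMPRESSED = "{\"offset\" : 0, \"values\" : [1.0, 2.0, 3.0]}";
    static final String COMPRESSED =
            "{\"offset\" : 0, \"uncompressedLength\" : 5, \"stepValues\" : [1.0, 2.0], \"stepLengths\" : [2, 3]}";

    public static void main(String[] args) {
        System.out.println(UNCOMPRESSED);
        System.out.println(COMPRESSED);
    }
}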
@@ -83,26 +83,25 @@ public static RegularTimeSeriesIndex parseJson(JsonParser parser) {
long endTime = -1;
long spacing = -1;
while ((token = parser.nextToken()) != null) {
if (token == JsonToken.FIELD_NAME) {
String fieldName = parser.getCurrentName();
switch (fieldName) {
case "startTime":
startTime = parser.nextLongValue(-1);
break;
case "endTime":
endTime = parser.nextLongValue(-1);
break;
case "spacing":
spacing = parser.nextLongValue(-1);
break;
default:
throw new IllegalStateException("Unexpected field " + fieldName);
switch (token) {
case FIELD_NAME -> {
String fieldName = parser.getCurrentName();
switch (fieldName) {
case "startTime" -> startTime = parser.nextLongValue(-1);
case "endTime" -> endTime = parser.nextLongValue(-1);
case "spacing" -> spacing = parser.nextLongValue(-1);
default -> throw new IllegalStateException("Unexpected field " + fieldName);
}
}
} else if (token == JsonToken.END_OBJECT) {
if (startTime == -1 || endTime == -1 || spacing == -1) {
throw new IllegalStateException("Incomplete regular time series index json");
case END_OBJECT -> {
if (startTime == -1 || endTime == -1 || spacing == -1) {
throw new IllegalStateException("Incomplete regular time series index json");
}
return new RegularTimeSeriesIndex(startTime, endTime, spacing);
}
default -> {
// Do nothing
}
return new RegularTimeSeriesIndex(startTime, endTime, spacing);
}
}
throw new IllegalStateException("Should not happen");
@@ -260,20 +260,17 @@ void parseLine(String[] tokens) {
}

void parseTokenTime(String[] tokens) {
switch (timeSeriesCsvConfig.timeFormat()) {
case DATE_TIME:
times.add(ZonedDateTime.parse(tokens[0]).toInstant().toEpochMilli());
break;
case FRACTIONS_OF_SECOND:
TimeFormat timeFormat = timeSeriesCsvConfig.timeFormat();
switch (timeFormat) {
case DATE_TIME -> times.add(ZonedDateTime.parse(tokens[0]).toInstant().toEpochMilli());
case FRACTIONS_OF_SECOND -> {
Double time = Double.parseDouble(tokens[0]) * 1000;
times.add(time.longValue());
break;
case MILLIS:
}
case MILLIS -> {
Double millis = Double.parseDouble(tokens[0]);
times.add(millis.longValue());
break;
default:
throw new IllegalStateException("Unknown time format " + timeSeriesCsvConfig.timeFormat());
}
}
}
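
The three TimeFormat branches above boil down to these plain java.time conversions (standalone sketch, not PowSyBl code):

import java.time.ZonedDateTime;

public class TimeTokenDemo {
    public static void main(String[] args) {
        // DATE_TIME: ISO-8601 timestamp with zone, converted to epoch milliseconds.
        long fromDateTime = ZonedDateTime.parse("2023-12-18T00:00:00Z").toInstant().toEpochMilli();
        // FRACTIONS_OF_SECOND: decimal seconds scaled to milliseconds, then truncated.
        long fromSeconds = (long) (Double.parseDouble("1.5") * 1000);   // 1500
        // MILLIS: already milliseconds, parsed as a double then truncated.
        long fromMillis = (long) Double.parseDouble("1500");            // 1500
        System.out.println(fromDateTime + " " + fromSeconds + " " + fromMillis);
    }
}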

@@ -498,26 +495,23 @@ static List<TimeSeries> parseJson(JsonParser parser, boolean single) {
if (token == JsonToken.FIELD_NAME) {
String fieldName = parser.getCurrentName();
switch (fieldName) {
case "metadata":
metadata = TimeSeriesMetadata.parseJson(parser);
break;
case "chunks":
case "metadata" -> metadata = TimeSeriesMetadata.parseJson(parser);
case "chunks" -> {
if (metadata == null) {
throw new TimeSeriesException("metadata is null");
}
parseChunks(parser, metadata, timeSeriesList);
metadata = null;
break;
case "name":
name = parser.nextTextValue();
break;
case "expr":
}
case "name" -> name = parser.nextTextValue();
case "expr" -> {
Objects.requireNonNull(name);
NodeCalc nodeCalc = NodeCalc.parseJson(parser);
timeSeriesList.add(new CalculatedTimeSeries(name, nodeCalc));
break;
default:
break;
}
default -> {
// Do nothing
}
}
} else if (token == JsonToken.END_OBJECT && single) {
break;