diff --git a/.editorconfig b/.editorconfig index 106c69ef..224396a6 100644 --- a/.editorconfig +++ b/.editorconfig @@ -7,4 +7,4 @@ indent_size = 2 max_line_length = 100 [*.java] -indent_size = 4 +indent_size = 2 diff --git a/.gitignore b/.gitignore index 42fd951d..f22e0458 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,7 @@ /data/ *.tmp +*~ # Node npm-debug.log diff --git a/backend/pom.xml b/backend/pom.xml index 9314a286..87185c25 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -6,7 +6,7 @@ sparqles 0.0.1-SNAPSHOT - 11 + 17 ${jdkLevel} ${jdkLevel} ${jdkLevel} @@ -55,7 +55,7 @@ 3.6.0 - 11 + 17 @@ -177,9 +177,12 @@ + + Autogenerated by Avro + 1.24.0 - + true true @@ -188,6 +191,23 @@ + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + ${project.basedir}/src/main/avro/ + ${project.basedir}/src/main/java/ + + + + @@ -262,7 +282,6 @@ org.slf4j slf4j-log4j12 - diff --git a/backend/run.sh b/backend/run.sh index f5e2f042..b874df4d 100755 --- a/backend/run.sh +++ b/backend/run.sh @@ -15,29 +15,37 @@ export JAVA_OPTS="-XX:MaxRAMPercentage=80" # while : # do - echo "Running SPARQLes full cycle" + + #echo "Running SPARQLes full cycle" # interop - echo "Running SPARQLes full cycle [ftask]" - bin/sparqles $CMDARGS -run ftask + #echo "Running SPARQLes full cycle [ftask]" + #bin/sparqles $CMDARGS -run ftask # # availability - # echo "Running SPARQLes full cycle [atask]" - # bin/sparqles $CMDARGS -run atask + #echo "Running SPARQLes full cycle [atask]" + #bin/sparqles $CMDARGS -run atask + # coherence + #echo "Running SPARQLes full cycle [ctask]" + #bin/sparqles $CMDARGS -run ctask # # performance - echo "Running SPARQLes full cycle [ptask]" - bin/sparqles $CMDARGS -run ptask + #echo "Running SPARQLes full cycle [ptask]" + #bin/sparqles $CMDARGS -run ptask # # discoverability - echo "Running SPARQLes full cycle [dtask]" - bin/sparqles $CMDARGS -run dtask - # index view - echo "Running SPARQLes full cycle [iv]" - bin/sparqles 
$CMDARGS -iv + #echo "Running SPARQLes full cycle [dtask]" + #bin/sparqles $CMDARGS -run dtask # stats - echo "Running SPARQLes full cycle [st]" - bin/sparqles $CMDARGS -st + #echo "Running SPARQLes full cycle [st]" + #bin/sparqles $CMDARGS -st # # recompute - # echo "Running SPARQLes full cycle [r]" - # bin/sparqles $CMDARGS -r + #echo "Running SPARQLes full cycle [r]" + #bin/sparqles $CMDARGS -r + #echo "Running SPARQLes - recompute last [rl]" + #bin/sparqles $CMDARGS -rl + # index view + # FIXME: crashes on SPARQLES.recomputeIndexView + #echo "Running SPARQLes full cycle [iv]" + #bin/sparqles $CMDARGS -iv + # index from old.datahub.io # echo "Running SPARQLes full cycle [itask]" # bin/sparqles $CMDARGS -run itask @@ -46,8 +54,12 @@ export JAVA_OPTS="-XX:MaxRAMPercentage=80" # sleep $DELAY # done -echo "${JAVA_OPTS}" +#echo "Running SPARQLes [reschedule all tasks]" +#bin/sparqles $CMDARGS -run reschedule + +#echo "${JAVA_OPTS}" +echo "Running SPARQLes [start service]" ## Fully automatic JAVA_OPTS="${JAVA_OPTS} " bin/sparqles $CMDARGS --start diff --git a/backend/src/main/avro/AResult.avsc b/backend/src/main/avro/AResult.avsc index a8263571..51672520 100644 --- a/backend/src/main/avro/AResult.avsc +++ b/backend/src/main/avro/AResult.avsc @@ -1,5 +1,6 @@ {"namespace": "sparqles.avro.availability", "type": "record", + "import" : "EndpointResult.avsc", "name": "AResult", "fields": [ {"name": "endpointResult", "type": "sparqles.avro.EndpointResult"}, diff --git a/backend/src/main/avro/CResult.avsc b/backend/src/main/avro/CResult.avsc new file mode 100644 index 00000000..a4f55328 --- /dev/null +++ b/backend/src/main/avro/CResult.avsc @@ -0,0 +1,28 @@ +{ + "namespace": "sparqles.avro.calculation", + "type": "record", + "import" : "EndpointResult.avsc", + "name": "CResult", + "fields": [ + {"name": "endpointResult", "type": "sparqles.avro.EndpointResult"}, + {"name": "triples", "type": "long"}, + {"name": "entities", "type": "long"}, + {"name": "classes", "type": 
"long"}, + {"name": "properties", "type": "long"}, + {"name": "distinctSubjects", "type": "long"}, + {"name": "distinctObjects", "type": "long"}, + {"name": "exampleResources", "type": + {"type": "array", "items": + { + "name": "uri", "type": "string" + } + } + }, + {"name": "VoID", "type": "string"}, + {"name": "VoIDPart", "type": "boolean"}, + {"name": "SD", "type": "string"}, + {"name": "SDPart", "type": "boolean"}, + {"name": "coherence", "type": "double"}, + {"name": "RS", "type": "double"} + ] +} diff --git a/backend/src/main/avro/CalculationView.avsc b/backend/src/main/avro/CalculationView.avsc new file mode 100644 index 00000000..798af624 --- /dev/null +++ b/backend/src/main/avro/CalculationView.avsc @@ -0,0 +1,15 @@ +{ +"namespace": "sparqles.avro.analytics", +"type": "record", +"name": "CalculationView", +"fields": [ + {"name": "endpoint", "type": "sparqles.avro.Endpoint"}, + {"name": "VoID", "type": "boolean"}, + {"name": "VoIDPart", "type": "boolean"}, + {"name": "SD", "type": "boolean"}, + {"name": "SDPart", "type": "boolean"}, + {"name": "coherence", "type": "double"}, + {"name": "RS", "type": "double"}, + {"name": "lastUpdate", "type": "long"} +] +} diff --git a/backend/src/main/avro/DResult.avsc b/backend/src/main/avro/DResult.avsc index e3556281..11977616 100644 --- a/backend/src/main/avro/DResult.avsc +++ b/backend/src/main/avro/DResult.avsc @@ -1,6 +1,7 @@ { "namespace": "sparqles.avro.discovery", "type": "record", + "import" : "EndpointResult.avsc", "name": "DResult", "fields": [ {"name": "endpointResult", "type": "sparqles.avro.EndpointResult"}, diff --git a/backend/src/main/avro/EPView.avsc b/backend/src/main/avro/EPView.avsc index e8bae891..7b1692af 100644 --- a/backend/src/main/avro/EPView.avsc +++ b/backend/src/main/avro/EPView.avsc @@ -2,10 +2,10 @@ "namespace": "sparqles.avro.analytics", "type": "record", "name": "EPView", -"fields": [ +"fields": [ {"name": "endpoint", "type": "sparqles.avro.Endpoint"}, - {"name": "availability", "type": 
{ - "namespace": "sparqles.avro.analytics", + {"name": "availability", "type": { + "namespace": "sparqles.avro.analytics", "name": "EPViewAvailability", "type": "record", "fields" : [ @@ -16,15 +16,15 @@ { "name": "uptimeLast31d", "type": "double"}, { "name": "uptimeOverall", "type": "double"}, { "name": "data", "type": { - "namespace": "sparqles.avro.analytics", + "namespace": "sparqles.avro.analytics", "name": "EPViewAvailabilityData", "type": "record", "fields" : [ { "name": "key", "type": "string"}, - { "name": "values", "type": - {"type": "array", "items": + { "name": "values", "type": + {"type": "array", "items": { - "namespace": "sparqles.avro.analytics", + "namespace": "sparqles.avro.analytics", "name": "EPViewAvailabilityDataPoint", "type": "record", "fields" : [ @@ -46,19 +46,19 @@ "name":"EPViewPerformance", "fields":[ {"name": "threshold", "type": "long"}, - {"name": "ask" , "type": - {"type": "array", "items": + {"name": "ask" , "type": + {"type": "array", "items": { - "namespace": "sparqles.avro.analytics", + "namespace": "sparqles.avro.analytics", "name": "EPViewPerformanceData", "type": "record", "fields" : [ { "name": "key", "type": "string"}, { "name": "color", "type": "string"}, { "name": "data" , "type": - { "type": "array", "items": - { - "namespace": "sparqles.avro.analytics", + { "type": "array", "items": + { + "namespace": "sparqles.avro.analytics", "name": "EPViewPerformanceDataValues", "type": "record", "fields" : [ @@ -70,7 +70,7 @@ } } ] - } + } } }, {"name": "join" , "type": {"type": "array", "items": "array", "items" : "sparqles.avro.analytics.EPViewPerformanceData"}} @@ -83,10 +83,10 @@ "type":"record", "name":"EPViewInteroperability", "fields":[ - {"name": "SPARQL1Features" , "type": - {"type": "array", "items": + {"name": "SPARQL1Features" , "type": + {"type": "array", "items": { - "namespace": "sparqles.avro.analytics", + "namespace": "sparqles.avro.analytics", "name": "EPViewInteroperabilityData", "type": "record", "fields" : [ @@ 
-94,7 +94,7 @@ { "name": "value", "type": "boolean"}, { "name": "exception", "type": ["string", "null"]} ] - } + } } }, {"name": "SPARQL11Features" , "type": {"type": "array", "items": "array", "items" : "sparqles.avro.analytics.EPViewInteroperabilityData"}} @@ -107,23 +107,65 @@ "type":"record", "name":"EPViewDiscoverability", "fields":[ - {"name": "serverName" , "type" : "string"}, - {"name": "VoIDDescription" , "type": - {"type": "array", "items": + {"name": "serverName" , "type" : "string"}, + {"name": "VoIDDescription" , "type": + {"type": "array", "items": { - "namespace": "sparqles.avro.analytics", + "namespace": "sparqles.avro.analytics", "name": "EPViewDiscoverabilityData", "type": "record", "fields" : [ { "name": "label", "type": "string"}, { "name": "value", "type": "boolean"} ] - } + } } }, {"name": "SDDescription" , "type": {"type": "array", "items": "array", "items" : "sparqles.avro.analytics.EPViewDiscoverabilityData"}} ] } + }, + {"name": "calculation", "type": { + "namespace":"sparqles.avro.analytics", + "type":"record", + "name":"EPViewCalculation", + "fields":[ + {"name": "triples", "type": "long"}, + {"name": "entities", "type": "long"}, + {"name": "classes", "type": "long"}, + {"name": "properties", "type": "long"}, + {"name": "distinctSubjects", "type": "long"}, + {"name": "distinctObjects", "type": "long"}, + {"name": "exampleResources", "type": + {"type": "array", "items": + { + "name": "uri", "type": "string" + } + } + }, + {"name": "VoID", "type": "string"}, + {"name": "VoIDPart", "type": "boolean"}, + {"name": "SD", "type": "string"}, + {"name": "SDPart", "type": "boolean"}, + {"name": "coherence", "type": "double"}, + {"name": "RS", "type": "double"} + ] + }, + "default": { + "triples": -1, + "entities": -1, + "classes": -1, + "properties": -1, + "distinctSubjects": -1, + "distinctObjects": -1, + "exampleResources": [], + "VoID": "n/a", + "VoIDPart": false, + "SD": "n/a", + "SDPart": false, + "coherence": -1.0, + "RS": -1.0 + } } - ] +] 
} diff --git a/backend/src/main/avro/FResult.avsc b/backend/src/main/avro/FResult.avsc index a37772d2..ba3aa422 100644 --- a/backend/src/main/avro/FResult.avsc +++ b/backend/src/main/avro/FResult.avsc @@ -1,6 +1,8 @@ { "namespace": "sparqles.avro.features", "type": "record", + "import" : "EndpointResult.avsc", + "import" : "Run.avsc", "name": "FResult", "fields": [ {"name": "endpointResult", "type": "sparqles.avro.EndpointResult"}, diff --git a/backend/src/main/avro/Index.avsc b/backend/src/main/avro/Index.avsc index 72e6fe50..7b5042ae 100644 --- a/backend/src/main/avro/Index.avsc +++ b/backend/src/main/avro/Index.avsc @@ -91,7 +91,7 @@ ] } } - } + } ] } } @@ -99,7 +99,6 @@ ] } }, - {"name": "discoverability", "type": { "namespace":"sparqles.avro.analytics", @@ -137,7 +136,50 @@ ] } + }, + {"name": "calculation", "type": + { + "namespace":"sparqles.avro.analytics", + "type":"record", + "name":"IndexViewCalculation", + "fields":[ + {"name": "coherences" , "type": + {"type": "array", "items": + { + "namespace": "sparqles.avro.analytics", + "name": "IndexViewCalculationData", + "type": "record", + "fields" : [ + { "name": "key", "type": "string"}, + { "name": "values" , "type": + { "type": "array", "items": + { + "namespace": "sparqles.avro.analytics", + "name": "IndexViewCalculationDataValues", + "type": "record", + "fields" : [ + { "name": "label", "type": "string"}, + { "name": "value", "type": "double"} + ] + } + } + } + ] + } + } + }, + {"name": "rss" , "type": + {"type": "array", "items": "sparqles.avro.analytics.IndexViewCalculationData"} + }, + { "name": "VoID", "type": "double"}, + { "name": "VoIDPart", "type": "double"}, + { "name": "SD", "type": "double"}, + { "name": "SDPart", "type": "double"}, + { "name": "Coherence", "type": "double"}, + { "name": "RS", "type": "double"} + ] + } + } ] } - diff --git a/backend/src/main/avro/PResult.avsc b/backend/src/main/avro/PResult.avsc index f83398d3..8cc0703b 100644 --- a/backend/src/main/avro/PResult.avsc +++ 
b/backend/src/main/avro/PResult.avsc @@ -1,6 +1,8 @@ { "namespace": "sparqles.avro.performance", "type": "record", + "import" : "EndpointResult.avsc", + "import" : "Run.avsc", "name": "PResult", "fields": [ {"name": "endpointResult", "type": "sparqles.avro.EndpointResult"}, @@ -10,17 +12,7 @@ "type": "record", "fields" : [ { "name": "query", "type": "string"}, - { "name": "cold", "type": -{"type":"record","name":"Run","namespace":"sparqles.avro.performance", - "fields":[ - {"name": "frestout", "type": "long"}, - {"name": "solutions", "type": "int"}, - {"name": "inittime", "type": "long"}, - {"name": "exectime", "type": "long"}, - {"name": "closetime", "type": "long"}, - {"name": "Exception", "type": ["string", "null"]}, - {"name": "exectout", "type": "long"} - ]}}, + { "name": "cold", "type": "sparqles.avro.performance.Run"}, { "name": "warm", "type": "sparqles.avro.performance.Run"} ]} } diff --git a/backend/src/main/avro/Run.avsc b/backend/src/main/avro/Run.avsc new file mode 100644 index 00000000..505644ee --- /dev/null +++ b/backend/src/main/avro/Run.avsc @@ -0,0 +1,14 @@ +{ + "namespace":"sparqles.avro.performance", + "type":"record", + "name":"Run", + "fields":[ + {"name": "frestout", "type": "long"}, + {"name": "solutions", "type": "int"}, + {"name": "inittime", "type": "long"}, + {"name": "exectime", "type": "long"}, + {"name": "closetime", "type": "long"}, + {"name": "Exception", "type": ["string", "null"]}, + {"name": "exectout", "type": "long"} + ] +} diff --git a/backend/src/main/avro/Schedule.avsc b/backend/src/main/avro/Schedule.avsc index 85daf388..cfba9483 100644 --- a/backend/src/main/avro/Schedule.avsc +++ b/backend/src/main/avro/Schedule.avsc @@ -8,6 +8,7 @@ {"name": "FTask", "type": ["string", "null"]}, {"name": "PTask", "type": ["string", "null"]}, {"name": "DTask", "type": ["string", "null"]}, + {"name": "CTask", "type": ["string", "null"]}, {"name": "ITask", "type": ["string", "null"]}, {"name": "ETask", "type": ["string", "null"]} ] diff 
--git a/backend/src/main/config/log4j.properties b/backend/src/main/config/log4j.properties index 2dfb4cd6..3ae37ea4 100644 --- a/backend/src/main/config/log4j.properties +++ b/backend/src/main/config/log4j.properties @@ -6,17 +6,73 @@ log4j.rootLogger=INFO, stdout, stderr #log4j.logger.sparqles.core.features=DEBUG, flog #log4j.logger.sparqles.core.performance=DEBUG, plog #log4j.logger.sparqles.utils.ExceptionHandler=INFO, exlog -#DISABLE certain packages -log4j.logger.org.apache.http=WARN -log4j.logger.org.apache.commons.httpclient.params.DefaultHttpParams=INFO -log4j.logger.com.hp.hpl.jena.sparql=WARN -log4j.logger.org.apache.jena=WARN +# Direct log messages to a log file +log4j.appender.file=org.apache.log4j.DailyRollingFileAppender +log4j.appender.file.DatePattern = '.'yyyy-MM-dd +log4j.appender.file.Append = true +log4j.appender.file.Threshold=INFO +log4j.appender.file.File=logs/main.log +log4j.appender.file.layout=org.apache.log4j.PatternLayout +log4j.appender.file.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n +# Log all information for ATask log messages to a log file +log4j.appender.alog=org.apache.log4j.DailyRollingFileAppender +log4j.appender.alog.DatePattern = '.'yyyy-MM-dd +log4j.appender.alog.Append = true +log4j.appender.alog.Threshold=INFO +log4j.appender.alog.File=logs/availability.log +log4j.appender.alog.layout=org.apache.log4j.PatternLayout +log4j.appender.alog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n + + +# Log all information for ATask log messages to exception log file +log4j.appender.exlog=org.apache.log4j.DailyRollingFileAppender +log4j.appender.exlog.DatePattern = '.'yyyy-MM-dd +log4j.appender.exlog.Append = true +log4j.appender.exlog.Threshold=INFO +log4j.appender.exlog.File=logs/exception.log +log4j.appender.exlog.layout=org.apache.log4j.PatternLayout +log4j.appender.exlog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n + + + +# Log all 
information for FTask log messages to a log file +log4j.appender.flog=org.apache.log4j.DailyRollingFileAppender +log4j.appender.flog.DatePattern = '.'yyyy-MM-dd +log4j.appender.flog.Append = true +log4j.appender.flog.Threshold=INFO +log4j.appender.flog.File=logs/interoperability.log +log4j.appender.flog.layout=org.apache.log4j.PatternLayout +log4j.appender.flog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n + +# Log all information for DTask log messages to a log file +log4j.appender.dlog=org.apache.log4j.DailyRollingFileAppender +log4j.appender.dlog.DatePattern = '.'yyyy-MM-dd +log4j.appender.dlog.Append = true +log4j.appender.dlog.Threshold=INFO +log4j.appender.dlog.File=logs/discoverability.log +log4j.appender.dlog.layout=org.apache.log4j.PatternLayout +log4j.appender.dlog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n + +# Log all information for PTask log messages to a log file +log4j.appender.plog=org.apache.log4j.DailyRollingFileAppender +log4j.appender.plog.DatePattern = '.'yyyy-MM-dd +log4j.appender.plog.Append = true +log4j.appender.plog.Threshold=INFO +log4j.appender.plog.File=logs/performance.log +log4j.appender.plog.layout=org.apache.log4j.PatternLayout +log4j.appender.plog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n + + + # Direct log messages to stdout log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout.Target=System.out log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +#log4j.appender.stdout.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %5p [%c{1}:%M:%L] - %m%n +#log4j.appender.stdout.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n + log4j.appender.stderr=org.apache.log4j.ConsoleAppender log4j.appender.stderr.Target=System.err log4j.appender.stderr.Threshold=ERROR @@ -32,65 +88,16 @@ log4j.appender.stdout.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} [%5p] %c:%L 
log4j.appender.stderr.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} [%5p] %c:%L - %m%n -# Don't want to log to file when in Docker -## Direct log messages to a log file -#log4j.appender.file=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.file.DatePattern = '.'yyyy-MM-dd -#log4j.appender.file.Append = true -#log4j.appender.file.Threshold=INFO -#log4j.appender.file.File=logs/main.log -#log4j.appender.file.layout=org.apache.log4j.PatternLayout -#log4j.appender.file.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n -# -## Log all information for ATask log messages to a log file -#log4j.appender.alog=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.alog.DatePattern = '.'yyyy-MM-dd -#log4j.appender.alog.Append = true -#log4j.appender.alog.Threshold=INFO -#log4j.appender.alog.File=logs/availability.log -#log4j.appender.alog.layout=org.apache.log4j.PatternLayout -#log4j.appender.alog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n -# -# -## Log all information for ATask log messages to exception log file -#log4j.appender.exlog=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.exlog.DatePattern = '.'yyyy-MM-dd -#log4j.appender.exlog.Append = true -#log4j.appender.exlog.Threshold=INFO -#log4j.appender.exlog.File=logs/exception.log -#log4j.appender.exlog.layout=org.apache.log4j.PatternLayout -#log4j.appender.exlog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n -# -# -# -## Log all information for ATask log messages to a log file -#log4j.appender.flog=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.flog.DatePattern = '.'yyyy-MM-dd -#log4j.appender.flog.Append = true -#log4j.appender.flog.Threshold=INFO -#log4j.appender.flog.File=logs/interoperability.log -#log4j.appender.flog.layout=org.apache.log4j.PatternLayout -#log4j.appender.flog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n -# -## Log all information for ATask log 
messages to a log file -#log4j.appender.dlog=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.dlog.DatePattern = '.'yyyy-MM-dd -#log4j.appender.dlog.Append = true -#log4j.appender.dlog.Threshold=INFO -#log4j.appender.dlog.File=logs/discoverability.log -#log4j.appender.dlog.layout=org.apache.log4j.PatternLayout -#log4j.appender.dlog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n -# -## Log all information for ATask log messages to a log file -#log4j.appender.plog=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.plog.DatePattern = '.'yyyy-MM-dd -#log4j.appender.plog.Append = true -#log4j.appender.plog.Threshold=INFO -#log4j.appender.plog.File=logs/performance.log -#log4j.appender.plog.layout=org.apache.log4j.PatternLayout -#log4j.appender.plog.layout.ConversionPattern=%d{dd-MM-yy HH:mm:ss} %15.15c{1}:%-3.3L %5p - %m%n -# -#log4j.appender.HTML=org.apache.log4j.FileAppender -#log4j.appender.HTML.File=logs/main.html -#log4j.appender.HTML.layout=org.apache.log4j.HTMLLayout -#log4j.appender.HTML.Threshold=DEBUG +log4j.appender.HTML=org.apache.log4j.FileAppender +log4j.appender.HTML.File=logs/main.html +log4j.appender.HTML.layout=org.apache.log4j.HTMLLayout +log4j.appender.HTML.Threshold=DEBUG + +#DISABLE certain packages +log4j.logger.org.apache.http=WARN +log4j.logger.org.apache.commons.httpclient.params.DefaultHttpParams=INFO +log4j.logger.com.hp.hpl.jena.sparql=WARN +log4j.logger.org.apache.jena=WARN +log4j.logger.sparqles.core.availability.ATask=DEBUG +#log4j.logger.sparqles.utils.FaultDiagnostic=TRACE +log4j.logger.sparqles.analytics.IndexViewAnalytics=DEBUG diff --git a/backend/src/main/java/org/osjava/norbert/AbstractRule.java b/backend/src/main/java/org/osjava/norbert/AbstractRule.java index 415ff285..7ed1ec60 100644 --- a/backend/src/main/java/org/osjava/norbert/AbstractRule.java +++ b/backend/src/main/java/org/osjava/norbert/AbstractRule.java @@ -34,20 +34,20 @@ /** Provides implementation for the path property 
and a handy toString. */ abstract class AbstractRule implements Rule { - private String path; + private String path; - public AbstractRule(String path) { - this.path = path.trim(); - } + public AbstractRule(String path) { + this.path = path.trim(); + } - /** A url path snippet for which a rule exists */ - public String getPath() { - return this.path; - } + /** A url path snippet for which a rule exists */ + public String getPath() { + return this.path; + } - public abstract Boolean isAllowed(String query); + public abstract Boolean isAllowed(String query); - public String toString() { - return getClass().getName() + " on " + this.path; - } + public String toString() { + return getClass().getName() + " on " + this.path; + } } diff --git a/backend/src/main/java/org/osjava/norbert/AllowedRule.java b/backend/src/main/java/org/osjava/norbert/AllowedRule.java index 1c04a135..d28c14c8 100644 --- a/backend/src/main/java/org/osjava/norbert/AllowedRule.java +++ b/backend/src/main/java/org/osjava/norbert/AllowedRule.java @@ -34,20 +34,20 @@ /** A norobots Allow: rule. Any path which begins with the rule's path is allowed. */ class AllowedRule extends AbstractRule { - public AllowedRule(String path) { - super(path); - } + public AllowedRule(String path) { + super(path); + } - public Boolean isAllowed(String query) { - if ("".equals(super.getPath())) { - // What does the spec say here? Until I know, I'll just ignore this. - return null; - } - boolean test = query.startsWith(super.getPath()); - if (!test) { - return null; - } else { - return Boolean.TRUE; - } + public Boolean isAllowed(String query) { + if ("".equals(super.getPath())) { + // What does the spec say here? Until I know, I'll just ignore this. 
+ return null; + } + boolean test = query.startsWith(super.getPath()); + if (!test) { + return null; + } else { + return Boolean.TRUE; } + } } diff --git a/backend/src/main/java/org/osjava/norbert/DisallowedRule.java b/backend/src/main/java/org/osjava/norbert/DisallowedRule.java index b58a783d..e7c017bd 100644 --- a/backend/src/main/java/org/osjava/norbert/DisallowedRule.java +++ b/backend/src/main/java/org/osjava/norbert/DisallowedRule.java @@ -34,19 +34,19 @@ /** A norobots Disallow: rule. Any path which begins with the rule's path is not allowed. */ class DisallowedRule extends AbstractRule { - public DisallowedRule(String path) { - super(path); - } + public DisallowedRule(String path) { + super(path); + } - public Boolean isAllowed(String query) { - if ("".equals(super.getPath())) { - return Boolean.TRUE; - } - boolean test = query.startsWith(super.getPath()); - if (!test) { - return null; - } else { - return Boolean.FALSE; - } + public Boolean isAllowed(String query) { + if ("".equals(super.getPath())) { + return Boolean.TRUE; + } + boolean test = query.startsWith(super.getPath()); + if (!test) { + return null; + } else { + return Boolean.FALSE; } + } } diff --git a/backend/src/main/java/org/osjava/norbert/NoRobotClient.java b/backend/src/main/java/org/osjava/norbert/NoRobotClient.java index 1af7ec1e..45b30b96 100644 --- a/backend/src/main/java/org/osjava/norbert/NoRobotClient.java +++ b/backend/src/main/java/org/osjava/norbert/NoRobotClient.java @@ -43,200 +43,201 @@ */ public class NoRobotClient { - private String userAgent; - private RulesEngine rules; - private RulesEngine wildcardRules; - private URL baseUrl; - - /** - * Create a Client for a particular user-agent name. - * - * @param userAgent name for the robot - */ - public NoRobotClient(String userAgent) { - this.userAgent = userAgent; - } - - /** - * Head to a website and suck in their robots.txt file. Note that the URL passed in is for the - * website and does not include the robots.txt file itself. 
- * - * @param baseUrl of the site - */ - // public void parse(URL baseUrl) throws NoRobotException { - // - // this.rules = new RulesEngine(); - // - // this.baseUrl = baseUrl; - // - // URL txtUrl = null; - // try { - // // fetch baseUrl+"robots.txt" - // txtUrl = new URL(baseUrl, "robots.txt"); - // } catch(MalformedURLException murle) { - // throw new NoRobotException("Bad URL: "+baseUrl+", robots.txt. ", murle); - // } - // - // String txt = null; - // try { - // txt = loadContent(txtUrl, this.userAgent); - // if(txt == null) { - // throw new NoRobotException("No content found for: "+txtUrl); - // } - // } catch(IOException ioe) { - // throw new NoRobotException("Unable to get content for: "+txtUrl, ioe); - // } - // - // try { - // parseText(txt); - // } catch(NoRobotException nre) { - // throw new NoRobotException("Problem while parsing "+txtUrl, nre); - // } - // } - public void parse(String txt, URL baseUrl) throws NoRobotException { - this.baseUrl = baseUrl; - parseText(txt); - } - - public void parseText(String txt) throws NoRobotException { - this.rules = parseTextForUserAgent(txt, this.userAgent); - this.wildcardRules = parseTextForUserAgent(txt, "*"); - } + private String userAgent; + private RulesEngine rules; + private RulesEngine wildcardRules; + private URL baseUrl; + + /** + * Create a Client for a particular user-agent name. + * + * @param userAgent name for the robot (short one, e.g. {@link + * sparqles.core.CONSTANTS#USER_AGENT_STRING_RAW} ) + */ + public NoRobotClient(String userAgent) { + this.userAgent = userAgent; + } + + /** + * Head to a website and suck in their robots.txt file. Note that the URL passed in is for the + * website and does not include the robots.txt file itself. 
+ * + * @param baseUrl of the site + */ + // public void parse(URL baseUrl) throws NoRobotException { + // + // this.rules = new RulesEngine(); + // + // this.baseUrl = baseUrl; + // + // URL txtUrl = null; + // try { + // // fetch baseUrl+"robots.txt" + // txtUrl = new URL(baseUrl, "robots.txt"); + // } catch(MalformedURLException murle) { + // throw new NoRobotException("Bad URL: "+baseUrl+", robots.txt. ", murle); + // } + // + // String txt = null; + // try { + // txt = loadContent(txtUrl, this.userAgent); + // if(txt == null) { + // throw new NoRobotException("No content found for: "+txtUrl); + // } + // } catch(IOException ioe) { + // throw new NoRobotException("Unable to get content for: "+txtUrl, ioe); + // } + // + // try { + // parseText(txt); + // } catch(NoRobotException nre) { + // throw new NoRobotException("Problem while parsing "+txtUrl, nre); + // } + // } + + public void parse(String txt, URL baseUrl) throws NoRobotException { + this.baseUrl = baseUrl; + parseText(txt); + } + + public void parseText(String txt) throws NoRobotException { + this.rules = parseTextForUserAgent(txt, this.userAgent); + this.wildcardRules = parseTextForUserAgent(txt, "*"); + } + + private RulesEngine parseTextForUserAgent(String txt, String userAgent) throws NoRobotException { + + RulesEngine engine = new RulesEngine(); + + // Classic basic parser style, read an element at a time, + // changing a state variable [parsingAllowBlock] + + // take each line, one at a time + BufferedReader rdr = new BufferedReader(new StringReader(txt)); + String line = ""; + String value = null; + boolean parsingAllowBlock = false; + try { + while ((line = rdr.readLine()) != null) { + // trim whitespace from either side + line = line.trim(); + + // ignore startsWith('#') + if (line.startsWith("#")) { + continue; + } - private RulesEngine parseTextForUserAgent(String txt, String userAgent) - throws NoRobotException { - - RulesEngine engine = new RulesEngine(); - - // Classic basic parser 
style, read an element at a time, - // changing a state variable [parsingAllowBlock] - - // take each line, one at a time - BufferedReader rdr = new BufferedReader(new StringReader(txt)); - String line = ""; - String value = null; - boolean parsingAllowBlock = false; - try { - while ((line = rdr.readLine()) != null) { - // trim whitespace from either side - line = line.trim(); - - // ignore startsWith('#') - if (line.startsWith("#")) { - continue; - } - - // if User-agent == userAgent - // record the rest up until end or next User-agent - // then quit (? check spec) - if (line.toLowerCase().startsWith("user-agent:")) { - - if (parsingAllowBlock) { - // we've just finished reading allows/disallows - if (engine.isEmpty()) { - // multiple user agents in a line, let's - // wait til we get rules - continue; - } else { - break; - } - } - - value = line.toLowerCase().substring("user-agent:".length()).trim(); - if (value.equalsIgnoreCase(userAgent)) { - parsingAllowBlock = true; - continue; - } - } else { - // if not, then store if we're currently the user agent - if (parsingAllowBlock) { - if (line.startsWith("Allow:")) { - value = line.substring("Allow:".length()).trim(); - value = URLDecoder.decode(value, "UTF-8"); - engine.allowPath(value); - } else if (line.startsWith("Disallow:")) { - value = line.substring("Disallow:".length()).trim(); - value = URLDecoder.decode(value, "UTF-8"); - engine.disallowPath(value); - } else { - // ignore - continue; - } - } else { - // ignore - continue; - } - } + // if User-agent == userAgent + // record the rest up until end or next User-agent + // then quit (? 
check spec) + if (line.toLowerCase().startsWith("user-agent:")) { + + if (parsingAllowBlock) { + // we've just finished reading allows/disallows + if (engine.isEmpty()) { + // multiple user agents in a line, let's + // wait til we get rules + continue; + } else { + break; + } + } + + value = line.toLowerCase().substring("user-agent:".length()).trim(); + if (value.equalsIgnoreCase(userAgent.toLowerCase())) { + parsingAllowBlock = true; + continue; + } + } else { + // if not, then store if we're currently the user agent + if (parsingAllowBlock) { + if (line.startsWith("Allow:")) { + value = line.substring("Allow:".length()).trim(); + value = URLDecoder.decode(value, "UTF-8"); + engine.allowPath(value); + } else if (line.startsWith("Disallow:")) { + value = line.substring("Disallow:".length()).trim(); + value = URLDecoder.decode(value, "UTF-8"); + engine.disallowPath(value); + } else { + // ignore + continue; } - } catch (IOException ioe) { - // As this is parsing a String, it should not have an IOE - throw new NoRobotException("Problem while parsing text. ", ioe); + } else { + // ignore + continue; + } } - - return engine; + } + } catch (IOException ioe) { + // As this is parsing a String, it should not have an IOE + throw new NoRobotException("Problem while parsing text. ", ioe); } - /** - * Decide if the parsed website will allow this URL to be be seen. - * - *

Note that parse(URL) must be called before this method is called. - * - * @param url in question - * @return is the url allowed? - * @throws IllegalStateException when parse has not been called - */ - public boolean isUrlAllowed(URL url) throws IllegalStateException, IllegalArgumentException { - if (rules == null) { - throw new IllegalStateException("You must call parse before you call this method. "); - } - - if (!baseUrl.getHost().equals(url.getHost()) - || baseUrl.getPort() != url.getPort() - || !baseUrl.getProtocol().equals(url.getProtocol())) { - throw new IllegalArgumentException( - "Illegal to use a different url, " - + url.toExternalForm() - + ", for this robots.txt: " - + this.baseUrl.toExternalForm()); - } - String urlStr = url.toExternalForm().substring(this.baseUrl.toExternalForm().length() - 1); - if ("/robots.txt".equals(urlStr)) { - return true; - } - urlStr = URLDecoder.decode(urlStr); - Boolean allowed = this.rules.isAllowed(urlStr); - if (allowed == null) { - allowed = this.wildcardRules.isAllowed(urlStr); - } - if (allowed == null) { - allowed = Boolean.TRUE; - } - - return allowed.booleanValue(); + return engine; + } + + /** + * Decide if the parsed website will allow this URL to be be seen. + * + *

Note that parse(URL) must be called before this method is called. + * + * @param url in question + * @return is the url allowed? + * @throws IllegalStateException when parse has not been called + */ + public boolean isUrlAllowed(URL url) throws IllegalStateException, IllegalArgumentException { + if (rules == null) { + throw new IllegalStateException("You must call parse before you call this method. "); } - // // INLINE: as such from genjava/gj-core's net package. Simple method - // // stolen from Payload too. - // private static String loadContent(URL url, String userAgent) throws IOException { - // URLConnection urlConn = url.openConnection(); - // if(urlConn instanceof HttpURLConnection) { - // if(userAgent != null) { - // ((HttpURLConnection)urlConn).addRequestProperty("User-Agent", userAgent); - // } - // } - // InputStream in = urlConn.getInputStream(); - // BufferedReader rdr = new BufferedReader(new InputStreamReader(in)); - // StringBuffer buffer = new StringBuffer(); - // String line = ""; - // while( (line = rdr.readLine()) != null) { - // buffer.append(line); - // buffer.append("\n"); - // } - // in.close(); - // return buffer.toString(); - // } - - public String toString() { - return this.rules.toString() + " " + this.wildcardRules.toString(); + if (!baseUrl.getHost().equals(url.getHost()) + || baseUrl.getPort() != url.getPort() + || !baseUrl.getProtocol().equals(url.getProtocol())) { + throw new IllegalArgumentException( + "Illegal to use a different url, " + + url.toExternalForm() + + ", for this robots.txt: " + + this.baseUrl.toExternalForm()); } + String urlStr = url.toExternalForm().substring(this.baseUrl.toExternalForm().length() - 1); + if ("/robots.txt".equals(urlStr)) { + return true; + } + urlStr = URLDecoder.decode(urlStr); + Boolean allowed = this.rules.isAllowed(urlStr); + if (allowed == null) { + allowed = this.wildcardRules.isAllowed(urlStr); + } + if (allowed == null) { + allowed = Boolean.TRUE; + } + + return allowed.booleanValue(); + } 
+ + // // INLINE: as such from genjava/gj-core's net package. Simple method + // // stolen from Payload too. + // private static String loadContent(URL url, String userAgent) throws IOException { + // URLConnection urlConn = url.openConnection(); + // if(urlConn instanceof HttpURLConnection) { + // if(userAgent != null) { + // ((HttpURLConnection)urlConn).addRequestProperty("User-Agent", userAgent); + // } + // } + // InputStream in = urlConn.getInputStream(); + // BufferedReader rdr = new BufferedReader(new InputStreamReader(in)); + // StringBuffer buffer = new StringBuffer(); + // String line = ""; + // while( (line = rdr.readLine()) != null) { + // buffer.append(line); + // buffer.append("\n"); + // } + // in.close(); + // return buffer.toString(); + // } + + public String toString() { + return this.rules.toString() + " " + this.wildcardRules.toString(); + } } diff --git a/backend/src/main/java/org/osjava/norbert/NoRobotException.java b/backend/src/main/java/org/osjava/norbert/NoRobotException.java index fa5f7291..b16fd5a5 100644 --- a/backend/src/main/java/org/osjava/norbert/NoRobotException.java +++ b/backend/src/main/java/org/osjava/norbert/NoRobotException.java @@ -37,11 +37,11 @@ */ public class NoRobotException extends Exception { - public NoRobotException(String message) { - super(message); - } + public NoRobotException(String message) { + super(message); + } - public NoRobotException(String message, Throwable t) { - super(message + " :::: " + t.getMessage()); - } + public NoRobotException(String message, Throwable t) { + super(message + " :::: " + t.getMessage()); + } } diff --git a/backend/src/main/java/org/osjava/norbert/Rule.java b/backend/src/main/java/org/osjava/norbert/Rule.java index 26b8cca5..b6e993ae 100644 --- a/backend/src/main/java/org/osjava/norbert/Rule.java +++ b/backend/src/main/java/org/osjava/norbert/Rule.java @@ -34,9 +34,9 @@ /** A robots.txt rule. Is a particular path allowed? 
*/ public interface Rule { - /** - * Boolean.TRUE means it is allowed. Boolean.FALSE means it is not allowed. null means that this - * rule is not applicable. - */ - Boolean isAllowed(String path); + /** + * Boolean.TRUE means it is allowed. Boolean.FALSE means it is not allowed. null means that this + * rule is not applicable. + */ + Boolean isAllowed(String path); } diff --git a/backend/src/main/java/org/osjava/norbert/RulesEngine.java b/backend/src/main/java/org/osjava/norbert/RulesEngine.java index b9d532ce..4c2c0aa8 100644 --- a/backend/src/main/java/org/osjava/norbert/RulesEngine.java +++ b/backend/src/main/java/org/osjava/norbert/RulesEngine.java @@ -41,47 +41,47 @@ // TODO: Make this package private? class RulesEngine { - private List rules; + private List rules; - public RulesEngine() { - this.rules = new ArrayList(); - } - - public void allowPath(String path) { - add(new AllowedRule(path)); - } + public RulesEngine() { + this.rules = new ArrayList(); + } - public void disallowPath(String path) { - add(new DisallowedRule(path)); - } + public void allowPath(String path) { + add(new AllowedRule(path)); + } - public void add(Rule rule) { - this.rules.add(rule); - } + public void disallowPath(String path) { + add(new DisallowedRule(path)); + } - /** - * Run each Rule in series on the path. If a Rule returns a Boolean, return that. When no more - * rules are left, return null to indicate there were no rules for this path.. - */ - public Boolean isAllowed(String path) { + public void add(Rule rule) { + this.rules.add(rule); + } - Iterator iterator = this.rules.iterator(); - while (iterator.hasNext()) { - Rule rule = (Rule) iterator.next(); - Boolean test = rule.isAllowed(path); - if (test != null) { - return test; - } - } + /** + * Run each Rule in series on the path. If a Rule returns a Boolean, return that. When no more + * rules are left, return null to indicate there were no rules for this path.. 
+ */ + public Boolean isAllowed(String path) { - return null; + Iterator iterator = this.rules.iterator(); + while (iterator.hasNext()) { + Rule rule = (Rule) iterator.next(); + Boolean test = rule.isAllowed(path); + if (test != null) { + return test; + } } - public boolean isEmpty() { - return this.rules.isEmpty(); - } + return null; + } - public String toString() { - return "RulesEngine: " + this.rules; - } + public boolean isEmpty() { + return this.rules.isEmpty(); + } + + public String toString() { + return "RulesEngine: " + this.rules; + } } diff --git a/backend/src/main/java/sparqles/analytics/AAnalyser.java b/backend/src/main/java/sparqles/analytics/AAnalyser.java index 6926b1a9..058431cc 100644 --- a/backend/src/main/java/sparqles/analytics/AAnalyser.java +++ b/backend/src/main/java/sparqles/analytics/AAnalyser.java @@ -14,219 +14,218 @@ import sparqles.utils.MongoDBManager; public class AAnalyser extends Analytics { - public static final int LAST_HOUR = 0; - public static final int LAST_24HOURS = 1; - public static final int LAST_7DAYS = 2; - public static final int LAST_31DAYS = 3; - public static final int THIS_WEEK = 4; - private static final Logger log = LoggerFactory.getLogger(AAnalyser.class); - public static DateCalculator _dates = new DateCalculator(); - - public AAnalyser(MongoDBManager dbm) { - super(dbm); - } - - static void setDateCalculator(DateCalculator calc) { - _dates = calc; - } - - /** Computes the aggregated statistics for the Availability task */ - public boolean analyse(AResult ares) { - try { - log.info("[ANALYSE] {}", ares); - - Calendar now = Calendar.getInstance(); - now.setTimeInMillis(ares.getEndpointResult().getStart()); - log.debug("Start date: {}", now.getTime()); - - Endpoint ep = ares.getEndpointResult().getEndpoint(); - Calendar[] dates = _dates.getDates(ares.getEndpointResult().getStart()); - - // get the views - AvailabilityView aview = getView(ep); - EPView epview = getEPView(ep); - - // query mongodb for all AResults 
in the last 31 days - List results = - _db.getResultsSince( - ep, - AResult.class, - AResult.SCHEMA$, - dates[LAST_31DAYS].getTimeInMillis(), - now.getTimeInMillis()); - log.debug( - "Query for {}< - >={} returned " + results.size() + " results", - dates[LAST_31DAYS].getTime(), - now.getTime()); - - SummaryStatistics last24HoursStats = new SummaryStatistics(); - SummaryStatistics last7DaysStats = new SummaryStatistics(); - SummaryStatistics last31DaysStats = new SummaryStatistics(); - SummaryStatistics thisWeekStats = new SummaryStatistics(); - - for (AResult res : results) { - long start = res.getEndpointResult().getStart(); - Calendar next = Calendar.getInstance(); - next.setTimeInMillis(start); - - if (start > dates[LAST_24HOURS].getTimeInMillis()) { - update(last24HoursStats, res); - log.debug(" {} >24h {}", next.getTime(), dates[LAST_24HOURS].getTime()); - } - if (start > dates[LAST_7DAYS].getTimeInMillis()) { - update(last7DaysStats, res); - log.debug(" {} >7d {}", next.getTime(), dates[LAST_7DAYS].getTime()); - } - if (start > dates[LAST_31DAYS].getTimeInMillis()) { - update(last31DaysStats, res); - log.debug(" {} >31d {}", next.getTime(), dates[LAST_31DAYS].getTime()); - } - if (start > dates[THIS_WEEK].getTimeInMillis()) { - update(thisWeekStats, res); - log.debug(" {} >week {}", next.getTime(), dates[THIS_WEEK].getTime()); - } - } - - // Update the views - EPViewAvailability epav = epview.getAvailability(); - - double last24HouerMean = 0; - if (!Double.isNaN(last24HoursStats.getMean())) - last24HouerMean = last24HoursStats.getMean(); - epav.setUptimeLast24h(last24HouerMean); - aview.setUptimeLast24h(last24HouerMean); - - boolean upNow = ares.getIsAvailable(); - aview.setUpNow(upNow); - epav.setUpNow(upNow); - - double last7dayMean = 0; - if (!Double.isNaN(last7DaysStats.getMean())) last7dayMean = last7DaysStats.getMean(); - aview.setUptimeLast7d(last7dayMean); - epav.setUptimeLast7d(last7dayMean); - - double thisweek = 0D; - if 
(!Double.isNaN(thisWeekStats.getMean())) { - thisweek = thisWeekStats.getMean(); - } - - Long key = dates[THIS_WEEK].getTimeInMillis(); - boolean exists = false; - for (EPViewAvailabilityDataPoint dd : epav.getData().getValues()) { - // System.out.println(dd.getX()+" =?= "+key); - if (dd.getX().equals(key)) { - exists = true; - dd.setY(thisweek); - } - } - // System.out.println(exists); - if (!exists) { - epav.getData().getValues().add(new EPViewAvailabilityDataPoint(key, thisweek)); - log.debug("Add new week: " + key); - } - - // if(thisweek<1D && thisweek>0D){ - // System.out.println("Hello"); - // } - - double last31dayMean = 0; - if (!Double.isNaN(last31DaysStats.getMean())) last31dayMean = last31DaysStats.getMean(); - epav.setUptimeLast31d(last31dayMean); - - // update overallUp - int runs = epav.getTestRuns(); - Double mean = epav.getUptimeOverall() * runs; - if (mean == null) mean = 0D; - if (upNow) mean += 1; - epav.setTestRuns(runs + 1); - epav.setUptimeOverall(mean / (double) (runs + 1)); - - log.debug(" [AView] {}", aview); - log.debug(" [EPView] {}", epview); - aview.setLastUpdate(ares.getEndpointResult().getEnd()); - - boolean succ = false; - succ = _db.update(aview); - succ = _db.update(epview); - - // System.err.println("AView (after)="+aview); - // System.err.println("EPView (after)="+epview); - - return succ; - } catch (Exception e) { - log.warn("[EXEC] {}", e); + public static final int LAST_HOUR = 0; + public static final int LAST_24HOURS = 1; + public static final int LAST_7DAYS = 2; + public static final int LAST_31DAYS = 3; + public static final int THIS_WEEK = 4; + private static final Logger log = LoggerFactory.getLogger(AAnalyser.class); + public static DateCalculator _dates = new DateCalculator(); + + public AAnalyser(MongoDBManager dbm) { + super(dbm); + } + + static void setDateCalculator(DateCalculator calc) { + _dates = calc; + } + + /** Computes the aggregated statistics for the Availability task */ + public boolean analyse(AResult 
ares) { + try { + log.info("[ANALYSE] {}", ares); + + Calendar now = Calendar.getInstance(); + now.setTimeInMillis(ares.getEndpointResult().getStart()); + log.debug("Start date: {}", now.getTime()); + + Endpoint ep = ares.getEndpointResult().getEndpoint(); + Calendar[] dates = _dates.getDates(ares.getEndpointResult().getStart()); + + // get the views + AvailabilityView aview = getView(ep); + EPView epview = getEPView(ep); + + // query mongodb for all AResults in the last 31 days + List results = + _db.getResultsSince( + ep, + AResult.class, + AResult.SCHEMA$, + dates[LAST_31DAYS].getTimeInMillis(), + now.getTimeInMillis()); + log.debug( + "Query for {}< - >={} returned " + results.size() + " results", + dates[LAST_31DAYS].getTime(), + now.getTime()); + + SummaryStatistics last24HoursStats = new SummaryStatistics(); + SummaryStatistics last7DaysStats = new SummaryStatistics(); + SummaryStatistics last31DaysStats = new SummaryStatistics(); + SummaryStatistics thisWeekStats = new SummaryStatistics(); + + for (AResult res : results) { + long start = res.getEndpointResult().getStart(); + Calendar next = Calendar.getInstance(); + next.setTimeInMillis(start); + + if (start > dates[LAST_24HOURS].getTimeInMillis()) { + update(last24HoursStats, res); + log.debug(" {} >24h {}", next.getTime(), dates[LAST_24HOURS].getTime()); } - return false; - } - - private void update(SummaryStatistics stats, AResult res) { - if (res.getIsAvailable()) { - stats.addValue(1); - } else stats.addValue(0); - } - - private AvailabilityView getView(Endpoint ep) { - AvailabilityView view = null; - List views = - _db.getResults(ep, AvailabilityView.class, AvailabilityView.SCHEMA$); - if (views.size() != 1) { - log.warn("We have {} AvailabilityView, expected was 1", views.size()); + if (start > dates[LAST_7DAYS].getTimeInMillis()) { + update(last7DaysStats, res); + log.debug(" {} >7d {}", next.getTime(), dates[LAST_7DAYS].getTime()); } - if (views.size() == 0) { - view = new AvailabilityView(); - 
view.setEndpoint(ep); - _db.insert(view); - - } else { - view = views.get(0); + if (start > dates[LAST_31DAYS].getTimeInMillis()) { + update(last31DaysStats, res); + log.debug(" {} >31d {}", next.getTime(), dates[LAST_31DAYS].getTime()); + } + if (start > dates[THIS_WEEK].getTimeInMillis()) { + update(thisWeekStats, res); + log.debug(" {} >week {}", next.getTime(), dates[THIS_WEEK].getTime()); } - return view; + } + + // Update the views + EPViewAvailability epav = epview.getAvailability(); + + double last24HouerMean = 0; + if (!Double.isNaN(last24HoursStats.getMean())) last24HouerMean = last24HoursStats.getMean(); + epav.setUptimeLast24h(last24HouerMean); + aview.setUptimeLast24h(last24HouerMean); + + boolean upNow = ares.getIsAvailable(); + aview.setUpNow(upNow); + epav.setUpNow(upNow); + + double last7dayMean = 0; + if (!Double.isNaN(last7DaysStats.getMean())) last7dayMean = last7DaysStats.getMean(); + aview.setUptimeLast7d(last7dayMean); + epav.setUptimeLast7d(last7dayMean); + + double thisweek = 0D; + if (!Double.isNaN(thisWeekStats.getMean())) { + thisweek = thisWeekStats.getMean(); + } + + long key = dates[THIS_WEEK].getTimeInMillis(); + boolean exists = false; + for (EPViewAvailabilityDataPoint dd : epav.getData().getValues()) { + // System.out.println(dd.getX()+" =?= "+key); + if (dd.getX() == key) { + exists = true; + dd.setY(thisweek); + } + } + // System.out.println(exists); + if (!exists) { + epav.getData().getValues().add(new EPViewAvailabilityDataPoint(key, thisweek)); + log.debug("Add new week: " + key); + } + + // if(thisweek<1D && thisweek>0D){ + // System.out.println("Hello"); + // } + + double last31dayMean = 0; + if (!Double.isNaN(last31DaysStats.getMean())) last31dayMean = last31DaysStats.getMean(); + epav.setUptimeLast31d(last31dayMean); + + // update overallUp + int runs = epav.getTestRuns(); + Double mean = epav.getUptimeOverall() * runs; + if (mean == null) mean = 0D; + if (upNow) mean += 1; + epav.setTestRuns(runs + 1); + 
epav.setUptimeOverall(mean / (double) (runs + 1)); + + log.debug(" [AView] {}", aview); + log.debug(" [EPView] {}", epview); + aview.setLastUpdate(ares.getEndpointResult().getEnd()); + + boolean succ = false; + succ = _db.update(aview); + succ = _db.update(epview); + + // System.err.println("AView (after)="+aview); + // System.err.println("EPView (after)="+epview); + + return succ; + } catch (Exception e) { + log.warn("[EXEC] {}", e); + } + return false; + } + + private void update(SummaryStatistics stats, AResult res) { + if (res.getIsAvailable()) { + stats.addValue(1); + } else stats.addValue(0); + } + + private AvailabilityView getView(Endpoint ep) { + AvailabilityView view = null; + List views = + _db.getResults(ep, AvailabilityView.class, AvailabilityView.SCHEMA$); + if (views.size() != 1) { + log.warn("We have {} AvailabilityView, expected was 1", views.size()); } + if (views.size() == 0) { + view = new AvailabilityView(); + view.setEndpoint(ep); + _db.insert(view); - // private Calendar[] getDates(long time) { - // Calendar now = Calendar.getInstance(); - // now.setTimeInMillis(time); - // - // Calendar lastHour = (Calendar) now.clone(); - // lastHour.add(Calendar.HOUR, -1); - // //testing - // //lastHour.add(Calendar.MINUTE, -2); - // - // Calendar last24Hour = (Calendar) now.clone(); - //// last24Hour.add(Calendar.HOUR, -24); - // last24Hour.add(Calendar.MINUTE, -6); - // - // Calendar last7Days = (Calendar) now.clone(); - // // last7Days.add(Calendar.DAY_OF_YEAR, -7); - // last7Days.add(Calendar.MINUTE, -12); - // - // - // Calendar last31Days = (Calendar) now.clone(); - // // last31Days.add(Calendar.DAY_OF_YEAR, -31); - // last31Days.add(Calendar.MINUTE, -18); - // - // - // Calendar thisweek = Calendar.getInstance(); - // // thisweek.set(Calendar.YEAR, now.get(Calendar.YEAR)); - // // thisweek.set(Calendar.WEEK_OF_YEAR, now.get(Calendar.WEEK_OF_YEAR)); - // thisweek.set(Calendar.YEAR, now.get(Calendar.YEAR)); - // thisweek.set(Calendar.DAY_OF_YEAR, 
now.get(Calendar.DAY_OF_YEAR)); - // thisweek.set(Calendar.HOUR_OF_DAY, now.get(Calendar.HOUR_OF_DAY)); - // thisweek.set(Calendar.MINUTE, (now.get(Calendar.MINUTE)/10)*10); - // - // - // - // Calendar [] c = new Calendar[5]; - // c[LAST_HOUR]=lastHour; - // c[LAST_24HOURS]=last24Hour; - // c[LAST_7DAYS]= last7Days; - // c[LAST_31DAYS] = last31Days; - // c[THIS_WEEK] = thisweek; - //// System.out.println("[DATES] from "+now.getTime()+" last1h:"+lastHour.getTime()+" - // last24h:"+last24Hour.getTime()); - //// System.out.println(thisweek.getTime()); - // return c; - // } + } else { + view = views.get(0); + } + return view; + } + + // private Calendar[] getDates(long time) { + // Calendar now = Calendar.getInstance(); + // now.setTimeInMillis(time); + // + // Calendar lastHour = (Calendar) now.clone(); + // lastHour.add(Calendar.HOUR, -1); + // //testing + // //lastHour.add(Calendar.MINUTE, -2); + // + // Calendar last24Hour = (Calendar) now.clone(); + //// last24Hour.add(Calendar.HOUR, -24); + // last24Hour.add(Calendar.MINUTE, -6); + // + // Calendar last7Days = (Calendar) now.clone(); + // // last7Days.add(Calendar.DAY_OF_YEAR, -7); + // last7Days.add(Calendar.MINUTE, -12); + // + // + // Calendar last31Days = (Calendar) now.clone(); + // // last31Days.add(Calendar.DAY_OF_YEAR, -31); + // last31Days.add(Calendar.MINUTE, -18); + // + // + // Calendar thisweek = Calendar.getInstance(); + // // thisweek.set(Calendar.YEAR, now.get(Calendar.YEAR)); + // // thisweek.set(Calendar.WEEK_OF_YEAR, now.get(Calendar.WEEK_OF_YEAR)); + // thisweek.set(Calendar.YEAR, now.get(Calendar.YEAR)); + // thisweek.set(Calendar.DAY_OF_YEAR, now.get(Calendar.DAY_OF_YEAR)); + // thisweek.set(Calendar.HOUR_OF_DAY, now.get(Calendar.HOUR_OF_DAY)); + // thisweek.set(Calendar.MINUTE, (now.get(Calendar.MINUTE)/10)*10); + // + // + // + // Calendar [] c = new Calendar[5]; + // c[LAST_HOUR]=lastHour; + // c[LAST_24HOURS]=last24Hour; + // c[LAST_7DAYS]= last7Days; + // c[LAST_31DAYS] = last31Days; + 
// c[THIS_WEEK] = thisweek; + //// System.out.println("[DATES] from "+now.getTime()+" last1h:"+lastHour.getTime()+" + // last24h:"+last24Hour.getTime()); + //// System.out.println(thisweek.getTime()); + // return c; + // } } @@ -236,40 +235,40 @@ private AvailabilityView getView(Endpoint ep) { * @author umbrichj */ class DateCalculator { - Calendar[] getDates(long time) { - Calendar now = Calendar.getInstance(); - now.setTimeInMillis(time); - - Calendar lastHour = (Calendar) now.clone(); - lastHour.add(Calendar.HOUR, -1); - - Calendar last24Hour = (Calendar) now.clone(); - last24Hour.add(Calendar.HOUR, -24); - - Calendar last7Days = (Calendar) now.clone(); - last7Days.add(Calendar.DAY_OF_YEAR, -7); - - Calendar last31Days = (Calendar) now.clone(); - last31Days.add(Calendar.DAY_OF_YEAR, -31); - - Calendar thisweek = Calendar.getInstance(); - // thisweek.add(Calendar.DAY_OF_YEAR, -1); - - thisweek.set(Calendar.YEAR, now.get(Calendar.YEAR)); - thisweek.set(Calendar.WEEK_OF_YEAR, now.get(Calendar.WEEK_OF_YEAR)); - thisweek.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); - thisweek.set(Calendar.HOUR_OF_DAY, 0); - thisweek.set(Calendar.MINUTE, 0); - thisweek.set(Calendar.SECOND, 0); - thisweek.set(Calendar.MILLISECOND, 0); - // - - Calendar[] c = new Calendar[5]; - c[AAnalyser.LAST_HOUR] = lastHour; - c[AAnalyser.LAST_24HOURS] = last24Hour; - c[AAnalyser.LAST_7DAYS] = last7Days; - c[AAnalyser.LAST_31DAYS] = last31Days; - c[AAnalyser.THIS_WEEK] = thisweek; - return c; - } + Calendar[] getDates(long time) { + Calendar now = Calendar.getInstance(); + now.setTimeInMillis(time); + + Calendar lastHour = (Calendar) now.clone(); + lastHour.add(Calendar.HOUR, -1); + + Calendar last24Hour = (Calendar) now.clone(); + last24Hour.add(Calendar.HOUR, -24); + + Calendar last7Days = (Calendar) now.clone(); + last7Days.add(Calendar.DAY_OF_YEAR, -7); + + Calendar last31Days = (Calendar) now.clone(); + last31Days.add(Calendar.DAY_OF_YEAR, -31); + + Calendar thisweek = Calendar.getInstance(); + 
// thisweek.add(Calendar.DAY_OF_YEAR, -1); + + thisweek.set(Calendar.YEAR, now.get(Calendar.YEAR)); + thisweek.set(Calendar.WEEK_OF_YEAR, now.get(Calendar.WEEK_OF_YEAR)); + thisweek.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); + thisweek.set(Calendar.HOUR_OF_DAY, 0); + thisweek.set(Calendar.MINUTE, 0); + thisweek.set(Calendar.SECOND, 0); + thisweek.set(Calendar.MILLISECOND, 0); + // + + Calendar[] c = new Calendar[5]; + c[AAnalyser.LAST_HOUR] = lastHour; + c[AAnalyser.LAST_24HOURS] = last24Hour; + c[AAnalyser.LAST_7DAYS] = last7Days; + c[AAnalyser.LAST_31DAYS] = last31Days; + c[AAnalyser.THIS_WEEK] = thisweek; + return c; + } } diff --git a/backend/src/main/java/sparqles/analytics/AEvol.java b/backend/src/main/java/sparqles/analytics/AEvol.java index 351bfadb..085b800f 100644 --- a/backend/src/main/java/sparqles/analytics/AEvol.java +++ b/backend/src/main/java/sparqles/analytics/AEvol.java @@ -1,6 +1,5 @@ package sparqles.analytics; -import com.google.gson.Gson; import com.mongodb.MongoClient; import java.io.BufferedReader; import java.io.IOException; @@ -8,124 +7,144 @@ import java.net.URL; import java.text.SimpleDateFormat; import java.util.Calendar; +import java.util.Collection; +import java.util.TimeZone; import org.jongo.Jongo; import org.jongo.MongoCollection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import sparqles.avro.Endpoint; import sparqles.core.SPARQLESProperties; import sparqles.paper.objects.AMonth; -import sparqles.paper.objects.AvailEpFromList; import sparqles.utils.MongoDBManager; public class AEvol { - - public AEvol(String[] args) { - try { - Gson gson = new Gson(); - - // check if there is any stat to run or if it is up to date - // open connection to mongodb aEvol collection - Jongo jongo = - new Jongo( - new MongoClient( - SPARQLESProperties.getDB_HOST() - + ":" - + SPARQLESProperties.getDB_PORT()) - .getDB(SPARQLESProperties.getDB_NAME())); - MongoCollection amonthsColl = jongo.getCollection(MongoDBManager.COLL_AMONTHS); - 
- // get last month - AMonth lastMonth = amonthsColl.findOne().orderBy("{date: -1}").as(AMonth.class); - // check that lastMonth is from a different month than the current one - Calendar cal = Calendar.getInstance(); - cal.setTime(lastMonth.getDate()); - Calendar calNow = Calendar.getInstance(); - - // in case there is at least a full new month to process - cal.add(Calendar.MONTH, 1); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - while (calNow.get(Calendar.MONTH) > cal.get(Calendar.MONTH)) { - // get the end of the month - Calendar calEnd = (Calendar) cal.clone(); - calEnd.add(Calendar.MONTH, 1); - System.out.println( - "Computing month aggregation from date [" - + sdf.format(cal.getTime()) - + " to " - + sdf.format(calEnd.getTime()) - + "["); - - // read the list of endpoints - String json = readUrl("http://sparqles.ai.wu.ac.at/api/endpoint/list"); - AvailEpFromList[] epArray = gson.fromJson(json, AvailEpFromList[].class); - MongoCollection atasksColl = jongo.getCollection(MongoDBManager.COLL_AVAIL); - // System.out.println(atasksColl.count("{'endpointResult.start': {$gt : #}}", - // cal.getTimeInMillis())); - - AMonth newMonth = new AMonth(); - newMonth.setDate(cal.getTime()); - - // for each endpoint in the list (remove ghosts from the picture), get it's period - // availability and add this ton the result object - for (int i = 0; i < epArray.length; i++) { - - // get number of avail and unavail tests - long nbAvail = - atasksColl.count( - "{'endpointResult.endpoint.uri': '" - + epArray[i].getUri() - + "', 'isAvailable':true, 'endpointResult.start': {$gte" - + " : " - + cal.getTimeInMillis() - + ", $lt : " - + calEnd.getTimeInMillis() - + "}}}"); - long nbUnavail = - atasksColl.count( - "{'endpointResult.endpoint.uri': '" - + epArray[i].getUri() - + "', 'isAvailable':false, 'endpointResult.start':" - + " {$gte : " - + cal.getTimeInMillis() - + ", $lt : " - + calEnd.getTimeInMillis() - + "}}}"); - // 
System.out.println(nbAvail+"\t"+nbUnavail+"\t"+epArray[i].getUri()); - newMonth.addEndpoint(nbAvail, nbUnavail); - } - - // add the new month to the collection - amonthsColl.insert(newMonth); - - // increment the month to process - cal.add(Calendar.MONTH, 1); - } - - } catch (IOException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } + private static final Logger log = LoggerFactory.getLogger(AEvol.class); + + /** + * @param args + */ + public static void main(String[] args) { + new AEvol(args); + } + + public AEvol(String[] args) { + SPARQLESProperties.init(new java.io.File("src/main/resources/sparqles_docker.properties")); + + // read the list of endpoints + MongoDBManager dbm = new MongoDBManager(); + try { + recalculateMonthly(dbm); + } catch (Exception e) { + log.error("Error while recalculating monthly data", e); } + } + + public static void recalculateMonthly(MongoDBManager dbm) { + try { + Collection eps = dbm.get(Endpoint.class, Endpoint.SCHEMA$); + + // check if there is any stat to run or if it is up to date + // open connection to mongodb aEvol collection + Jongo jongo = + new Jongo( + new MongoClient( + SPARQLESProperties.getDB_HOST() + ":" + SPARQLESProperties.getDB_PORT()) + .getDB(SPARQLESProperties.getDB_NAME())); + MongoCollection amonthsColl = jongo.getCollection(MongoDBManager.COLL_AMONTHS); + // get last month + AMonth lastMonth = amonthsColl.findOne().orderBy("{date: -1}").as(AMonth.class); + Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + Calendar calNow = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + calNow.set(Calendar.DAY_OF_MONTH, 1); + calNow.set(Calendar.HOUR, 0); + calNow.set(Calendar.MINUTE, 0); + calNow.set(Calendar.SECOND, 0); + calNow.add(Calendar.MONTH, -1); + if (lastMonth == null) { + cal.setTimeInMillis((dbm.getFirstAvailabitlityTime() / 1000) * 1000); + cal.set(Calendar.DAY_OF_MONTH, 1); + cal.set(Calendar.HOUR, 0); + cal.set(Calendar.MINUTE, 0); + 
cal.set(Calendar.SECOND, 0); + } else { + cal.setTime(lastMonth.getDate()); + cal.add(Calendar.MONTH, 1); + } + + // in case there is at least a full new month to process + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); + while (calNow.compareTo(cal) >= 0) { + // get the end of the month + Calendar calEnd = (Calendar) cal.clone(); + calEnd.add(Calendar.MONTH, 1); + log.debug( + "Computing month aggregation from date [" + + sdf.format(cal.getTime()) + + " to " + + sdf.format(calEnd.getTime()) + + "["); + + // String json = readUrl("https://sparqles.demo.openlinksw.com/api/endpoint/lis"); + // AvailEpFromList[] epArray = gson.fromJson(json, AvailEpFromList[].class); + MongoCollection atasksColl = jongo.getCollection(MongoDBManager.COLL_AVAIL); + // System.out.println(atasksColl.count("{'endpointResult.start': {$gt : #}}", + // cal.getTimeInMillis())); + + AMonth newMonth = new AMonth(); + newMonth.setDate(cal.getTime()); + + // for each endpoint in the collection + for (Endpoint e : eps) { + // get number of avail and unavail tests + long nbAvail = + atasksColl.count( + "{'endpointResult.endpoint.uri': '" + + e.getUri() + + "', 'isAvailable':true, 'endpointResult.start': {$gte : " + + cal.getTimeInMillis() + + ", $lt : " + + calEnd.getTimeInMillis() + + "}}}"); + long nbUnavail = + atasksColl.count( + "{'endpointResult.endpoint.uri': '" + + e.getUri() + + "', 'isAvailable':false, 'endpointResult.start': {$gte : " + + cal.getTimeInMillis() + + ", $lt : " + + calEnd.getTimeInMillis() + + "}}}"); + newMonth.addEndpoint(nbAvail, nbUnavail); + } - /** - * @param args - */ - public static void main(String[] args) { - new AEvol(args); + // add the new month to the collection + amonthsColl.insert(newMonth); + + // increment the month to process + cal.add(Calendar.MONTH, 1); + } + log.debug("Recalculating availability monthly COMPLETE"); + } catch (IOException e) { + log.info("Exception while processing availability monthly (IO)", e); + } catch (Exception e) { + 
log.info("Exception while processing availability monthly (unknown)", e); } - - private static String readUrl(String urlString) throws Exception { - BufferedReader reader = null; - try { - URL url = new URL(urlString); - reader = new BufferedReader(new InputStreamReader(url.openStream())); - StringBuffer buffer = new StringBuffer(); - int read; - char[] chars = new char[1024]; - while ((read = reader.read(chars)) != -1) buffer.append(chars, 0, read); - - return buffer.toString(); - } finally { - if (reader != null) reader.close(); - } + } + + private static String readUrl(String urlString) throws Exception { + BufferedReader reader = null; + try { + URL url = new URL(urlString); + reader = new BufferedReader(new InputStreamReader(url.openStream())); + StringBuffer buffer = new StringBuffer(); + int read; + char[] chars = new char[1024]; + while ((read = reader.read(chars)) != -1) buffer.append(chars, 0, read); + + return buffer.toString(); + } finally { + if (reader != null) reader.close(); } + } } diff --git a/backend/src/main/java/sparqles/analytics/AnalyserInit.java b/backend/src/main/java/sparqles/analytics/AnalyserInit.java index 81504429..c4e6582e 100644 --- a/backend/src/main/java/sparqles/analytics/AnalyserInit.java +++ b/backend/src/main/java/sparqles/analytics/AnalyserInit.java @@ -7,6 +7,7 @@ import org.slf4j.LoggerFactory; import sparqles.avro.Endpoint; import sparqles.avro.availability.AResult; +import sparqles.avro.calculation.CResult; import sparqles.avro.discovery.DResult; import sparqles.avro.features.FResult; import sparqles.avro.performance.PResult; @@ -14,154 +15,180 @@ public class AnalyserInit { - private static final Logger log = LoggerFactory.getLogger(AnalyserInit.class); + private static final Logger log = LoggerFactory.getLogger(AnalyserInit.class); - private MongoDBManager _db; + private MongoDBManager _db; - private boolean _onlyLast; + private boolean _onlyLast; - public AnalyserInit(MongoDBManager db) { - this(db, false); - } + public 
AnalyserInit(MongoDBManager db) { + this(db, false); + } - public AnalyserInit(MongoDBManager db, boolean onlyLast) { - _db = db; - _onlyLast = onlyLast; - } + public AnalyserInit(MongoDBManager db, boolean onlyLast) { + _db = db; + _onlyLast = onlyLast; + } - /** - * Computes the aggregated statistics for the Availability task - * - * @param ep - */ - public void run() { - - List eps = _db.get(Endpoint.class, Endpoint.SCHEMA$); - AAnalyser a = new AAnalyser(_db); - PAnalyser p = new PAnalyser(_db); - DAnalyser d = new DAnalyser(_db); - FAnalyser f = new FAnalyser(_db); - - log.info("Analysing {} endpoints", eps.size()); - for (Endpoint ep : eps) { - log.info("ANALYSE {}", ep.getUri()); - - availability(ep, a); - discoverability(ep, d); - interoperability(ep, f); - performance(ep, p); - } - } + /** Computes the aggregated statistics for the Availability task */ + public void run() { - private void discoverability(Endpoint ep, DAnalyser d) { - TreeSet res = - new TreeSet( - new Comparator() { - public int compare(DResult o1, DResult o2) { - int diff = - o1.getEndpointResult() - .getStart() - .compareTo(o2.getEndpointResult().getStart()); - return diff; - } - }); - - List epRes = _db.getResults(ep, DResult.class, DResult.SCHEMA$); - for (DResult epres : epRes) { - res.add(epres); - } - log.info("Analyse {} Performance results", epRes.size()); - if (_onlyLast && epRes.size() != 0) { - d.analyse(res.last()); - } else { - for (DResult ares : res) { - d.analyse(ares); - } - } - log.info("ANALYSE DISCOVERABILITY {} and {}", ep, epRes.size()); - } + List eps = _db.get(Endpoint.class, Endpoint.SCHEMA$); + AAnalyser a = new AAnalyser(_db); + PAnalyser p = new PAnalyser(_db); + DAnalyser d = new DAnalyser(_db); + FAnalyser f = new FAnalyser(_db); + CAnalyser c = new CAnalyser(_db); - private void performance(Endpoint ep, PAnalyser p) { - TreeSet res = - new TreeSet( - new Comparator() { - public int compare(PResult o1, PResult o2) { - int diff = - o1.getEndpointResult() - 
.getStart() - .compareTo(o2.getEndpointResult().getStart()); - return diff; - } - }); - - List epRes = _db.getResults(ep, PResult.class, PResult.SCHEMA$); - for (PResult epres : epRes) { - res.add(epres); - } - log.info("Analyse {} Performance results", epRes.size()); - if (_onlyLast && epRes.size() != 0) { - p.analyse(res.last()); - } else { - for (PResult ares : res) { - p.analyse(ares); - } - } - log.info("ANALYSE PERFORMANCE {} and {}", ep, epRes.size()); - } + log.info("Analysing {} endpoints", eps.size()); + for (Endpoint ep : eps) { + log.info("ANALYSE {}", ep.getUri()); - private void interoperability(Endpoint ep, FAnalyser f) { - TreeSet res = - new TreeSet( - new Comparator() { - public int compare(FResult o1, FResult o2) { - int diff = - o1.getEndpointResult() - .getStart() - .compareTo(o2.getEndpointResult().getStart()); - return diff; - } - }); - - List epRes = _db.getResults(ep, FResult.class, FResult.SCHEMA$); - for (FResult epres : epRes) { - res.add(epres); - } - log.info("Analyse {} Interoperability results", epRes.size()); - if (_onlyLast && epRes.size() != 0) { - f.analyse(res.last()); - } else { - for (FResult ares : res) { - f.analyse(ares); - } - } - log.info("ANALYSE INTEROPERABILITY {} and {}", ep, epRes.size()); + availability(ep, a); + discoverability(ep, d); + interoperability(ep, f); + performance(ep, p); + calculation(ep, c); } - - private void availability(Endpoint ep, AAnalyser a) { - - TreeSet res = - new TreeSet( - new Comparator() { - public int compare(AResult o1, AResult o2) { - int diff = - o1.getEndpointResult() - .getStart() - .compareTo(o2.getEndpointResult().getStart()); - return diff; - } - }); - - List epRes = _db.getResults(ep, AResult.class, AResult.SCHEMA$); - for (AResult epres : epRes) { - res.add(epres); - } - if (_onlyLast && epRes.size() != 0) { - a.analyse(res.last()); - } else { - for (AResult ares : res) { - a.analyse(ares); - } - } - log.info("ANALYSE AVAILABILITY {} and {}", ep.getUri(), epRes.size()); + } + 
+ private void discoverability(Endpoint ep, DAnalyser d) { + TreeSet res = + new TreeSet( + new Comparator() { + public int compare(DResult o1, DResult o2) { + int diff = + Comparator.comparingLong( + (DResult value) -> value.getEndpointResult().getStart()) + .compare(o1, o2); + return diff; + } + }); + + List epRes = _db.getResults(ep, DResult.class, DResult.SCHEMA$); + for (DResult epres : epRes) { + res.add(epres); + } + log.info("Analyse {} Performance results", epRes.size()); + if (_onlyLast && epRes.size() != 0) { + d.analyse(res.last()); + } else { + for (DResult ares : res) { + d.analyse(ares); + } + } + log.info("ANALYSE DISCOVERABILITY {} and {}", ep, epRes.size()); + } + + private void performance(Endpoint ep, PAnalyser p) { + TreeSet res = + new TreeSet( + new Comparator() { + public int compare(PResult o1, PResult o2) { + int diff = + Comparator.comparingLong( + (PResult value) -> value.getEndpointResult().getStart()) + .compare(o1, o2); + return diff; + } + }); + + List epRes = _db.getResults(ep, PResult.class, PResult.SCHEMA$); + for (PResult epres : epRes) { + res.add(epres); + } + log.info("Analyse {} Performance results", epRes.size()); + if (_onlyLast && epRes.size() != 0) { + p.analyse(res.last()); + } else { + for (PResult ares : res) { + p.analyse(ares); + } + } + log.info("ANALYSE PERFORMANCE {} and {}", ep, epRes.size()); + } + + private void interoperability(Endpoint ep, FAnalyser f) { + TreeSet res = + new TreeSet( + new Comparator() { + public int compare(FResult o1, FResult o2) { + int diff = + Comparator.comparingLong( + (FResult value) -> value.getEndpointResult().getStart()) + .compare(o1, o2); + return diff; + } + }); + + List epRes = _db.getResults(ep, FResult.class, FResult.SCHEMA$); + for (FResult epres : epRes) { + res.add(epres); + } + log.info("Analyse {} Interoperability results", epRes.size()); + if (_onlyLast && epRes.size() != 0) { + f.analyse(res.last()); + } else { + for (FResult ares : res) { + f.analyse(ares); + } + } + 
log.info("ANALYSE INTEROPERABILITY {} and {}", ep, epRes.size()); + } + + private void availability(Endpoint ep, AAnalyser a) { + + TreeSet res = + new TreeSet( + new Comparator() { + public int compare(AResult o1, AResult o2) { + int diff = + Comparator.comparingLong( + (AResult value) -> value.getEndpointResult().getStart()) + .compare(o1, o2); + return diff; + } + }); + + List epRes = _db.getResults(ep, AResult.class, AResult.SCHEMA$); + for (AResult epres : epRes) { + res.add(epres); + } + if (_onlyLast && epRes.size() != 0) { + a.analyse(res.last()); + } else { + for (AResult ares : res) { + a.analyse(ares); + } + } + log.info("ANALYSE AVAILABILITY {} and {}", ep.getUri(), epRes.size()); + } + + private void calculation(Endpoint ep, CAnalyser c) { + + TreeSet res = + new TreeSet( + new Comparator() { + public int compare(CResult o1, CResult o2) { + int diff = + Comparator.comparingLong( + (CResult value) -> value.getEndpointResult().getStart()) + .compare(o1, o2); + return diff; + } + }); + + List epRes = _db.getResults(ep, CResult.class, CResult.SCHEMA$); + for (CResult epres : epRes) { + res.add(epres); + } + if (_onlyLast && epRes.size() != 0) { + c.analyse(res.last()); + } else { + for (CResult cres : res) { + c.analyse(cres); + } } + log.info("ANALYSE CALCULATION {} and {}", ep.getUri(), epRes.size()); + } } diff --git a/backend/src/main/java/sparqles/analytics/Analytics.java b/backend/src/main/java/sparqles/analytics/Analytics.java index 8f11e249..95911aeb 100644 --- a/backend/src/main/java/sparqles/analytics/Analytics.java +++ b/backend/src/main/java/sparqles/analytics/Analytics.java @@ -10,12 +10,17 @@ import sparqles.avro.analytics.EPViewAvailability; import sparqles.avro.analytics.EPViewAvailabilityData; import sparqles.avro.analytics.EPViewAvailabilityDataPoint; +import sparqles.avro.analytics.EPViewCalculation; import sparqles.avro.analytics.EPViewDiscoverability; import sparqles.avro.analytics.EPViewDiscoverabilityData; import 
sparqles.avro.analytics.EPViewInteroperability; import sparqles.avro.analytics.EPViewInteroperabilityData; import sparqles.avro.analytics.EPViewPerformance; import sparqles.avro.analytics.EPViewPerformanceData; +import sparqles.avro.availability.AResult; +import sparqles.avro.discovery.DResult; +import sparqles.avro.features.FResult; +import sparqles.avro.performance.PResult; import sparqles.utils.MongoDBManager; /** @@ -26,63 +31,80 @@ */ public abstract class Analytics { - private static Logger Log = LoggerFactory.getLogger(Analytics.class); + private static Logger log = LoggerFactory.getLogger(Analytics.class); - protected final MongoDBManager _db; + protected final MongoDBManager _db; - public Analytics(MongoDBManager db) { - _db = db; + public Analytics(MongoDBManager db) { + _db = db; + } + + protected EPView getEPView(Endpoint ep) { + EPView view = null; + List views = _db.getResults(ep, EPView.class, EPView.SCHEMA$); + if (views.size() != 1) { + log.warn("We have {} EPView, expected was 1", views.size()); } + if (views.size() == 0) { + view = new EPView(); + view.setEndpoint(ep); - protected EPView getEPView(Endpoint ep) { - EPView view = null; - List views = _db.getResults(ep, EPView.class, EPView.SCHEMA$); - if (views.size() != 1) { - Log.warn("We have {} EPView, expected was 1", views.size()); - } - if (views.size() == 0) { - view = new EPView(); - view.setEndpoint(ep); + EPViewAvailability av = new EPViewAvailability(); + view.setAvailability(av); + EPViewAvailabilityData data = new EPViewAvailabilityData(); + av.setData(data); + data.setKey("Availability"); + data.setValues(new ArrayList()); - EPViewAvailability av = new EPViewAvailability(); - view.setAvailability(av); - EPViewAvailabilityData data = new EPViewAvailabilityData(); - av.setData(data); - data.setKey("Availability"); - data.setValues(new ArrayList()); + EPViewPerformance p = new EPViewPerformance(); + ArrayList askdata = new ArrayList(); + ArrayList joindata = new ArrayList(); - 
EPViewPerformance p = new EPViewPerformance(); - ArrayList askdata = new ArrayList(); - ArrayList joindata = new ArrayList(); + p.setAsk(askdata); + p.setJoin(joindata); - p.setAsk(askdata); - p.setJoin(joindata); + EPViewInteroperability iv = new EPViewInteroperability(); + iv.setSPARQL11Features(new ArrayList()); + iv.setSPARQL1Features(new ArrayList()); - EPViewInteroperability iv = new EPViewInteroperability(); - iv.setSPARQL11Features(new ArrayList()); - iv.setSPARQL1Features(new ArrayList()); + view.setPerformance(p); + view.setAvailability(av); + view.setInteroperability(iv); - view.setPerformance(p); - view.setAvailability(av); - view.setInteroperability(iv); + EPViewDiscoverability dv = + new EPViewDiscoverability( + "", + new ArrayList(), + new ArrayList()); + view.setDiscoverability(dv); - EPViewDiscoverability dv = - new EPViewDiscoverability( - "", - new ArrayList(), - new ArrayList()); - view.setDiscoverability(dv); + EPViewCalculation cv = + new EPViewCalculation( + -1l, + -1l, + -1l, + -1l, + -1l, + -1l, + new java.util.ArrayList(), + "", + false, + "", + false, + -1.0, + -1.0); + view.setCalculation(cv); - _db.insert(view); - } else { - view = views.get(0); - } - return view; + _db.insert(view); + } else { + view = views.get(0); } + return view; + } - /** - * @param result - the result to analyse - * @return true in case of success, false otherwise - */ - public abstract boolean analyse(V result); + /** + * @param result - the result to analyse + * @return true in case of success, false otherwise + */ + public abstract boolean analyse(V result); } diff --git a/backend/src/main/java/sparqles/analytics/CAnalyser.java b/backend/src/main/java/sparqles/analytics/CAnalyser.java new file mode 100644 index 00000000..92c2a80b --- /dev/null +++ b/backend/src/main/java/sparqles/analytics/CAnalyser.java @@ -0,0 +1,95 @@ +package sparqles.analytics; + +import java.util.ArrayList; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
+import sparqles.avro.Endpoint; +import sparqles.avro.analytics.CalculationView; +import sparqles.avro.analytics.EPView; +import sparqles.avro.analytics.EPViewCalculation; +import sparqles.avro.calculation.CResult; +import sparqles.utils.MongoDBManager; + +public class CAnalyser extends Analytics { + private static final Logger log = LoggerFactory.getLogger(CAnalyser.class); + + public CAnalyser(MongoDBManager db) { + super(db); + } + + @Override + public boolean analyse(CResult pres) { + log.info("[Analytics] {}", pres.getEndpointResult().getEndpoint()); + + Endpoint ep = pres.getEndpointResult().getEndpoint(); + + EPView epview = getEPView(ep); + CalculationView cview = getView(ep); + + if (pres.getVoIDPart()) { + cview.setVoIDPart(true); + cview.setVoID(false); + } else { + cview.setVoID(!pres.getVoID().toString().equals("")); + cview.setVoIDPart(false); + } + if (pres.getSDPart()) { + cview.setSDPart(true); + cview.setSD(false); + } else { + cview.setSD(!pres.getSD().toString().equals("")); + cview.setSDPart(false); + } + cview.setCoherence(pres.getCoherence()); + cview.setRS(pres.getRS()); + + cview.setLastUpdate(pres.getEndpointResult().getEnd()); + + EPViewCalculation cepview = epview.getCalculation(); + cepview.setTriples(pres.getTriples()); + cepview.setEntities(pres.getEntities()); + cepview.setClasses(pres.getClasses()); + cepview.setProperties(pres.getProperties()); + cepview.setDistinctSubjects(pres.getDistinctSubjects()); + cepview.setDistinctObjects(pres.getDistinctObjects()); + cepview.setExampleResources(pres.getExampleResources()); + cepview.setVoID(pres.getVoID()); + cepview.setVoIDPart(pres.getVoIDPart()); + cepview.setSD(pres.getSD()); + cepview.setSDPart(pres.getSDPart()); + cepview.setCoherence(pres.getCoherence()); + cepview.setRS(pres.getRS()); + + _db.update(cview); + _db.update(epview); + + return true; + } + + private CalculationView getView(Endpoint ep) { + CalculationView view = null; + List views = new ArrayList(); + if (_db != null) 
{ + views = _db.getResults(ep, CalculationView.class, CalculationView.SCHEMA$); + } + if (views.size() != 1) { + log.warn("We have {} CalculationView, expected was 1", views.size()); + } + if (views.size() == 0) { + view = new CalculationView(); + view.setEndpoint(ep); + view.setSD(false); + view.setSDPart(false); + view.setVoID(false); + view.setVoIDPart(false); + view.setCoherence(-1.0); + view.setRS(-1.0); + view.setLastUpdate(-1L); + if (_db != null) _db.insert(view); + } else { + view = views.get(0); + } + return view; + } +} diff --git a/backend/src/main/java/sparqles/analytics/DAnalyser.java b/backend/src/main/java/sparqles/analytics/DAnalyser.java index ed073452..1b089dcb 100644 --- a/backend/src/main/java/sparqles/analytics/DAnalyser.java +++ b/backend/src/main/java/sparqles/analytics/DAnalyser.java @@ -16,202 +16,198 @@ import sparqles.utils.MongoDBManager; public class DAnalyser extends Analytics { - private static final Logger log = LoggerFactory.getLogger(DAnalyser.class); + private static final Logger log = LoggerFactory.getLogger(DAnalyser.class); - public DAnalyser(MongoDBManager db) { - super(db); - } + public DAnalyser(MongoDBManager db) { + super(db); + } - @Override - public boolean analyse(DResult pres) { - log.info("[Analytics] {}", pres); - - Endpoint ep = pres.getEndpointResult().getEndpoint(); - - DiscoverabilityView dview = getView(ep); - EPView epview = getEPView(ep); - - List lvoid = new ArrayList(); - List lsd = new ArrayList(); - - String serverName = "missing"; - for (DGETInfo info : pres.getDescriptionFiles()) { - if (info.getOperation().toString().equals(DTask.EPURL)) { - if (!info.getResponseServer().toString().equalsIgnoreCase("missing")) { - serverName = info.getResponseServer().toString(); - } - - EPViewDiscoverabilityData d = - new EPViewDiscoverabilityData("HTTP Get", info.getVoiDpreds().size() != 0); - lvoid.add(d); - - d = - new EPViewDiscoverabilityData( - "HTTP Get", info.getSPARQLDESCpreds().size() != 0); - lsd.add(d); - 
} - if (info.getOperation().toString().equalsIgnoreCase("wellknown")) { - if (!info.getResponseServer().toString().equalsIgnoreCase("missing")) { - serverName = info.getResponseServer().toString(); - } - - EPViewDiscoverabilityData d = - new EPViewDiscoverabilityData( - "/.well-known/void", info.getVoiDpreds().size() != 0); - lvoid.add(d); - - d = - new EPViewDiscoverabilityData( - "/.well-known/void", info.getSPARQLDESCpreds().size() != 0); - lsd.add(d); - } - if (info.getSPARQLDESCpreds().size() > 0) { - dview.setSD(true); - } - if (info.getVoiDpreds().size() > 0) { - dview.setVoID(true); - } - } - log.info("Setting server name to {}", serverName); - dview.setServerName(serverName); - EPViewDiscoverability depview = epview.getDiscoverability(); + @Override + public boolean analyse(DResult pres) { + log.info("[Analytics] {}", pres); - depview.setServerName(dview.getServerName()); - depview.setVoIDDescription(lvoid); + Endpoint ep = pres.getEndpointResult().getEndpoint(); - for (QueryInfo info : pres.getQueryInfo()) { + DiscoverabilityView dview = getView(ep); + EPView epview = getEPView(ep); - if (info.getOperation().equals("query-self")) { - EPViewDiscoverabilityData d = - new EPViewDiscoverabilityData( - "SPARQL Endpoint content", info.getResults().size() != 0); - lvoid.add(d); - } + List lvoid = new ArrayList(); + List lsd = new ArrayList(); + + String serverName = "missing"; + for (DGETInfo info : pres.getDescriptionFiles()) { + if (info.getOperation().toString().equals(DTask.EPURL)) { + if (!info.getResponseServer().toString().equalsIgnoreCase("missing")) { + serverName = info.getResponseServer().toString(); } - // - // - - // d = new EPViewDiscoverabilityData("HTTP Get", - // pres.getGetResult().getSPARQLDESCterms()!=0); - // l.add(d); - // d = new EPViewDiscoverabilityData("SPARQL Endpoint content", - // pres.getVoidResult().getSPARQLFile().size()!=0); - // l.add(d); - - // depview.setSDDescription(l); - - 
dview.setLastUpdate(pres.getEndpointResult().getEnd()); - - _db.update(dview); - _db.update(epview); - return true; - - // SummaryStatistics askStatsCold = new SummaryStatistics(); - // SummaryStatistics askStatsWarm = new SummaryStatistics(); - // SummaryStatistics joinStatsCold = new SummaryStatistics(); - // SummaryStatistics joinStatsWarm = new SummaryStatistics(); - // - // //prepare eppview data - // EPViewPerformance eppview = epview.getPerformance(); - // EPViewPerformanceData askCold = new EPViewPerformanceData("Cold ASK Tests","#1f77b4", - // new - // ArrayList()); - // EPViewPerformanceData askWarm = new EPViewPerformanceData("WARM ASK Tests","#2ca02c", - // new - // ArrayList()); - // EPViewPerformanceData joinCold = new EPViewPerformanceData("Cold JOIN Tests","#1f77b4", - // new - // ArrayList()); - // EPViewPerformanceData joinWarm = new EPViewPerformanceData("Warm JOIN Tests","#2ca02c", - // new - // ArrayList()); - // - // ArrayList askdata= new ArrayList(); - // askdata.add(askCold); - // askdata.add(askWarm); - // ArrayList joindata= new ArrayList(); - // joindata.add(joinCold); - // joindata.add(joinWarm); - // - // eppview.setAsk(askdata); - // eppview.setJoin(joindata); - // - // - // Map map = pres.getResults(); - // int limit =0 ; - // - // for(Entry ent: map.entrySet()){ - // PSingleResult res = ent.getValue(); - // if(ent.getKey().toString().startsWith("ASK")){ - // askStatsCold.addValue(res.getCold().getClosetime()/(double)1000); - // askStatsWarm.addValue(res.getWarm().getClosetime()/(double)1000); - // - // String key = ent.getKey().toString().replaceAll("ASK", "").toLowerCase(); - // - // - // askCold.getData().add(new - // EPViewPerformanceDataValues(key,res.getCold().getClosetime()/(double)1000)); - // askWarm.getData().add(new - // EPViewPerformanceDataValues(key,res.getWarm().getClosetime()/(double)1000)); - // }else if(ent.getKey().toString().startsWith("JOIN")){ - // 
joinStatsCold.addValue(res.getCold().getClosetime()/(double)1000); - // joinStatsWarm.addValue(res.getCold().getClosetime()/(double)1000); - // - // String key = ent.getKey().toString().replaceAll("JOIN", "").toLowerCase(); - // - // joinCold.getData().add(new - // EPViewPerformanceDataValues(key,res.getCold().getClosetime()/(double)1000)); - // joinWarm.getData().add(new - // EPViewPerformanceDataValues(key,res.getWarm().getClosetime()/(double)1000)); - // }else if(ent.getKey().toString().startsWith("LIMIT")){ - // int sol = res.getCold().getSolutions(); - // if(Math.max(limit, sol)==sol){ - // limit = sol; - // } - // sol = res.getWarm().getSolutions(); - // if(Math.max(limit, sol)==sol){ - // limit = sol; - // } - // } - // } - // - // - // //Update pview data - // pview.setAskMeanCold(askStatsCold.getMean()); - // pview.setAskMeanWarm(askStatsWarm.getMean()); - // pview.setJoinMeanCold(joinStatsCold.getMean()); - // pview.setJoinMeanWarm(joinStatsWarm.getMean()); - // - // - // System.out.println(pview); - // System.out.println(epview); - // _db.update(pview); - // _db.update(epview); - // - // return true; - } + EPViewDiscoverabilityData d = + new EPViewDiscoverabilityData("HTTP Get", info.getVoiDpreds().size() != 0); + lvoid.add(d); - private DiscoverabilityView getView(Endpoint ep) { - DiscoverabilityView view = null; - List views = new ArrayList(); - if (_db != null) { - views = _db.getResults(ep, DiscoverabilityView.class, DiscoverabilityView.SCHEMA$); + d = new EPViewDiscoverabilityData("HTTP Get", info.getSPARQLDESCpreds().size() != 0); + lsd.add(d); + } + if (info.getOperation().toString().equalsIgnoreCase("wellknown")) { + if (!info.getResponseServer().toString().equalsIgnoreCase("missing")) { + serverName = info.getResponseServer().toString(); } - if (views.size() != 1) { - log.warn("We have {} FeatureView, expected was 1", views.size()); - } - if (views.size() == 0) { - view = new DiscoverabilityView(); - view.setEndpoint(ep); - view.setSD(false); - 
view.setVoID(false); - view.setServerName("missing"); - view.setLastUpdate(-1L); - if (_db != null) _db.insert(view); - } else { - view = views.get(0); - } - return view; + + EPViewDiscoverabilityData d = + new EPViewDiscoverabilityData("/.well-known/void", info.getVoiDpreds().size() != 0); + lvoid.add(d); + + d = + new EPViewDiscoverabilityData( + "/.well-known/void", info.getSPARQLDESCpreds().size() != 0); + lsd.add(d); + } + if (info.getSPARQLDESCpreds().size() > 0) { + dview.setSD(true); + } + if (info.getVoiDpreds().size() > 0) { + dview.setVoID(true); + } + } + log.info("Setting server name to {}", serverName); + dview.setServerName(serverName); + EPViewDiscoverability depview = epview.getDiscoverability(); + + depview.setServerName(dview.getServerName()); + depview.setVoIDDescription(lvoid); + + for (QueryInfo info : pres.getQueryInfo()) { + + if (info.getOperation().equals("query-self")) { + EPViewDiscoverabilityData d = + new EPViewDiscoverabilityData("SPARQL Endpoint content", info.getResults().size() != 0); + lvoid.add(d); + } + } + + // + // + + // d = new EPViewDiscoverabilityData("HTTP Get", + // pres.getGetResult().getSPARQLDESCterms()!=0); + // l.add(d); + // d = new EPViewDiscoverabilityData("SPARQL Endpoint content", + // pres.getVoidResult().getSPARQLFile().size()!=0); + // l.add(d); + + // depview.setSDDescription(l); + + dview.setLastUpdate(pres.getEndpointResult().getEnd()); + + _db.update(dview); + _db.update(epview); + return true; + + // SummaryStatistics askStatsCold = new SummaryStatistics(); + // SummaryStatistics askStatsWarm = new SummaryStatistics(); + // SummaryStatistics joinStatsCold = new SummaryStatistics(); + // SummaryStatistics joinStatsWarm = new SummaryStatistics(); + // + // //prepare eppview data + // EPViewPerformance eppview = epview.getPerformance(); + // EPViewPerformanceData askCold = new EPViewPerformanceData("Cold ASK Tests","#1f77b4", + // new + // ArrayList()); + // EPViewPerformanceData askWarm = new 
EPViewPerformanceData("WARM ASK Tests","#2ca02c", + // new + // ArrayList()); + // EPViewPerformanceData joinCold = new EPViewPerformanceData("Cold JOIN Tests","#1f77b4", + // new + // ArrayList()); + // EPViewPerformanceData joinWarm = new EPViewPerformanceData("Warm JOIN Tests","#2ca02c", + // new + // ArrayList()); + // + // ArrayList askdata= new ArrayList(); + // askdata.add(askCold); + // askdata.add(askWarm); + // ArrayList joindata= new ArrayList(); + // joindata.add(joinCold); + // joindata.add(joinWarm); + // + // eppview.setAsk(askdata); + // eppview.setJoin(joindata); + // + // + // Map map = pres.getResults(); + // int limit =0 ; + // + // for(Entry ent: map.entrySet()){ + // PSingleResult res = ent.getValue(); + // if(ent.getKey().toString().startsWith("ASK")){ + // askStatsCold.addValue(res.getCold().getClosetime()/(double)1000); + // askStatsWarm.addValue(res.getWarm().getClosetime()/(double)1000); + // + // String key = ent.getKey().toString().replaceAll("ASK", "").toLowerCase(); + // + // + // askCold.getData().add(new + // EPViewPerformanceDataValues(key,res.getCold().getClosetime()/(double)1000)); + // askWarm.getData().add(new + // EPViewPerformanceDataValues(key,res.getWarm().getClosetime()/(double)1000)); + // }else if(ent.getKey().toString().startsWith("JOIN")){ + // joinStatsCold.addValue(res.getCold().getClosetime()/(double)1000); + // joinStatsWarm.addValue(res.getCold().getClosetime()/(double)1000); + // + // String key = ent.getKey().toString().replaceAll("JOIN", "").toLowerCase(); + // + // joinCold.getData().add(new + // EPViewPerformanceDataValues(key,res.getCold().getClosetime()/(double)1000)); + // joinWarm.getData().add(new + // EPViewPerformanceDataValues(key,res.getWarm().getClosetime()/(double)1000)); + // }else if(ent.getKey().toString().startsWith("LIMIT")){ + // int sol = res.getCold().getSolutions(); + // if(Math.max(limit, sol)==sol){ + // limit = sol; + // } + // sol = res.getWarm().getSolutions(); + // if(Math.max(limit, 
sol)==sol){ + // limit = sol; + // } + // } + // } + // + // + // //Update pview data + // pview.setAskMeanCold(askStatsCold.getMean()); + // pview.setAskMeanWarm(askStatsWarm.getMean()); + // pview.setJoinMeanCold(joinStatsCold.getMean()); + // pview.setJoinMeanWarm(joinStatsWarm.getMean()); + // + // + // System.out.println(pview); + // System.out.println(epview); + // _db.update(pview); + // _db.update(epview); + // + // return true; + } + + private DiscoverabilityView getView(Endpoint ep) { + DiscoverabilityView view = null; + List views = new ArrayList(); + if (_db != null) { + views = _db.getResults(ep, DiscoverabilityView.class, DiscoverabilityView.SCHEMA$); + } + if (views.size() != 1) { + log.warn("We have {} FeatureView, expected was 1", views.size()); + } + if (views.size() == 0) { + view = new DiscoverabilityView(); + view.setEndpoint(ep); + view.setSD(false); + view.setVoID(false); + view.setServerName("missing"); + view.setLastUpdate(-1L); + if (_db != null) _db.insert(view); + } else { + view = views.get(0); } + return view; + } } diff --git a/backend/src/main/java/sparqles/analytics/EndpointComparator.java b/backend/src/main/java/sparqles/analytics/EndpointComparator.java index 32aad11e..1b0f16a7 100644 --- a/backend/src/main/java/sparqles/analytics/EndpointComparator.java +++ b/backend/src/main/java/sparqles/analytics/EndpointComparator.java @@ -5,20 +5,20 @@ public class EndpointComparator implements Comparator { - @Override - public int compare(Endpoint o1, Endpoint o2) { - int diff = o1.getUri().toString().compareToIgnoreCase(o2.getUri().toString()); + @Override + public int compare(Endpoint o1, Endpoint o2) { + int diff = o1.getUri().toString().compareToIgnoreCase(o2.getUri().toString()); - // if(diff == 0) - // diff= o1.getDatasets().size()- o2.getDatasets().size(); + // if(diff == 0) + // diff= o1.getDatasets().size()- o2.getDatasets().size(); - // if(diff == 0){ - // for(Dataset d: o1.getDatasets()){ - // if(!o2.getDatasets().contains(d)){ - 
// return -1; - // } - // } - // } - return diff; - } + // if(diff == 0){ + // for(Dataset d: o1.getDatasets()){ + // if(!o2.getDatasets().contains(d)){ + // return -1; + // } + // } + // } + return diff; + } } diff --git a/backend/src/main/java/sparqles/analytics/FAnalyser.java b/backend/src/main/java/sparqles/analytics/FAnalyser.java index ef7e80f7..9ae49c76 100644 --- a/backend/src/main/java/sparqles/analytics/FAnalyser.java +++ b/backend/src/main/java/sparqles/analytics/FAnalyser.java @@ -16,82 +16,80 @@ import sparqles.utils.MongoDBManager; public class FAnalyser extends Analytics { - private static final Logger log = LoggerFactory.getLogger(FAnalyser.class); + private static final Logger log = LoggerFactory.getLogger(FAnalyser.class); - public FAnalyser(MongoDBManager db) { - super(db); - } + public FAnalyser(MongoDBManager db) { + super(db); + } - @Override - public boolean analyse(FResult pres) { - log.trace("[Analytics] {}", pres); - - Endpoint ep = pres.getEndpointResult().getEndpoint(); - - InteroperabilityView fview = getView(ep); - EPView epview = getEPView(ep); - - List sparql1Feat = new ArrayList(); - List sparql11Feat = new ArrayList(); - - int SPARQL1 = 0, SPARQL11 = 0; - for (Entry ent : pres.getResults().entrySet()) { - String key = ent.getKey().toString(); - Run run = ent.getValue().getRun(); - - String q = SpecificFTask.valueOf(key).toString().toLowerCase(); - if (key.contains("SPARQL1_")) { - q = q.replaceAll("sparql10/", "").replace(".rq", ""); - EPViewInteroperabilityData t = - new EPViewInteroperabilityData(q, false, run.getException()); - - if (run.getException() == null) { - SPARQL1++; - t.setValue(true); - } - sparql1Feat.add(t); - } else if (key.contains("SPARQL11_")) { - q = q.replaceAll("sparql11/", "").replace(".rq", ""); - EPViewInteroperabilityData t = - new EPViewInteroperabilityData(q, false, run.getException()); - - if (run.getException() == null) { - SPARQL11++; - t.setValue(true); - } - sparql11Feat.add(t); - } - } + @Override 
+ public boolean analyse(FResult pres) { + log.trace("[Analytics] {}", pres); - fview.setNbCompliantSPARQL1Features(SPARQL1); - fview.setNbCompliantSPARQL11Features(SPARQL11); - epview.getInteroperability().setSPARQL1Features(sparql1Feat); - epview.getInteroperability().setSPARQL11Features(sparql11Feat); + Endpoint ep = pres.getEndpointResult().getEndpoint(); - fview.setLastUpdate(pres.getEndpointResult().getEnd()); + InteroperabilityView fview = getView(ep); + EPView epview = getEPView(ep); - _db.update(fview); - _db.update(epview); - return true; - } + List sparql1Feat = new ArrayList(); + List sparql11Feat = new ArrayList(); - private InteroperabilityView getView(Endpoint ep) { - InteroperabilityView view = null; - List views = - _db.getResults(ep, InteroperabilityView.class, InteroperabilityView.SCHEMA$); - if (views.size() != 1) { - log.warn("We have {} FeatureView, expected was 1", views.size()); + int SPARQL1 = 0, SPARQL11 = 0; + for (Entry ent : pres.getResults().entrySet()) { + String key = ent.getKey().toString(); + Run run = ent.getValue().getRun(); + + String q = SpecificFTask.valueOf(key).toString().toLowerCase(); + if (key.contains("SPARQL1_")) { + q = q.replaceAll("sparql10/", "").replace(".rq", ""); + EPViewInteroperabilityData t = new EPViewInteroperabilityData(q, false, run.getException()); + + if (run.getException() == null) { + SPARQL1++; + t.setValue(true); } - if (views.size() == 0) { - view = new InteroperabilityView(); - view.setEndpoint(ep); - view.setNbCompliantSPARQL11Features(-1); - view.setNbCompliantSPARQL1Features(-1); - _db.insert(view); - - } else { - view = views.get(0); + sparql1Feat.add(t); + } else if (key.contains("SPARQL11_")) { + q = q.replaceAll("sparql11/", "").replace(".rq", ""); + EPViewInteroperabilityData t = new EPViewInteroperabilityData(q, false, run.getException()); + + if (run.getException() == null) { + SPARQL11++; + t.setValue(true); } - return view; + sparql11Feat.add(t); + } + } + + 
fview.setNbCompliantSPARQL1Features(SPARQL1); + fview.setNbCompliantSPARQL11Features(SPARQL11); + epview.getInteroperability().setSPARQL1Features(sparql1Feat); + epview.getInteroperability().setSPARQL11Features(sparql11Feat); + + fview.setLastUpdate(pres.getEndpointResult().getEnd()); + + _db.update(fview); + _db.update(epview); + return true; + } + + private InteroperabilityView getView(Endpoint ep) { + InteroperabilityView view = null; + List views = + _db.getResults(ep, InteroperabilityView.class, InteroperabilityView.SCHEMA$); + if (views.size() != 1) { + log.warn("We have {} FeatureView, expected was 1", views.size()); + } + if (views.size() == 0) { + view = new InteroperabilityView(); + view.setEndpoint(ep); + view.setNbCompliantSPARQL11Features(-1); + view.setNbCompliantSPARQL1Features(-1); + _db.insert(view); + + } else { + view = views.get(0); } + return view; + } } diff --git a/backend/src/main/java/sparqles/analytics/IndexViewAnalytics.java b/backend/src/main/java/sparqles/analytics/IndexViewAnalytics.java index cf4314d6..dbb6dc70 100644 --- a/backend/src/main/java/sparqles/analytics/IndexViewAnalytics.java +++ b/backend/src/main/java/sparqles/analytics/IndexViewAnalytics.java @@ -1,15 +1,8 @@ package sparqles.analytics; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.TreeMap; -import java.util.TreeSet; +import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.commons.math3.stat.descriptive.SummaryStatistics; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -17,6 +10,7 @@ import sparqles.avro.analytics.EPView; import sparqles.avro.analytics.EPViewAvailability; import sparqles.avro.analytics.EPViewAvailabilityDataPoint; +import sparqles.avro.analytics.EPViewCalculation; import 
sparqles.avro.analytics.EPViewDiscoverability; import sparqles.avro.analytics.EPViewDiscoverabilityData; import sparqles.avro.analytics.EPViewInteroperability; @@ -26,6 +20,9 @@ import sparqles.avro.analytics.EPViewPerformanceDataValues; import sparqles.avro.analytics.Index; import sparqles.avro.analytics.IndexAvailabilityDataPoint; +import sparqles.avro.analytics.IndexViewCalculation; +import sparqles.avro.analytics.IndexViewCalculationData; +import sparqles.avro.analytics.IndexViewCalculationDataValues; import sparqles.avro.analytics.IndexViewDiscoverability; import sparqles.avro.analytics.IndexViewDiscoverabilityData; import sparqles.avro.analytics.IndexViewDiscoverabilityDataValues; @@ -35,550 +32,702 @@ import sparqles.avro.analytics.IndexViewPerformance; import sparqles.avro.analytics.IndexViewPerformanceData; import sparqles.avro.analytics.IndexViewPerformanceDataValues; -import sparqles.core.CONSTANTS; +import sparqles.core.SPARQLESProperties; import sparqles.core.Task; import sparqles.utils.MongoDBManager; public class IndexViewAnalytics implements Task { - private static final Logger log = LoggerFactory.getLogger(IndexViewAnalytics.class); - final int askCold = 0, askWarm = 1, joinCold = 2, joinWarm = 3; - final int sparql1_solMods = 0, - sparql1_com = 1, - sparql1_graph = 2, - sparql11_agg = 3, - sparql11_filter = 4, - sparql11_other = 5; - private MongoDBManager _dbm; - - @Override - public Index call() throws Exception { - - // get the index view - Collection idxs = _dbm.get(Index.class, Index.SCHEMA$); - Index idx = null; - if (idxs.size() == 0) { - idx = createIndex(); - _dbm.insert(idx); - } else if (idxs.size() > 1) { - // log.warn("Too many results"); - } else idx = idxs.iterator().next(); - - // get epview - Collection epviews = _dbm.get(EPView.class, EPView.SCHEMA$); - log.info("Found {} idx views and {} epviews", idxs.size(), epviews.size()); - - // Prepare aggregated analytics - Map weekHist = new HashMap(); - - SummaryStatistics[] perfStats = 
{ - new SummaryStatistics(), - new SummaryStatistics(), - new SummaryStatistics(), - new SummaryStatistics() - }; - - SimpleHistogram[] interStats = { - new SimpleHistogram(), - new SimpleHistogram(), - new SimpleHistogram(), - new SimpleHistogram(), - new SimpleHistogram(), - new SimpleHistogram() - }; - - Count[] discoStats = {new Count(), new Count()}; - - // iterate over all epviews and analyse them - for (EPView epv : epviews) { - System.err.println(epv); - // analyse availability - analyseAvailability(epv.getAvailability(), weekHist); - - // analyse performance - analysePerformance(epv.getPerformance(), perfStats); - - // analyse interoperability - analyseInteroperability(epv.getInteroperability(), interStats); - - // analyse interoperability - analyseDiscoverability(epv.getDiscoverability(), discoStats); - } - - // update the index view - updateAvailabilityStats(idx, weekHist); + private static final Logger log = LoggerFactory.getLogger(IndexViewAnalytics.class); + + private MongoDBManager _dbm; + final int askCold = 0, askWarm = 1, joinCold = 2, joinWarm = 3; + final int sparql1_solMods = 0, + sparql1_com = 1, + sparql1_graph = 2, + sparql11_agg = 3, + sparql11_filter = 4, + sparql11_other = 5; + + @Override + public Index call() throws Exception { + + // get the index view + Collection idxs = _dbm.get(Index.class, Index.SCHEMA$); + Index idx = null; + if (idxs == null || idxs.isEmpty()) { + idx = createIndex(); + _dbm.insert(idx); + } else if (idxs.size() > 1) { + log.error("Too many results"); + } else { + idx = idxs.iterator().next(); + } - // update performance stats - updatePerformanceStats(idx, perfStats); + // get epview + Collection epviews = _dbm.get(EPView.class, EPView.SCHEMA$); + log.info("Found {} idx views and {} epviews", idxs.size(), epviews.size()); - // update interoperability stats - updateInteroperability(idx, interStats); + // Prepare aggregated analytics + Map weekHist = new HashMap(); - updateDiscoverability(idx, discoStats); + 
SummaryStatistics[] perfStats = { + new SummaryStatistics(), + new SummaryStatistics(), + new SummaryStatistics(), + new SummaryStatistics() + }; + var thresholdStats = new DescriptiveStatistics(); - log.info("Updated view {}", idx); - _dbm.update(idx); + SimpleHistogram[] interStats = { + new SimpleHistogram(), + new SimpleHistogram(), + new SimpleHistogram(), + new SimpleHistogram(), + new SimpleHistogram(), + new SimpleHistogram() + }; - return idx; - } + Count[] discoStats = {new Count(), new Count()}; - private void analyseDiscoverability( - EPViewDiscoverability discoverability, Count[] discoStats) { - discoStats[1].add(discoverability.getServerName().toString()); - - boolean sd = false, voidd = false; - if (discoverability.getSDDescription().size() != 0) { - for (EPViewDiscoverabilityData d : discoverability.getSDDescription()) { - if (d.getValue()) { - discoStats[0].add("sd"); - sd = true; - break; - } - } - } - if (discoverability.getVoIDDescription().size() != 0) { - for (EPViewDiscoverabilityData d : discoverability.getVoIDDescription()) { - if (d.getValue()) { - discoStats[0].add("void"); - voidd = true; - break; - } - } - } - if (!voidd && !sd) { - discoStats[0].add("no"); - } + Count[] calcStats = {new Count(), new Count(), new Count()}; - discoStats[0].add("total"); - } + // analyse availability + recalculateAvailabilityMonthly(); - private void updateDiscoverability(Index idx, Count[] object) { - - IndexViewDiscoverability iv = idx.getDiscoverability(); - Count server = object[1]; - - List l = new ArrayList(); - List lv = - new ArrayList(); - - TreeSet set = - new TreeSet( - new Comparator() { - - @Override - public int compare( - IndexViewDiscoverabilityDataValues o1, - IndexViewDiscoverabilityDataValues o2) { - int diff = o1.getValue().compareTo(o2.getValue()); - if (diff == 0) diff = -1; - return diff; - } - }); - - for (String k : server.keySet()) { - set.add( - new IndexViewDiscoverabilityDataValues( - k, server.get(k) / (double) 
server.getTotal())); - } + // iterate over all epviews and analyse them + for (EPView epv : epviews) { + log.trace("EPView: {}", epv); + analyseAvailability(epv.getAvailability(), weekHist); - for (IndexViewDiscoverabilityDataValues d : set.descendingSet()) { - lv.add(d); - } - l.add(new IndexViewDiscoverabilityData("Server Names", lv)); - iv.setServerName(l); - - Count stats = object[0]; - int v = 0; - if (stats.containsKey("no")) { - v = stats.get("no"); - iv.setNoDescription(v / (double) stats.get("total")); - } else iv.setNoDescription(0D); - - v = stats.getOrDefault("sd", 0); - - Integer totalVal = stats.get("total"); - if (totalVal != null) { - iv.setSDDescription(v / (double) totalVal); - - v = stats.getOrDefault("void", 0); - iv.setVoIDDescription(v / (double) totalVal); - } else { - log.error("Total value is missing"); - iv.setSDDescription(-1.0); - iv.setVoIDDescription(-1.0); - } - } + // analyse performance + analysePerformance(epv.getPerformance(), perfStats, thresholdStats); - private void updateInteroperability(Index idx, SimpleHistogram[] interStats) { - IndexViewInteroperability iv = idx.getInteroperability(); + // analyse interoperability + analyseInteroperability(epv.getInteroperability(), interStats); - List v = new ArrayList(); - iv.setData(v); + // analyse discoverability + analyseDiscoverability(epv.getDiscoverability(), discoStats); - v.add(updateSPARQL1(interStats)); - v.add(updateSPARQL11(interStats)); + // analyse calculation + analyseCalculation(epv.getCalculation(), calcStats); } - private IndexViewInterData updateSPARQL11(SimpleHistogram[] interStats) { - IndexViewInterData ivd = new IndexViewInterData(); - ivd.setColor("#2ca02c"); - ivd.setKey("SPARQL 1.1"); - - ArrayList v = new ArrayList(); - // sparql1 mod - double perc = - interStats[sparql11_agg].bin[3] / (double) interStats[sparql11_agg].sampleSize; - v.add(new IndexViewInterDataValues("Aggregate", perc)); - - // sparql1 com - perc = interStats[sparql11_filter].bin[3] / (double) 
interStats[sparql11_filter].sampleSize; - v.add(new IndexViewInterDataValues("Filter", perc)); - - // sparql1 graph - perc = interStats[sparql11_other].bin[3] / (double) interStats[sparql11_other].sampleSize; - v.add(new IndexViewInterDataValues("Other", perc)); - - ivd.setData(v); + // update the index view + updateAvailabilityStats(idx, weekHist); + + // update performance stats + updatePerformanceStats(idx, perfStats, thresholdStats); + + // update interoperability stats + updateInteroperability(idx, interStats); + + // update discoverability stats + updateDiscoverability(idx, discoStats); + + // update calculation stats + updateCalculation(idx, calcStats); + + log.info("Updated view {}", idx); + _dbm.update(idx); + + return idx; + } + + private void recalculateAvailabilityMonthly() { + log.info("Recalculating availability monthly"); + AEvol.recalculateMonthly(_dbm); + } + + private void analyseCalculation(EPViewCalculation calculation, Count[] calcStats) { + double coherence = calculation.getCoherence(); + if (coherence < 0) + ; // DO NOTHING + else if (coherence < 0.25) calcStats[1].add("[0.00-0.25["); + else if (coherence < 0.5) calcStats[1].add("[0.25-0.50["); + else if (coherence < 0.75) calcStats[1].add("[0.50-0.75["); + else if (coherence < 0.95) calcStats[1].add("[0.75-0.95["); + else calcStats[1].add("[0.95-1.00]"); + + double rs = calculation.getRS(); + if (rs < 0) + ; // DO NOTHING + else if (rs < 10) calcStats[2].add("[0-10["); + else if (rs < 100) calcStats[2].add("[10-100["); + else if (rs < 1000) calcStats[2].add("[100-1000["); + else if (rs < 10000) calcStats[2].add("[1000-10000["); + else calcStats[2].add("[10000-)"); + + if (!calculation.getVoID().toString().equals("")) calcStats[0].add("VoID"); + if (calculation.getVoIDPart()) calcStats[0].add("VoIDPart"); + if (!calculation.getSD().toString().equals("")) calcStats[0].add("SD"); + if (calculation.getSDPart()) calcStats[0].add("SDPart"); + if (calculation.getCoherence() >= 0) 
calcStats[0].add("Coherence"); + if (calculation.getRS() >= 0) calcStats[0].add("RS"); + + calcStats[0].add("total"); + } + + private void updateCalculation(Index idx, Count[] object) { + + IndexViewCalculation iv = idx.getCalculation(); + + Count coherence = object[1]; + + List l1 = new ArrayList(); + List lv1 = new ArrayList(); + + TreeSet set1 = + new TreeSet( + new Comparator() { + + @Override + public int compare( + IndexViewCalculationDataValues o1, IndexViewCalculationDataValues o2) { + int diff = + Comparator.comparingDouble( + (IndexViewCalculationDataValues value) -> value.getValue()) + .compare(o1, o2); + if (diff == 0) diff = -1; + return diff; + } + }); + + for (String k : coherence.keySet()) { + set1.add( + new IndexViewCalculationDataValues(k, coherence.get(k) / (double) coherence.getTotal())); + } - return ivd; + for (IndexViewCalculationDataValues d : set1.descendingSet()) { + lv1.add(d); } + l1.add(new IndexViewCalculationData("Coherence", lv1)); - private IndexViewInterData updateSPARQL1(SimpleHistogram[] interStats) { - IndexViewInterData ivd = new IndexViewInterData(); - ivd.setColor("#1f77b4"); - ivd.setKey("SPARQL 1.0"); + iv.setCoherences(l1); - ArrayList v = new ArrayList(); - // sparql1 mod - double perc = - interStats[sparql1_solMods].bin[3] - / (double) interStats[sparql1_solMods].sampleSize; - v.add(new IndexViewInterDataValues("Solution Modifiers", perc)); + Count rs = object[2]; - // sparql1 com - perc = interStats[sparql1_com].bin[3] / (double) interStats[sparql1_com].sampleSize; - v.add(new IndexViewInterDataValues("Common Operators and Filters", perc)); + List l2 = new ArrayList(); + List lv2 = new ArrayList(); - // sparql1 graph - perc = interStats[sparql1_graph].bin[3] / (double) interStats[sparql1_graph].sampleSize; - v.add(new IndexViewInterDataValues("Graph and other", perc)); + TreeSet set2 = + new TreeSet( + new Comparator() { - ivd.setData(v); + @Override + public int compare( + IndexViewCalculationDataValues o1, 
IndexViewCalculationDataValues o2) { + int diff = + Comparator.comparingDouble( + (IndexViewCalculationDataValues value) -> value.getValue()) + .compare(o1, o2); + if (diff == 0) diff = -1; + return diff; + } + }); - return ivd; + for (String k : rs.keySet()) { + set2.add(new IndexViewCalculationDataValues(k, rs.get(k) / (double) rs.getTotal())); } - private void analyseInteroperability( - EPViewInteroperability interoperability, SimpleHistogram[] interStats) { - boolean[] all = new boolean[6]; - Arrays.fill(all, true); - - for (EPViewInteroperabilityData d : interoperability.getSPARQL1Features()) { - - String l = d.getLabel().toString(); - - boolean bv = d.getValue(); - /* - SEL[.]*ORDERBY*OFFSET - SEL[.]*ORDERBY-ASC - SEL[.]*ORDERBY-DESC - SEL[.]*ORDERBY - SEL-DISTINCT[.] - SEL-REDUCED[.] - */ - if (l.contains("orderby") || l.contains("distinct") || l.contains("reduced")) { - all[sparql1_solMods] = all[sparql1_solMods] && bv; - } - - /* - SEL[.] - SEL[JOIN] - SEL[OPT] - SEL[UNION] - -- matches fil -- - SEL[FIL(!BOUND)] - SEL[FIL(BLANK)] - SEL[FIL(BOOL)] - SEL[FIL(IRI)] - SEL[FIL(NUM)] - SEL[FIL(REGEX)] - SEL[FIL(REGEX-i)] - SEL[FIL(STR)] - SEL[BNODE] -> bnode - SEL[EMPTY] -> empty - */ - else if (l.contains("fil") - || l.contains("bnode") - || l.contains("empty") - || l.contains("union") - || l.contains("opt") - || l.contains("join") - || l.contains("sel[.]")) { - - all[sparql1_com] = all[sparql1_com] && bv; - - } - /* - SEL[FROM] - SEL[GRAPH] - SEL[GRAPH;JOIN] - SEL[GRAPH;UNION] - CON[.] - CON[JOIN] - CON[OPT] - ASK[.] 
- */ - else if (l.contains("graph") - || l.contains("con") - || l.contains("ask") - || l.contains("from")) { - all[sparql1_graph] = all[sparql1_graph] && bv; - } else { - log.info("Could not match {}", l); - } - } - for (EPViewInteroperabilityData d : interoperability.getSPARQL11Features()) { - String l = d.getLabel().toString(); - boolean bv = d.getValue(); - /* - Aggregate - SEL[AVG]*GROUPBY - SEL[AVG] - SEL[COUNT]*GROUPBY - SEL[MAX] - SEL[MIN] - SEL[MINUS] - SEL[SUM] - */ - if (l.contains("avg") - || l.contains("count") - || l.contains("max") - || l.contains("min") - || l.contains("minus") - || l.contains("sum")) { - all[sparql11_agg] = all[sparql11_agg] && bv; - } - /* - Filter - SEL[FIL(!EXISTS)] - SEL[FIL(ABS)] - SEL[FIL(CONTAINS)] - SEL[FIL(EXISTS)] - SEL[FIL(START)] - */ - else if (l.contains("fil") || l.contains("distinct") || l.contains("reduced")) { - all[sparql11_filter] = all[sparql11_filter] && bv; - } - /* - Other - ASK[FIL(!IN)] - CON-[.] - SEL[BIND] - SEL[PATHS] - SEL[SERVICE] - SEL[SUBQ] - SEL[SUBQ;GRAPH] - SEL[VALUES] - */ - else if (l.contains("ask") - || l.contains("con") - || l.contains("bind") - || l.contains("paths") - || l.contains("service") - || l.contains("subq") - || l.contains("values")) { - all[sparql11_other] = all[sparql11_other] && bv; - } else { - log.info("Could not match {}", l); - } - } - - boolean update = false; - for (int i = 0; i < all.length; i++) { - update = update || all[i]; + for (IndexViewCalculationDataValues d : set2.descendingSet()) { + lv2.add(d); + } + l2.add(new IndexViewCalculationData("RS", lv2)); + + iv.setRss(l2); + + Count stats = object[0]; + double totalVal = (double) stats.get("total"); + iv.setVoID(stats.get("VoID") / totalVal); + iv.setVoIDPart(stats.get("VoIDPart") / totalVal); + iv.setSD(stats.get("SD") / totalVal); + iv.setSDPart(stats.get("SDPart") / totalVal); + iv.setCoherence(stats.get("Coherence") / totalVal); + iv.setRS(stats.get("RS") / totalVal); + } + + private void analyseDiscoverability( 
+ EPViewDiscoverability discoverability, Count[] discoStats) { + discoStats[1].add(discoverability.getServerName().toString()); + + boolean sd = false, voidd = false; + if (discoverability.getSDDescription().size() != 0) { + for (EPViewDiscoverabilityData d : discoverability.getSDDescription()) { + if (d.getValue()) { + discoStats[0].add("sd"); + sd = true; + break; } - - if (update) { - for (int i = 0; i < all.length; i++) { - if (all[i]) interStats[i].add(1D); - else interStats[i].add(0D); - } + } + } + if (discoverability.getVoIDDescription().size() != 0) { + for (EPViewDiscoverabilityData d : discoverability.getVoIDDescription()) { + if (d.getValue()) { + discoStats[0].add("void"); + voidd = true; + break; } - // System.out.println(Arrays.toString(interStats)); - + } + } + if (!voidd && !sd) { + discoStats[0].add("no"); } - private void analysePerformance(EPViewPerformance performance, SummaryStatistics[] perfStats) { - update(performance.getAsk(), perfStats[askCold], perfStats[askWarm]); - update(performance.getJoin(), perfStats[joinCold], perfStats[joinWarm]); + discoStats[0].add("total"); + } + + private void updateDiscoverability(Index idx, Count[] object) { + + IndexViewDiscoverability iv = idx.getDiscoverability(); + Count server = object[1]; + + List l = new ArrayList(); + List lv = + new ArrayList(); + + TreeSet set = + new TreeSet( + new Comparator() { + + @Override + public int compare( + IndexViewDiscoverabilityDataValues o1, IndexViewDiscoverabilityDataValues o2) { + int diff = + Comparator.comparingDouble( + (IndexViewDiscoverabilityDataValues value) -> value.getValue()) + .compare(o1, o2); + if (diff == 0) diff = -1; + return diff; + } + }); + + for (String k : server.keySet()) { + set.add( + new IndexViewDiscoverabilityDataValues(k, server.get(k) / (double) server.getTotal())); } - private void analyseAvailability( - EPViewAvailability availability, Map weekHist) { - for (EPViewAvailabilityDataPoint value : availability.getData().getValues()) { - 
update(value, weekHist); - } + for (IndexViewDiscoverabilityDataValues d : set.descendingSet()) { + lv.add(d); + } + l.add(new IndexViewDiscoverabilityData("Server Names", lv)); + iv.setServerName(l); + + Count stats = object[0]; + int v = 0; + if (stats.containsKey("no")) { + v = stats.get("no"); + iv.setNoDescription(v / (double) stats.get("total")); + } else iv.setNoDescription(0D); + + v = stats.getOrDefault("sd", 0); + + Integer totalVal = stats.get("total"); + if (totalVal != null) { + iv.setSDDescription(v / (double) totalVal); + + v = stats.getOrDefault("void", 0); + iv.setVoIDDescription(v / (double) totalVal); + } else { + log.error("Total value is missing"); + iv.setSDDescription(-1.0); + iv.setVoIDDescription(-1.0); + } + } + + private void updateInteroperability(Index idx, SimpleHistogram[] interStats) { + IndexViewInteroperability iv = idx.getInteroperability(); + + List v = new ArrayList(); + iv.setData(v); + + v.add(updateSPARQL1(interStats)); + v.add(updateSPARQL11(interStats)); + } + + private IndexViewInterData updateSPARQL11(SimpleHistogram[] interStats) { + IndexViewInterData ivd = new IndexViewInterData(); + ivd.setColor("#2ca02c"); + ivd.setKey("SPARQL 1.1"); + + ArrayList v = new ArrayList(); + // sparql1 mod + double perc = interStats[sparql11_agg].bin[3] / (double) interStats[sparql11_agg].sampleSize; + v.add(new IndexViewInterDataValues("Aggregate", perc)); + + // sparql1 com + perc = interStats[sparql11_filter].bin[3] / (double) interStats[sparql11_filter].sampleSize; + v.add(new IndexViewInterDataValues("Filter", perc)); + + // sparql1 graph + perc = interStats[sparql11_other].bin[3] / (double) interStats[sparql11_other].sampleSize; + v.add(new IndexViewInterDataValues("Other", perc)); + + ivd.setData(v); + + return ivd; + } + + private IndexViewInterData updateSPARQL1(SimpleHistogram[] interStats) { + IndexViewInterData ivd = new IndexViewInterData(); + ivd.setColor("#1f77b4"); + ivd.setKey("SPARQL 1.0"); + + ArrayList v = new 
ArrayList(); + // sparql1 mod + double perc = + interStats[sparql1_solMods].bin[3] / (double) interStats[sparql1_solMods].sampleSize; + v.add(new IndexViewInterDataValues("Solution Modifiers", perc)); + + // sparql1 com + perc = interStats[sparql1_com].bin[3] / (double) interStats[sparql1_com].sampleSize; + v.add(new IndexViewInterDataValues("Common Operators and Filters", perc)); + + // sparql1 graph + perc = interStats[sparql1_graph].bin[3] / (double) interStats[sparql1_graph].sampleSize; + v.add(new IndexViewInterDataValues("Graph and other", perc)); + + ivd.setData(v); + + return ivd; + } + + private void analyseInteroperability( + EPViewInteroperability interoperability, SimpleHistogram[] interStats) { + boolean[] all = new boolean[6]; + Arrays.fill(all, true); + + for (EPViewInteroperabilityData d : interoperability.getSPARQL1Features()) { + + String l = d.getLabel().toString(); + + boolean bv = d.getValue(); + /* + SEL[.]*ORDERBY*OFFSET + SEL[.]*ORDERBY-ASC + SEL[.]*ORDERBY-DESC + SEL[.]*ORDERBY + SEL-DISTINCT[.] + SEL-REDUCED[.] + */ + if (l.contains("orderby") || l.contains("distinct") || l.contains("reduced")) { + all[sparql1_solMods] = all[sparql1_solMods] && bv; + } + + /* + SEL[.] + SEL[JOIN] + SEL[OPT] + SEL[UNION] + -- matches fil -- + SEL[FIL(!BOUND)] + SEL[FIL(BLANK)] + SEL[FIL(BOOL)] + SEL[FIL(IRI)] + SEL[FIL(NUM)] + SEL[FIL(REGEX)] + SEL[FIL(REGEX-i)] + SEL[FIL(STR)] + SEL[BNODE] -> bnode + SEL[EMPTY] -> empty + */ + else if (l.contains("fil") + || l.contains("bnode") + || l.contains("empty") + || l.contains("union") + || l.contains("opt") + || l.contains("join") + || l.contains("sel[.]")) { + + all[sparql1_com] = all[sparql1_com] && bv; + + } + /* + SEL[FROM] + SEL[GRAPH] + SEL[GRAPH;JOIN] + SEL[GRAPH;UNION] + CON[.] + CON[JOIN] + CON[OPT] + ASK[.] 
+ */ + else if (l.contains("graph") + || l.contains("con") + || l.contains("ask") + || l.contains("from")) { + all[sparql1_graph] = all[sparql1_graph] && bv; + } else { + log.info("Could not match {}", l); + } + } + for (EPViewInteroperabilityData d : interoperability.getSPARQL11Features()) { + String l = d.getLabel().toString(); + boolean bv = d.getValue(); + /* + Aggregate + SEL[AVG]*GROUPBY + SEL[AVG] + SEL[COUNT]*GROUPBY + SEL[MAX] + SEL[MIN] + SEL[MINUS] + SEL[SUM] + */ + if (l.contains("avg") + || l.contains("count") + || l.contains("max") + || l.contains("min") + || l.contains("minus") + || l.contains("sum")) { + all[sparql11_agg] = all[sparql11_agg] && bv; + } + /* + Filter + SEL[FIL(!EXISTS)] + SEL[FIL(ABS)] + SEL[FIL(CONTAINS)] + SEL[FIL(EXISTS)] + SEL[FIL(START)] + */ + else if (l.contains("fil") || l.contains("distinct") || l.contains("reduced")) { + all[sparql11_filter] = all[sparql11_filter] && bv; + } + /* + Other + ASK[FIL(!IN)] + CON-[.] + SEL[BIND] + SEL[PATHS] + SEL[SERVICE] + SEL[SUBQ] + SEL[SUBQ;GRAPH] + SEL[VALUES] + */ + else if (l.contains("ask") + || l.contains("con") + || l.contains("bind") + || l.contains("paths") + || l.contains("service") + || l.contains("subq") + || l.contains("values")) { + all[sparql11_other] = all[sparql11_other] && bv; + } else { + log.info("Could not match {}", l); + } } - private void updatePerformanceStats(Index idx, SummaryStatistics[] perfStats) { - ArrayList data = new ArrayList(); - List l = new ArrayList(); - IndexViewPerformanceData cold = new IndexViewPerformanceData("Cold Tests", "#1f77b4", l); - l = new ArrayList(); - IndexViewPerformanceData warm = new IndexViewPerformanceData("Warm Tests", "#2ca02c", l); - data.add(cold); - data.add(warm); - - double v = (perfStats[askCold].getN() == 0) ? -1D : perfStats[askCold].getMean(); - cold.getData().add(new IndexViewPerformanceDataValues("Average ASK", v)); - v = (perfStats[joinCold].getN() == 0) ? 
-1D : perfStats[joinCold].getMean(); - cold.getData().add(new IndexViewPerformanceDataValues("Average JOIN", v)); - - v = (perfStats[askWarm].getN() == 0) ? -1D : perfStats[askWarm].getMean(); - warm.getData().add(new IndexViewPerformanceDataValues("Average ASK", v)); - v = (perfStats[joinWarm].getN() == 0) ? -1D : perfStats[joinWarm].getMean(); - warm.getData().add(new IndexViewPerformanceDataValues("Average JOIN", v)); - - idx.getPerformance().setThreshold(-1L); - idx.getPerformance().setData(data); + boolean update = false; + for (int i = 0; i < all.length; i++) { + update = update || all[i]; } - private void updateAvailabilityStats(Index idx, Map weekHist) { - List aidxs = idx.getAvailability(); - // update availability stats - for (Entry week : weekHist.entrySet()) { - SimpleHistogram sh = week.getValue(); - - int total = sh.sampleSize; - - for (AvailabilityIndex aidx : aidxs) { - int value = 0; - if (aidx.getKey().equals("[0;5]")) value = sh.bin[0]; - if (aidx.getKey().equals("]5;90]")) value = sh.bin[1]; - if (aidx.getKey().equals("]90;95]")) value = sh.bin[2]; - if (aidx.getKey().equals("]95;100]")) value = sh.bin[3]; - - boolean exists = false; - for (IndexAvailabilityDataPoint i : aidx.getValues()) { - if (i.getX().equals(week.getKey())) { - exists = true; - i.setY(value / (double) total); - } - } - if (!exists) - aidx.getValues() - .add( - new IndexAvailabilityDataPoint( - week.getKey(), value / (double) total)); - } - } + if (update) { + for (int i = 0; i < all.length; i++) { + if (all[i]) interStats[i].add(1D); + else interStats[i].add(0D); + } + } + // System.out.println(Arrays.toString(interStats)); + + } + + private void analysePerformance( + EPViewPerformance performance, + SummaryStatistics[] perfStats, + DescriptiveStatistics thresholdStats) { + update(performance.getAsk(), perfStats[askCold], perfStats[askWarm]); + update(performance.getJoin(), perfStats[joinCold], perfStats[joinWarm]); + updateThreshold(performance.getThreshold(), 
thresholdStats); + } + + private void updateThreshold(long threshold, DescriptiveStatistics thresholdStats) { + log.debug("Found threshold: {}", threshold); + // 100002 is the LIMIT used to detect a threshold + if (threshold > 0 && threshold < 100_002) { + thresholdStats.addValue(threshold); } + } - private void update( - List results, SummaryStatistics cold, SummaryStatistics warm) { - for (EPViewPerformanceData pdata : results) { - if (pdata.getKey().toString().contains("Cold")) { - for (EPViewPerformanceDataValues v : pdata.getData()) { - cold.addValue(v.getValue()); - } - } - if (pdata.getKey().toString().contains("Warm")) { - for (EPViewPerformanceDataValues v : pdata.getData()) { - warm.addValue(v.getValue()); - } - } + private void analyseAvailability( + EPViewAvailability availability, Map weekHist) { + for (EPViewAvailabilityDataPoint value : availability.getData().getValues()) { + update(value, weekHist); + } + } + + private void updatePerformanceStats( + Index idx, SummaryStatistics[] perfStats, DescriptiveStatistics thresholdStats) { + ArrayList data = new ArrayList(); + List l = new ArrayList(); + IndexViewPerformanceData cold = new IndexViewPerformanceData("Cold Tests", "#1f77b4", l); + l = new ArrayList(); + IndexViewPerformanceData warm = new IndexViewPerformanceData("Warm Tests", "#2ca02c", l); + data.add(cold); + data.add(warm); + + double v = (perfStats[askCold].getN() == 0) ? -1D : perfStats[askCold].getMean(); + cold.getData().add(new IndexViewPerformanceDataValues("Average ASK", v)); + v = (perfStats[joinCold].getN() == 0) ? -1D : perfStats[joinCold].getMean(); + cold.getData().add(new IndexViewPerformanceDataValues("Average JOIN", v)); + + v = (perfStats[askWarm].getN() == 0) ? -1D : perfStats[askWarm].getMean(); + warm.getData().add(new IndexViewPerformanceDataValues("Average ASK", v)); + v = (perfStats[joinWarm].getN() == 0) ? 
-1D : perfStats[joinWarm].getMean(); + warm.getData().add(new IndexViewPerformanceDataValues("Average JOIN", v)); + + // median + var medianThresh = (long) thresholdStats.getPercentile(50); + log.debug("Median thresh: {}", medianThresh); + idx.getPerformance().setThreshold(medianThresh); + idx.getPerformance().setData(data); + } + + private void updateAvailabilityStats(Index idx, Map weekHist) { + List aidxs = idx.getAvailability(); + // update availability stats + for (Entry week : weekHist.entrySet()) { + SimpleHistogram sh = week.getValue(); + + int total = sh.sampleSize; + + for (AvailabilityIndex aidx : aidxs) { + int value = 0; + if (aidx.getKey().equals("[0;5]")) value = sh.bin[0]; + if (aidx.getKey().equals("]5;90]")) value = sh.bin[1]; + if (aidx.getKey().equals("]90;95]")) value = sh.bin[2]; + if (aidx.getKey().equals("]95;100]")) value = sh.bin[3]; + + boolean exists = false; + for (IndexAvailabilityDataPoint i : aidx.getValues()) { + if (i.getX().equals(week.getKey())) { + exists = true; + i.setY(value / (double) total); + } } + if (!exists) + aidx.getValues() + .add(new IndexAvailabilityDataPoint(week.getKey(), value / (double) total)); + } } + } - private Index createIndex() { - Index idx = new Index(); - idx.setEndpoint(CONSTANTS.SPARQLES); + private void update( + List results, SummaryStatistics cold, SummaryStatistics warm) { - AvailabilityIndex aidx = - new AvailabilityIndex("[0;5]", new ArrayList()); - List aidxs = new ArrayList(); - - aidxs.add(aidx); - - aidx = new AvailabilityIndex("]5;90]", new ArrayList()); - aidxs.add(aidx); - - aidx = new AvailabilityIndex("]90;95]", new ArrayList()); - aidxs.add(aidx); - - aidx = new AvailabilityIndex("]95;100]", new ArrayList()); - aidxs.add(aidx); - - idx.setAvailability(aidxs); + for (EPViewPerformanceData pdata : results) { + if (pdata.getKey().toString().contains("Cold")) { + for (EPViewPerformanceDataValues v : pdata.getData()) { + cold.addValue(v.getValue()); + } + } + if 
(pdata.getKey().toString().contains("Warm")) { + for (EPViewPerformanceDataValues v : pdata.getData()) { + warm.addValue(v.getValue()); + } + } + } + } + + private Index createIndex() { + Index idx = new Index(); + idx.setEndpoint(SPARQLESProperties.getSparqlesEndpoint()); + + AvailabilityIndex aidx = + new AvailabilityIndex("[0;5]", new ArrayList()); + List aidxs = new ArrayList(); + + aidxs.add(aidx); + + aidx = new AvailabilityIndex("]5;90]", new ArrayList()); + aidxs.add(aidx); + + aidx = new AvailabilityIndex("]90;95]", new ArrayList()); + aidxs.add(aidx); + + aidx = new AvailabilityIndex("]95;100]", new ArrayList()); + aidxs.add(aidx); + + idx.setAvailability(aidxs); + + IndexViewPerformance idxp = new IndexViewPerformance(); + idxp.setThreshold(-1L); + idxp.setData(new ArrayList()); + idx.setPerformance(idxp); + + IndexViewInteroperability idxi = new IndexViewInteroperability(); + idxi.setData(new ArrayList()); + idx.setInteroperability(idxi); + + IndexViewDiscoverability idxd = new IndexViewDiscoverability(); + idxd.setNoDescription(-1D); + idxd.setSDDescription(-1D); + idxd.setVoIDDescription(-1D); + idxd.setServerName(new ArrayList()); + idx.setDiscoverability(idxd); + + IndexViewCalculation idxc = new IndexViewCalculation(); + idxc.setVoID(-1D); + idxc.setVoIDPart(-1D); + idxc.setSD(-1D); + idxc.setSDPart(-1D); + idxc.setCoherence(-1D); + idxc.setRS(-1D); + idxc.setCoherences(new ArrayList()); + idxc.setRss(new ArrayList()); + idx.setCalculation(idxc); + + return idx; + } + + private void update(EPViewAvailabilityDataPoint value, Map weekHist) { + String key = "" + value.getX(); + SimpleHistogram sh = weekHist.get(key); + if (sh == null) { + sh = new SimpleHistogram(); + weekHist.put(key, sh); + } + sh.add(value.getY()); + } - IndexViewPerformance idxp = new IndexViewPerformance(); - idxp.setThreshold(-1L); - idxp.setData(new ArrayList()); - idx.setPerformance(idxp); + @Override + public void setDBManager(MongoDBManager dbm) { + _dbm = dbm; + } - 
IndexViewInteroperability idxi = new IndexViewInteroperability(); - idxi.setData(new ArrayList()); - idx.setInteroperability(idxi); + class SimpleHistogram { - IndexViewDiscoverability idxd = new IndexViewDiscoverability(); - idxd.setNoDescription(-1D); - idxd.setSDDescription(-1D); - idxd.setVoIDDescription(-1D); - idxd.setServerName(new ArrayList()); - idx.setDiscoverability(idxd); + int sampleSize = 0; + int[] bin = {0, 0, 0, 0}; - return idx; - } + public void add(Double d) { + if (d <= 0.05) bin[0]++; + else if (0.05 < d && d <= 0.9) bin[1]++; + else if (0.9 < d && d <= 0.95) bin[2]++; + else if (0.95 < d && d <= 1) bin[3]++; - private void update(EPViewAvailabilityDataPoint value, Map weekHist) { - String key = "" + value.getX(); - SimpleHistogram sh = weekHist.get(key); - if (sh == null) { - sh = new SimpleHistogram(); - weekHist.put(key, sh); - } - sh.add(value.getY()); + sampleSize++; } @Override - public void setDBManager(MongoDBManager dbm) { - _dbm = dbm; + public String toString() { + return Arrays.toString(bin) + ":" + sampleSize; } + } - class SimpleHistogram { + class Count extends TreeMap { + int sampleSize = 0; - int sampleSize = 0; - int[] bin = {0, 0, 0, 0}; - - public void add(Double d) { - if (d <= 0.05) bin[0]++; - else if (0.05 < d && d <= 0.9) bin[1]++; - else if (0.9 < d && d <= 0.95) bin[2]++; - else if (0.95 < d && d <= 1) bin[3]++; - - sampleSize++; - } + public void add(T t) { + if (this.containsKey(t)) { + this.put(t, this.get(t) + 1); + } else this.put(t, 1); - @Override - public String toString() { - return Arrays.toString(bin) + ":" + sampleSize; - } + sampleSize++; } - class Count extends TreeMap { - int sampleSize = 0; - - public void add(T t) { - if (this.containsKey(t)) { - this.put(t, this.get(t) + 1); - } else this.put(t, 1); - - sampleSize++; - } - - public double getTotal() { - // TODO Auto-generated method stub - return sampleSize; - } + public double getTotal() { + return sampleSize; } + } } diff --git 
a/backend/src/main/java/sparqles/analytics/PAnalyser.java b/backend/src/main/java/sparqles/analytics/PAnalyser.java index e65cf8a7..1d5ab046 100644 --- a/backend/src/main/java/sparqles/analytics/PAnalyser.java +++ b/backend/src/main/java/sparqles/analytics/PAnalyser.java @@ -18,143 +18,148 @@ import sparqles.utils.MongoDBManager; public class PAnalyser extends Analytics { - private static final Logger log = LoggerFactory.getLogger(PAnalyser.class); - - public PAnalyser(MongoDBManager db) { - super(db); + private static final Logger log = LoggerFactory.getLogger(PAnalyser.class); + + public PAnalyser(MongoDBManager db) { + super(db); + } + + @Override + public boolean analyse(PResult pres) { + Endpoint ep = pres.getEndpointResult().getEndpoint(); + + log.info("Analyzing PResult for endpoint={}", ep.getUri()); + log.debug("Analyse {}", pres); + + PerformanceView pview = getView(ep); + EPView epview = getEPView(ep); + + SummaryStatistics askStatsCold = new SummaryStatistics(); + SummaryStatistics askStatsWarm = new SummaryStatistics(); + SummaryStatistics joinStatsCold = new SummaryStatistics(); + SummaryStatistics joinStatsWarm = new SummaryStatistics(); + + // prepare eppview data + EPViewPerformance eppview = epview.getPerformance(); + EPViewPerformanceData askCold = + new EPViewPerformanceData( + "Cold ASK Tests", "#1f77b4", new ArrayList()); + EPViewPerformanceData askWarm = + new EPViewPerformanceData( + "Warm ASK Tests", "#2ca02c", new ArrayList()); + EPViewPerformanceData joinCold = + new EPViewPerformanceData( + "Cold JOIN Tests", "#1f77b4", new ArrayList()); + EPViewPerformanceData joinWarm = + new EPViewPerformanceData( + "Warm JOIN Tests", "#2ca02c", new ArrayList()); + + ArrayList askdata = new ArrayList(); + askdata.add(askCold); + askdata.add(askWarm); + ArrayList joindata = new ArrayList(); + joindata.add(joinCold); + joindata.add(joinWarm); + + eppview.setAsk(askdata); + eppview.setJoin(joindata); + + Map map = pres.getResults(); + long limit = 0; + + 
for (Entry ent : map.entrySet()) { + PSingleResult res = ent.getValue(); + + if (ent.getKey().toString().startsWith("ASK")) { + + askStatsCold.addValue(res.getCold().getClosetime() / (double) 1000); + askStatsWarm.addValue(res.getWarm().getClosetime() / (double) 1000); + + String key = ent.getKey().toString().replaceAll("ASK", "").toLowerCase(); + + askCold + .getData() + .add( + new EPViewPerformanceDataValues( + key, + res.getCold().getClosetime() / (double) 1000, + res.getCold().getException())); + askWarm + .getData() + .add( + new EPViewPerformanceDataValues( + key, + res.getWarm().getClosetime() / (double) 1000, + res.getWarm().getException())); + } else if (ent.getKey().toString().startsWith("JOIN")) { + joinStatsCold.addValue(res.getCold().getClosetime() / (double) 1000); + joinStatsWarm.addValue(res.getWarm().getClosetime() / (double) 1000); + + String key = ent.getKey().toString().replaceAll("JOIN", "").toLowerCase(); + + joinCold + .getData() + .add( + new EPViewPerformanceDataValues( + key, + res.getCold().getClosetime() / (double) 1000, + res.getCold().getException())); + joinWarm + .getData() + .add( + new EPViewPerformanceDataValues( + key, + res.getWarm().getClosetime() / (double) 1000, + res.getWarm().getException())); + } else if (ent.getKey().toString().startsWith("LIMIT")) { + int sol = res.getCold().getSolutions(); + if (Math.max(limit, sol) == sol) { + limit = sol; + } + sol = res.getWarm().getSolutions(); + if (Math.max(limit, sol) == sol) { + limit = sol; + } + } } + eppview.setThreshold(limit); + pview.setThreshold(limit); - @Override - public boolean analyse(PResult pres) { - log.info("Analyse {}", pres); - - Endpoint ep = pres.getEndpointResult().getEndpoint(); - - PerformanceView pview = getView(ep); - EPView epview = getEPView(ep); - - SummaryStatistics askStatsCold = new SummaryStatistics(); - SummaryStatistics askStatsWarm = new SummaryStatistics(); - SummaryStatistics joinStatsCold = new SummaryStatistics(); - SummaryStatistics 
joinStatsWarm = new SummaryStatistics(); - - // prepare eppview data - EPViewPerformance eppview = epview.getPerformance(); - EPViewPerformanceData askCold = - new EPViewPerformanceData( - "Cold ASK Tests", "#1f77b4", new ArrayList()); - EPViewPerformanceData askWarm = - new EPViewPerformanceData( - "Warm ASK Tests", "#2ca02c", new ArrayList()); - EPViewPerformanceData joinCold = - new EPViewPerformanceData( - "Cold JOIN Tests", "#1f77b4", new ArrayList()); - EPViewPerformanceData joinWarm = - new EPViewPerformanceData( - "Warm JOIN Tests", "#2ca02c", new ArrayList()); - - ArrayList askdata = new ArrayList(); - askdata.add(askCold); - askdata.add(askWarm); - ArrayList joindata = new ArrayList(); - joindata.add(joinCold); - joindata.add(joinWarm); - - eppview.setAsk(askdata); - eppview.setJoin(joindata); - - Map map = pres.getResults(); - long limit = 0; - - for (Entry ent : map.entrySet()) { - PSingleResult res = ent.getValue(); - - if (ent.getKey().toString().startsWith("ASK")) { - - askStatsCold.addValue(res.getCold().getClosetime() / (double) 1000); - askStatsWarm.addValue(res.getWarm().getClosetime() / (double) 1000); - - String key = ent.getKey().toString().replaceAll("ASK", "").toLowerCase(); - - askCold.getData() - .add( - new EPViewPerformanceDataValues( - key, - res.getCold().getClosetime() / (double) 1000, - res.getCold().getException())); - askWarm.getData() - .add( - new EPViewPerformanceDataValues( - key, - res.getWarm().getClosetime() / (double) 1000, - res.getWarm().getException())); - } else if (ent.getKey().toString().startsWith("JOIN")) { - joinStatsCold.addValue(res.getCold().getClosetime() / (double) 1000); - joinStatsWarm.addValue(res.getWarm().getClosetime() / (double) 1000); - - String key = ent.getKey().toString().replaceAll("JOIN", "").toLowerCase(); - - joinCold.getData() - .add( - new EPViewPerformanceDataValues( - key, - res.getCold().getClosetime() / (double) 1000, - res.getCold().getException())); - joinWarm.getData() - .add( - new 
EPViewPerformanceDataValues( - key, - res.getWarm().getClosetime() / (double) 1000, - res.getWarm().getException())); - } else if (ent.getKey().toString().startsWith("LIMIT")) { - int sol = res.getCold().getSolutions(); - if (Math.max(limit, sol) == sol) { - limit = sol; - } - sol = res.getWarm().getSolutions(); - if (Math.max(limit, sol) == sol) { - limit = sol; - } - } - } - eppview.setThreshold(limit); - pview.setThreshold(limit); + // Update pview data + pview.setAskMeanCold(checkForNAN(askStatsCold.getMean())); + pview.setAskMeanWarm(checkForNAN(askStatsWarm.getMean())); + pview.setJoinMeanCold(checkForNAN(joinStatsCold.getMean())); + pview.setJoinMeanWarm(checkForNAN(joinStatsWarm.getMean())); - // Update pview data - pview.setAskMeanCold(checkForNAN(askStatsCold.getMean())); - pview.setAskMeanWarm(checkForNAN(askStatsWarm.getMean())); - pview.setJoinMeanCold(checkForNAN(joinStatsCold.getMean())); - pview.setJoinMeanWarm(checkForNAN(joinStatsWarm.getMean())); + pview.setLastUpdate(pres.getEndpointResult().getEnd()); + _db.update(pview); + _db.update(epview); - pview.setLastUpdate(pres.getEndpointResult().getEnd()); - _db.update(pview); - _db.update(epview); + return true; + } - return true; + private Double checkForNAN(double mean) { + if (Double.isNaN(mean)) { + return -1D; } - - private Double checkForNAN(double mean) { - if (Double.isNaN(mean)) { - return -1D; - } - return mean; + return mean; + } + + private PerformanceView getView(Endpoint ep) { + PerformanceView view = null; + List views = + _db.getResults(ep, PerformanceView.class, PerformanceView.SCHEMA$); + if (views.size() != 1) { + log.warn("We have {} PerformanceView, expected was 1", views.size()); } + if (views.size() == 0) { + view = new PerformanceView(); + view.setEndpoint(ep); + _db.insert(view); - private PerformanceView getView(Endpoint ep) { - PerformanceView view = null; - List views = - _db.getResults(ep, PerformanceView.class, PerformanceView.SCHEMA$); - if (views.size() != 1) { - 
log.warn("We have {} AvailabilityView, expected was 1", views.size()); - } - if (views.size() == 0) { - view = new PerformanceView(); - view.setEndpoint(ep); - _db.insert(view); - - } else { - view = views.get(0); - } - return view; + } else { + view = views.get(0); } + return view; + } } diff --git a/backend/src/main/java/sparqles/analytics/RefreshDataHubTask.java b/backend/src/main/java/sparqles/analytics/RefreshDataHubTask.java index 5a07de4d..c40ae548 100644 --- a/backend/src/main/java/sparqles/analytics/RefreshDataHubTask.java +++ b/backend/src/main/java/sparqles/analytics/RefreshDataHubTask.java @@ -12,89 +12,89 @@ public class RefreshDataHubTask implements Task { - private static final Logger log = LoggerFactory.getLogger(RefreshDataHubTask.class); - private MongoDBManager _dbm; - private Scheduler _s; + private static final Logger log = LoggerFactory.getLogger(RefreshDataHubTask.class); + private MongoDBManager _dbm; + private Scheduler _s; - @Override - public Index call() throws Exception { - log.info("execute updating ckan catalog"); + @Override + public Index call() throws Exception { + log.info("execute updating ckan catalog"); - Collection datahub = DatahubAccess.checkEndpointList(); + Collection datahub = DatahubAccess.checkEndpointList(); - if (datahub.size() == 0) return null; + if (datahub.size() == 0) return null; - // flush the endpoint collection - _dbm.initEndpointCollection(); + // flush the endpoint collection + _dbm.initEndpointCollection(); - int newEPs = 0, upEPs = 0, remEPs = 0; - for (Endpoint ep : datahub) { - if (_dbm.insert(ep)) { - newEPs++; - } - } + int newEPs = 0, upEPs = 0, remEPs = 0; + for (Endpoint ep : datahub) { + if (_dbm.insert(ep)) { + newEPs++; + } + } - /* - //Collection db = _dbm.get(Endpoint.class, Endpoint.SCHEMA$); - TreeSet ckan = new TreeSet(new EndpointComparator()); - ckan.addAll(datahub); + /* + //Collection db = _dbm.get(Endpoint.class, Endpoint.SCHEMA$); + TreeSet ckan = new TreeSet(new 
EndpointComparator()); + ckan.addAll(datahub); - TreeSet sparqles = new TreeSet(new EndpointComparator()); - sparqles.addAll(db); + TreeSet sparqles = new TreeSet(new EndpointComparator()); + sparqles.addAll(db); - int newEPs = 0, upEPs=0, remEPs=0; - for(Endpoint ep : ckan){ - if(! sparqles.contains(ep)){ - log.info("New endpoint {}",ep); - //new + int newEPs = 0, upEPs=0, remEPs=0; + for(Endpoint ep : ckan){ + if(! sparqles.contains(ep)){ + log.info("New endpoint {}",ep); + //new - if(_dbm.insert(ep)){ - newEPs++; - // Schedule sch = _s.defaultSchedule(ep); - // _dbm.insert(sch); + if(_dbm.insert(ep)){ + newEPs++; + // Schedule sch = _s.defaultSchedule(ep); + // _dbm.insert(sch); - // _s.initSchedule(sch); - } + // _s.initSchedule(sch); + } - }else{ - //update - log.info("Update endpoint {}",ep); - if( _dbm.update(ep)) - upEPs++; + }else{ + //update + log.info("Update endpoint {}",ep); + if( _dbm.update(ep)) + upEPs++; - } - } + } + } - for(Endpoint ep : sparqles){ - if(ep.getUri().equals(CONSTANTS.SPARQLES.getUri())) continue; - if(! ckan.contains(ep)){ - //remove - log.info("Remove endpoint {}",ep); - if( _dbm.cleanup(ep)){ - remEPs++; + for(Endpoint ep : sparqles){ + if(ep.getUri().equals(CONSTANTS.SPARQLES.getUri())) continue; + if(! 
ckan.contains(ep)){ + //remove + log.info("Remove endpoint {}",ep); + if( _dbm.cleanup(ep)){ + remEPs++; - } + } - } - } + } + } - log.info("executed updating ckan catalog, {} total, {} updates, {} new, {} removals",ckan.size(),upEPs, newEPs,remEPs); - */ - log.info("executed updating ckan catalog, {} new", newEPs); - return null; - } + log.info("executed updating ckan catalog, {} total, {} updates, {} new, {} removals",ckan.size(),upEPs, newEPs,remEPs); + */ + log.info("executed updating ckan catalog, {} new", newEPs); + return null; + } - @Override - public void setDBManager(MongoDBManager dbm) { - _dbm = dbm; - } + @Override + public void setDBManager(MongoDBManager dbm) { + _dbm = dbm; + } - public void setScheduler(Scheduler scheduler) { - _s = scheduler; - } + public void setScheduler(Scheduler scheduler) { + _s = scheduler; + } } diff --git a/backend/src/main/java/sparqles/analytics/StatsAnalyser.java b/backend/src/main/java/sparqles/analytics/StatsAnalyser.java index 447d78de..95229fd0 100644 --- a/backend/src/main/java/sparqles/analytics/StatsAnalyser.java +++ b/backend/src/main/java/sparqles/analytics/StatsAnalyser.java @@ -24,203 +24,197 @@ import sparqles.utils.MongoDBManager; public class StatsAnalyser implements Task { - static SimpleDateFormat yearweek = new SimpleDateFormat("YYYY-'W'ww"); - - private MongoDBManager _dbm; - - public static Date trim(Date date, int hours) { - final GregorianCalendar calendar = new GregorianCalendar(); - calendar.setTime(date); - calendar.set(Calendar.MILLISECOND, 0); - calendar.set(Calendar.SECOND, 0); - calendar.set(Calendar.MINUTE, 0); - calendar.set(Calendar.HOUR_OF_DAY, hours); - return calendar.getTime(); + static SimpleDateFormat yearweek = new SimpleDateFormat("YYYY-'W'ww"); + + private MongoDBManager _dbm; + + public static Date trim(Date date, int hours) { + final GregorianCalendar calendar = new GregorianCalendar(); + calendar.setTime(date); + calendar.set(Calendar.MILLISECOND, 0); + 
calendar.set(Calendar.SECOND, 0); + calendar.set(Calendar.MINUTE, 0); + calendar.set(Calendar.HOUR_OF_DAY, hours); + return calendar.getTime(); + } + + public static void main(String[] args) { + SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); + MongoDBManager m = new MongoDBManager(); + + StatsAnalyser s = new StatsAnalyser(); + s.setDBManager(m); + + try { + s.call(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); } + } - public static void main(String[] args) { - SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); - MongoDBManager m = new MongoDBManager(); + @Override + public Index call() throws Exception { - StatsAnalyser s = new StatsAnalyser(); - s.setDBManager(m); + analyse_p(); + analyse_a(); + analyse_f(); + analyse_d(); - try { - s.call(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } + return null; + } - @Override - public Index call() throws Exception { + private void analyse_a() { + System.out.println("Analysing a"); + TreeMap> epWeekCount = new TreeMap>(); - analyse_p(); - analyse_a(); - analyse_f(); - analyse_d(); + TreeSet weeks = new TreeSet<>(); + Iterator iter = _dbm.getIterator(AResult.class, AResult.SCHEMA$); + int count = 0; + while (iter.hasNext()) { + count++; + AResult p = iter.next(); - return null; - } + update(epWeekCount, weeks, p.getEndpointResult()); - private void analyse_a() { - System.out.println("Analysing a"); - TreeMap> epWeekCount = - new TreeMap>(); + if (count % 1000 == 0) { + System.out.println(count); + } + } - TreeSet weeks = new TreeSet<>(); - Iterator iter = _dbm.getIterator(AResult.class, AResult.SCHEMA$); - int count = 0; - while (iter.hasNext()) { - count++; - AResult p = iter.next(); + printStats(epWeekCount, weeks, "a_runs.csv"); + } - update(epWeekCount, weeks, p.getEndpointResult()); + private void update( + TreeMap> epWeekCount, + TreeSet weeks, + EndpointResult epr) { + 
Date cur = trim(new Date(epr.getStart()), 0); + String ep = epr.getEndpoint().getUri().toString(); - if (count % 1000 == 0) { - System.out.println(count); - } - } + String yyyyww = yearweek.format(cur); + weeks.add(yyyyww); - printStats(epWeekCount, weeks, "a_runs.csv"); + Map weekCount = epWeekCount.get(ep); + if (weekCount == null) { + weekCount = new TreeMap(); + epWeekCount.put(ep, weekCount); + } + Integer c = weekCount.get(yyyyww); + if (c == null) { + c = 0; } + weekCount.put(yyyyww, c + 1); + } - private void update( - TreeMap> epWeekCount, - TreeSet weeks, - EndpointResult epr) { - Date cur = trim(new Date(epr.getStart()), 0); - String ep = epr.getEndpoint().getUri().toString(); + private void analyse_p() { - String yyyyww = yearweek.format(cur); - weeks.add(yyyyww); + System.out.println("Analysing p"); + TreeMap> epWeekCount = new TreeMap>(); + TreeSet weeks = new TreeSet<>(); + Iterator iter = _dbm.getIterator(PResult.class, PResult.SCHEMA$); + int count = 0; + while (iter.hasNext()) { + count++; + PResult p = iter.next(); - Map weekCount = epWeekCount.get(ep); - if (weekCount == null) { - weekCount = new TreeMap(); - epWeekCount.put(ep, weekCount); - } - Integer c = weekCount.get(yyyyww); - if (c == null) { - c = 0; - } - weekCount.put(yyyyww, c + 1); - } + update(epWeekCount, weeks, p.getEndpointResult()); - private void analyse_p() { + if (count % 10000 == 0) { + System.out.println(count); + } + } - System.out.println("Analysing p"); - TreeMap> epWeekCount = - new TreeMap>(); - TreeSet weeks = new TreeSet<>(); - Iterator iter = _dbm.getIterator(PResult.class, PResult.SCHEMA$); - int count = 0; - while (iter.hasNext()) { - count++; - PResult p = iter.next(); + printStats(epWeekCount, weeks, "p_runs.csv"); + } - update(epWeekCount, weeks, p.getEndpointResult()); + private void analyse_d() { + System.out.println("Analysing d"); + TreeMap> epWeekCount = new TreeMap>(); + TreeSet weeks = new TreeSet<>(); + Iterator iter = _dbm.getIterator(DResult.class, 
DResult.SCHEMA$); + int count = 0; + while (iter.hasNext()) { + count++; + DResult p = iter.next(); - if (count % 10000 == 0) { - System.out.println(count); - } - } + update(epWeekCount, weeks, p.getEndpointResult()); - printStats(epWeekCount, weeks, "p_runs.csv"); + if (count % 10000 == 0) { + System.out.println(count); + } } - private void analyse_d() { - System.out.println("Analysing d"); - TreeMap> epWeekCount = - new TreeMap>(); - TreeSet weeks = new TreeSet<>(); - Iterator iter = _dbm.getIterator(DResult.class, DResult.SCHEMA$); - int count = 0; - while (iter.hasNext()) { - count++; - DResult p = iter.next(); - - update(epWeekCount, weeks, p.getEndpointResult()); - - if (count % 10000 == 0) { - System.out.println(count); - } - } + printStats(epWeekCount, weeks, "d_runs.csv"); + } - printStats(epWeekCount, weeks, "d_runs.csv"); - } + private void analyse_f() { + System.out.println("Analysing f"); + TreeMap> epWeekCount = new TreeMap>(); + TreeSet weeks = new TreeSet<>(); + Iterator iter = _dbm.getIterator(FResult.class, FResult.SCHEMA$); + int count = 0; + while (iter.hasNext()) { + count++; + FResult p = iter.next(); - private void analyse_f() { - System.out.println("Analysing f"); - TreeMap> epWeekCount = - new TreeMap>(); - TreeSet weeks = new TreeSet<>(); - Iterator iter = _dbm.getIterator(FResult.class, FResult.SCHEMA$); - int count = 0; - while (iter.hasNext()) { - count++; - FResult p = iter.next(); - - update(epWeekCount, weeks, p.getEndpointResult()); - - if (count % 10000 == 0) { - System.out.println(count); - } - } + update(epWeekCount, weeks, p.getEndpointResult()); - printStats(epWeekCount, weeks, "f_runs.csv"); + if (count % 10000 == 0) { + System.out.println(count); + } } - private void printStats( - TreeMap> epWeekCount, - TreeSet weeks, - String fName) { - System.out.println("Printing stats to " + fName); - PrintWriter fw; - try { - fw = new PrintWriter(new File(fName)); - fw.print("#ep, empty, min, mean, max"); - for (String yyyyww : weeks) { 
- fw.print("," + yyyyww); - } - fw.println(); - SummaryStatistics s = new SummaryStatistics(); - - for (Entry> ent : epWeekCount.entrySet()) { - fw.print(ent.getKey()); - int empty = 0; - - for (String yyyyww : weeks) { - if (ent.getValue().containsKey(yyyyww)) { - s.addValue(ent.getValue().get(yyyyww)); - } else { - s.addValue(0); - empty++; - } - } - fw.print(" ," + empty); - fw.print(" ," + s.getMin()); - fw.print(" ," + s.getMean()); - fw.print(" ," + s.getMax()); - - for (String yyyyww : weeks) { - if (ent.getValue().containsKey(yyyyww)) { - fw.print(" ," + ent.getValue().get(yyyyww)); - } else { - fw.print(" ,0"); - } - } - fw.println(); - } - fw.close(); - } catch (FileNotFoundException e) { - e.printStackTrace(); + printStats(epWeekCount, weeks, "f_runs.csv"); + } + + private void printStats( + TreeMap> epWeekCount, TreeSet weeks, String fName) { + System.out.println("Printing stats to " + fName); + PrintWriter fw; + try { + fw = new PrintWriter(new File(fName)); + fw.print("#ep, empty, min, mean, max"); + for (String yyyyww : weeks) { + fw.print("," + yyyyww); + } + fw.println(); + SummaryStatistics s = new SummaryStatistics(); + + for (Entry> ent : epWeekCount.entrySet()) { + fw.print(ent.getKey()); + int empty = 0; + + for (String yyyyww : weeks) { + if (ent.getValue().containsKey(yyyyww)) { + s.addValue(ent.getValue().get(yyyyww)); + } else { + s.addValue(0); + empty++; + } } + fw.print(" ," + empty); + fw.print(" ," + s.getMin()); + fw.print(" ," + s.getMean()); + fw.print(" ," + s.getMax()); + + for (String yyyyww : weeks) { + if (ent.getValue().containsKey(yyyyww)) { + fw.print(" ," + ent.getValue().get(yyyyww)); + } else { + fw.print(" ,0"); + } + } + fw.println(); + } + fw.close(); + } catch (FileNotFoundException e) { + e.printStackTrace(); } + } - @Override - public void setDBManager(MongoDBManager dbm) { - _dbm = dbm; - } + @Override + public void setDBManager(MongoDBManager dbm) { + _dbm = dbm; + } } diff --git 
a/backend/src/main/java/sparqles/avro/Dataset.java b/backend/src/main/java/sparqles/avro/Dataset.java index bccf7c8a..f4f60de0 100644 --- a/backend/src/main/java/sparqles/avro/Dataset.java +++ b/backend/src/main/java/sparqles/avro/Dataset.java @@ -5,211 +5,419 @@ */ package sparqles.avro; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class Dataset extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"Dataset\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}"); - @Deprecated public java.lang.CharSequence uri; - @Deprecated public java.lang.CharSequence label; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 5704361370392649754L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public Dataset() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"Dataset\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}"); - /** All-args constructor. 
*/ - public Dataset(java.lang.CharSequence uri, java.lang.CharSequence label) { - this.uri = uri; - this.label = label; - } + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Serializes this Dataset to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Dataset from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Dataset instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static Dataset fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence uri; + private java.lang.CharSequence label; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public Dataset() {} + + /** + * All-args constructor. + * + * @param uri The new value for uri + * @param label The new value for label + */ + public Dataset(java.lang.CharSequence uri, java.lang.CharSequence label) { + this.uri = uri; + this.label = label; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return uri; + case 1: + return label; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new Dataset RecordBuilder */ - public static sparqles.avro.Dataset.Builder newBuilder() { - return new sparqles.avro.Dataset.Builder(); + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + uri = (java.lang.CharSequence) value$; + break; + case 1: + label = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new Dataset RecordBuilder by copying an existing Builder */ - public static sparqles.avro.Dataset.Builder newBuilder(sparqles.avro.Dataset.Builder other) { - return new sparqles.avro.Dataset.Builder(other); + /** + * Gets the value of the 'uri' field. + * + * @return The value of the 'uri' field. + */ + public java.lang.CharSequence getUri() { + return uri; + } + + /** + * Sets the value of the 'uri' field. + * + * @param value the value to set. + */ + public void setUri(java.lang.CharSequence value) { + this.uri = value; + } + + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Creates a new Dataset RecordBuilder. + * + * @return A new Dataset RecordBuilder + */ + public static sparqles.avro.Dataset.Builder newBuilder() { + return new sparqles.avro.Dataset.Builder(); + } + + /** + * Creates a new Dataset RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new Dataset RecordBuilder + */ + public static sparqles.avro.Dataset.Builder newBuilder(sparqles.avro.Dataset.Builder other) { + if (other == null) { + return new sparqles.avro.Dataset.Builder(); + } else { + return new sparqles.avro.Dataset.Builder(other); } + } - /** Creates a new Dataset RecordBuilder by copying an existing Dataset instance */ - public static sparqles.avro.Dataset.Builder newBuilder(sparqles.avro.Dataset other) { - return new sparqles.avro.Dataset.Builder(other); + /** + * Creates a new Dataset RecordBuilder by copying an existing Dataset instance. + * + * @param other The existing instance to copy. + * @return A new Dataset RecordBuilder + */ + public static sparqles.avro.Dataset.Builder newBuilder(sparqles.avro.Dataset other) { + if (other == null) { + return new sparqles.avro.Dataset.Builder(); + } else { + return new sparqles.avro.Dataset.Builder(other); } + } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** RecordBuilder for Dataset instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence uri; + private java.lang.CharSequence label; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return uri; - case 1: - return label; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.Dataset.Builder other) { + super(other); + if (isValidValue(fields()[0], other.uri)) { + this.uri = data().deepCopy(fields()[0].schema(), other.uri); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.label)) { + this.label = data().deepCopy(fields()[1].schema(), other.label); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - uri = (java.lang.CharSequence) value$; - break; - case 1: - label = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Dataset instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.Dataset other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.uri)) { + this.uri = data().deepCopy(fields()[0].schema(), other.uri); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.label)) { + this.label = data().deepCopy(fields()[1].schema(), other.label); + fieldSetFlags()[1] = true; + } } - /** Gets the value of the 'uri' field. */ + /** + * Gets the value of the 'uri' field. + * + * @return The value. + */ public java.lang.CharSequence getUri() { - return uri; + return uri; } /** * Sets the value of the 'uri' field. * - * @param value the value to set. + * @param value The value of 'uri'. + * @return This builder. + */ + public sparqles.avro.Dataset.Builder setUri(java.lang.CharSequence value) { + validate(fields()[0], value); + this.uri = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'uri' field has been set. + * + * @return True if the 'uri' field has been set, false otherwise. 
+ */ + public boolean hasUri() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'uri' field. + * + * @return This builder. */ - public void setUri(java.lang.CharSequence value) { - this.uri = value; + public sparqles.avro.Dataset.Builder clearUri() { + uri = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'label' field. */ + /** + * Gets the value of the 'label' field. + * + * @return The value. + */ public java.lang.CharSequence getLabel() { - return label; + return label; } /** * Sets the value of the 'label' field. * - * @param value the value to set. + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.Dataset.Builder setLabel(java.lang.CharSequence value) { + validate(fields()[1], value); + this.label = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. */ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public boolean hasLabel() { + return fieldSetFlags()[1]; } - /** RecordBuilder for Dataset instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Clears the value of the 'label' field. + * + * @return This builder. + */ + public sparqles.avro.Dataset.Builder clearLabel() { + label = null; + fieldSetFlags()[1] = false; + return this; + } - private java.lang.CharSequence uri; - private java.lang.CharSequence label; + @Override + @SuppressWarnings("unchecked") + public Dataset build() { + try { + Dataset record = new Dataset(); + record.uri = + fieldSetFlags()[0] ? this.uri : (java.lang.CharSequence) defaultValue(fields()[0]); + record.label = + fieldSetFlags()[1] ? 
this.label : (java.lang.CharSequence) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.Dataset.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.Dataset.Builder other) { - super(other); - if (isValidValue(fields()[0], other.uri)) { - this.uri = data().deepCopy(fields()[0].schema(), other.uri); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.label)) { - this.label = data().deepCopy(fields()[1].schema(), other.label); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing Dataset instance */ - private Builder(sparqles.avro.Dataset other) { - super(sparqles.avro.Dataset.SCHEMA$); - if (isValidValue(fields()[0], other.uri)) { - this.uri = data().deepCopy(fields()[0].schema(), other.uri); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.label)) { - this.label = data().deepCopy(fields()[1].schema(), other.label); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'uri' field */ - public java.lang.CharSequence getUri() { - return uri; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'uri' 
field */ - public sparqles.avro.Dataset.Builder setUri(java.lang.CharSequence value) { - validate(fields()[0], value); - this.uri = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'uri' field has been set */ - public boolean hasUri() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.uri); - /** Clears the value of the 'uri' field */ - public sparqles.avro.Dataset.Builder clearUri() { - uri = null; - fieldSetFlags()[0] = false; - return this; - } + out.writeString(this.label); + } - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.uri = in.readString(this.uri instanceof Utf8 ? (Utf8) this.uri : null); - /** Sets the value of the 'label' field */ - public sparqles.avro.Dataset.Builder setLabel(java.lang.CharSequence value) { - validate(fields()[1], value); - this.label = value; - fieldSetFlags()[1] = true; - return this; - } + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[1]; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.uri = in.readString(this.uri instanceof Utf8 ? (Utf8) this.uri : null); + break; - /** Clears the value of the 'label' field */ - public sparqles.avro.Dataset.Builder clearLabel() { - label = null; - fieldSetFlags()[1] = false; - return this; - } + case 1: + this.label = in.readString(this.label instanceof Utf8 ? 
(Utf8) this.label : null); + break; - @Override - public Dataset build() { - try { - Dataset record = new Dataset(); - record.uri = - fieldSetFlags()[0] - ? this.uri - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.label = - fieldSetFlags()[1] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/Endpoint.java b/backend/src/main/java/sparqles/avro/Endpoint.java index 794cb026..cf5c4e83 100644 --- a/backend/src/main/java/sparqles/avro/Endpoint.java +++ b/backend/src/main/java/sparqles/avro/Endpoint.java @@ -5,212 +5,475 @@ */ package sparqles.avro; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class Endpoint extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}"); - @Deprecated public java.lang.CharSequence uri; - @Deprecated public java.util.List datasets; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 5345063537835847863L; - /** - * Default constructor. 
Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public Endpoint() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}"); - /** All-args constructor. */ - public Endpoint(java.lang.CharSequence uri, java.util.List datasets) { - this.uri = uri; - this.datasets = datasets; - } + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Serializes this Endpoint to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Endpoint from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Endpoint instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static Endpoint fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence uri; + private java.util.List datasets; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public Endpoint() {} + + /** + * All-args constructor. + * + * @param uri The new value for uri + * @param datasets The new value for datasets + */ + public Endpoint(java.lang.CharSequence uri, java.util.List datasets) { + this.uri = uri; + this.datasets = datasets; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return uri; + case 1: + return datasets; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new Endpoint RecordBuilder */ - public static sparqles.avro.Endpoint.Builder newBuilder() { - return new sparqles.avro.Endpoint.Builder(); + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + uri = (java.lang.CharSequence) value$; + break; + case 1: + datasets = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'uri' field. + * + * @return The value of the 'uri' field. + */ + public java.lang.CharSequence getUri() { + return uri; + } - /** Creates a new Endpoint RecordBuilder by copying an existing Builder */ - public static sparqles.avro.Endpoint.Builder newBuilder(sparqles.avro.Endpoint.Builder other) { - return new sparqles.avro.Endpoint.Builder(other); + /** + * Sets the value of the 'uri' field. + * + * @param value the value to set. + */ + public void setUri(java.lang.CharSequence value) { + this.uri = value; + } + + /** + * Gets the value of the 'datasets' field. + * + * @return The value of the 'datasets' field. + */ + public java.util.List getDatasets() { + return datasets; + } + + /** + * Sets the value of the 'datasets' field. + * + * @param value the value to set. + */ + public void setDatasets(java.util.List value) { + this.datasets = value; + } + + /** + * Creates a new Endpoint RecordBuilder. + * + * @return A new Endpoint RecordBuilder + */ + public static sparqles.avro.Endpoint.Builder newBuilder() { + return new sparqles.avro.Endpoint.Builder(); + } + + /** + * Creates a new Endpoint RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new Endpoint RecordBuilder + */ + public static sparqles.avro.Endpoint.Builder newBuilder(sparqles.avro.Endpoint.Builder other) { + if (other == null) { + return new sparqles.avro.Endpoint.Builder(); + } else { + return new sparqles.avro.Endpoint.Builder(other); } + } - /** Creates a new Endpoint RecordBuilder by copying an existing Endpoint instance */ - public static sparqles.avro.Endpoint.Builder newBuilder(sparqles.avro.Endpoint other) { - return new sparqles.avro.Endpoint.Builder(other); + /** + * Creates a new Endpoint RecordBuilder by copying an existing Endpoint instance. + * + * @param other The existing instance to copy. + * @return A new Endpoint RecordBuilder + */ + public static sparqles.avro.Endpoint.Builder newBuilder(sparqles.avro.Endpoint other) { + if (other == null) { + return new sparqles.avro.Endpoint.Builder(); + } else { + return new sparqles.avro.Endpoint.Builder(other); } + } + + /** RecordBuilder for Endpoint instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + private java.lang.CharSequence uri; + private java.util.List datasets; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return uri; - case 1: - return datasets; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.Endpoint.Builder other) { + super(other); + if (isValidValue(fields()[0], other.uri)) { + this.uri = data().deepCopy(fields()[0].schema(), other.uri); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.datasets)) { + this.datasets = data().deepCopy(fields()[1].schema(), other.datasets); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - uri = (java.lang.CharSequence) value$; - break; - case 1: - datasets = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Endpoint instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.Endpoint other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.uri)) { + this.uri = data().deepCopy(fields()[0].schema(), other.uri); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.datasets)) { + this.datasets = data().deepCopy(fields()[1].schema(), other.datasets); + fieldSetFlags()[1] = true; + } } - /** Gets the value of the 'uri' field. */ + /** + * Gets the value of the 'uri' field. + * + * @return The value. + */ public java.lang.CharSequence getUri() { - return uri; + return uri; } /** * Sets the value of the 'uri' field. * - * @param value the value to set. + * @param value The value of 'uri'. + * @return This builder. */ - public void setUri(java.lang.CharSequence value) { - this.uri = value; + public sparqles.avro.Endpoint.Builder setUri(java.lang.CharSequence value) { + validate(fields()[0], value); + this.uri = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'datasets' field. */ + /** + * Checks whether the 'uri' field has been set. 
+ * + * @return True if the 'uri' field has been set, false otherwise. + */ + public boolean hasUri() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'uri' field. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder clearUri() { + uri = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'datasets' field. + * + * @return The value. + */ public java.util.List getDatasets() { - return datasets; + return datasets; } /** * Sets the value of the 'datasets' field. * - * @param value the value to set. + * @param value The value of 'datasets'. + * @return This builder. */ - public void setDatasets(java.util.List value) { - this.datasets = value; + public sparqles.avro.Endpoint.Builder setDatasets(java.util.List value) { + validate(fields()[1], value); + this.datasets = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for Endpoint instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'datasets' field has been set. + * + * @return True if the 'datasets' field has been set, false otherwise. + */ + public boolean hasDatasets() { + return fieldSetFlags()[1]; + } - private java.lang.CharSequence uri; - private java.util.List datasets; + /** + * Clears the value of the 'datasets' field. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder clearDatasets() { + datasets = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.Endpoint.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public Endpoint build() { + try { + Endpoint record = new Endpoint(); + record.uri = + fieldSetFlags()[0] ? this.uri : (java.lang.CharSequence) defaultValue(fields()[0]); + record.datasets = + fieldSetFlags()[1] + ? 
this.datasets + : (java.util.List) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.Endpoint.Builder other) { - super(other); - if (isValidValue(fields()[0], other.uri)) { - this.uri = data().deepCopy(fields()[0].schema(), other.uri); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.datasets)) { - this.datasets = data().deepCopy(fields()[1].schema(), other.datasets); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Endpoint instance */ - private Builder(sparqles.avro.Endpoint other) { - super(sparqles.avro.Endpoint.SCHEMA$); - if (isValidValue(fields()[0], other.uri)) { - this.uri = data().deepCopy(fields()[0].schema(), other.uri); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.datasets)) { - this.datasets = data().deepCopy(fields()[1].schema(), other.datasets); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'uri' field */ - public java.lang.CharSequence getUri() { - return uri; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'uri' field */ - public sparqles.avro.Endpoint.Builder setUri(java.lang.CharSequence value) { - validate(fields()[0], value); - this.uri = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void 
readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'uri' field has been set */ - public boolean hasUri() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'uri' field */ - public sparqles.avro.Endpoint.Builder clearUri() { - uri = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.uri); - /** Gets the value of the 'datasets' field */ - public java.util.List getDatasets() { - return datasets; - } + long size0 = this.datasets.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.Dataset e0 : this.datasets) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'datasets' field */ - public sparqles.avro.Endpoint.Builder setDatasets( - java.util.List value) { - validate(fields()[1], value); - this.datasets = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.uri = in.readString(this.uri instanceof Utf8 ? 
(Utf8) this.uri : null); - /** Checks whether the 'datasets' field has been set */ - public boolean hasDatasets() { - return fieldSetFlags()[1]; + long size0 = in.readArrayStart(); + java.util.List a0 = this.datasets; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("datasets").schema()); + this.datasets = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.Dataset e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.Dataset(); + } + e0.customDecode(in); + a0.add(e0); } + } - /** Clears the value of the 'datasets' field */ - public sparqles.avro.Endpoint.Builder clearDatasets() { - datasets = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.uri = in.readString(this.uri instanceof Utf8 ? (Utf8) this.uri : null); + break; - @Override - public Endpoint build() { - try { - Endpoint record = new Endpoint(); - record.uri = - fieldSetFlags()[0] - ? this.uri - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.datasets = - fieldSetFlags()[1] - ? this.datasets - : (java.util.List) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.datasets; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("datasets").schema()); + this.datasets = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.Dataset e0 = (ga0 != null ? 
ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.Dataset(); + } + e0.customDecode(in); + a0.add(e0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/EndpointResult.java b/backend/src/main/java/sparqles/avro/EndpointResult.java index 517553c2..0b41adf2 100644 --- a/backend/src/main/java/sparqles/avro/EndpointResult.java +++ b/backend/src/main/java/sparqles/avro/EndpointResult.java @@ -5,270 +5,566 @@ */ package sparqles.avro; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class EndpointResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public long start; - @Deprecated public long end; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -787273293297893760L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + 
"{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EndpointResult to a ByteBuffer. 
+ * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EndpointResult from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EndpointResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EndpointResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private long start; + private long end; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EndpointResult() {} + + /** + * All-args constructor. + * + * @param endpoint The new value for endpoint + * @param start The new value for start + * @param end The new value for end + */ + public EndpointResult(sparqles.avro.Endpoint endpoint, java.lang.Long start, java.lang.Long end) { + this.endpoint = endpoint; + this.start = start; + this.end = end; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return start; + case 2: + return end; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + start = (java.lang.Long) value$; + break; + case 2: + end = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'start' field. + * + * @return The value of the 'start' field. + */ + public long getStart() { + return start; + } + + /** + * Sets the value of the 'start' field. + * + * @param value the value to set. + */ + public void setStart(long value) { + this.start = value; + } + + /** + * Gets the value of the 'end' field. + * + * @return The value of the 'end' field. + */ + public long getEnd() { + return end; + } + + /** + * Sets the value of the 'end' field. + * + * @param value the value to set. + */ + public void setEnd(long value) { + this.end = value; + } + + /** + * Creates a new EndpointResult RecordBuilder. + * + * @return A new EndpointResult RecordBuilder + */ + public static sparqles.avro.EndpointResult.Builder newBuilder() { + return new sparqles.avro.EndpointResult.Builder(); + } + + /** + * Creates a new EndpointResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new EndpointResult RecordBuilder + */ + public static sparqles.avro.EndpointResult.Builder newBuilder( + sparqles.avro.EndpointResult.Builder other) { + if (other == null) { + return new sparqles.avro.EndpointResult.Builder(); + } else { + return new sparqles.avro.EndpointResult.Builder(other); + } + } + + /** + * Creates a new EndpointResult RecordBuilder by copying an existing EndpointResult instance. + * + * @param other The existing instance to copy. + * @return A new EndpointResult RecordBuilder + */ + public static sparqles.avro.EndpointResult.Builder newBuilder( + sparqles.avro.EndpointResult other) { + if (other == null) { + return new sparqles.avro.EndpointResult.Builder(); + } else { + return new sparqles.avro.EndpointResult.Builder(other); + } + } + + /** RecordBuilder for EndpointResult instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private long start; + private long end; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public EndpointResult() {} - - /** All-args constructor. 
*/ - public EndpointResult( - sparqles.avro.Endpoint endpoint, java.lang.Long start, java.lang.Long end) { - this.endpoint = endpoint; - this.start = start; - this.end = end; + private Builder(sparqles.avro.EndpointResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.start)) { + this.start = data().deepCopy(fields()[1].schema(), other.start); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.end)) { + this.end = data().deepCopy(fields()[2].schema(), other.end); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new EndpointResult RecordBuilder */ - public static sparqles.avro.EndpointResult.Builder newBuilder() { - return new sparqles.avro.EndpointResult.Builder(); + /** + * Creates a Builder by copying an existing EndpointResult instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.EndpointResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.start)) { + this.start = data().deepCopy(fields()[1].schema(), other.start); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.end)) { + this.end = data().deepCopy(fields()[2].schema(), other.end); + fieldSetFlags()[2] = true; + } } - /** Creates a new EndpointResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.EndpointResult.Builder newBuilder( - sparqles.avro.EndpointResult.Builder other) { - return new sparqles.avro.EndpointResult.Builder(other); + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; } - /** Creates a new EndpointResult RecordBuilder by copying an existing EndpointResult instance */ - public static sparqles.avro.EndpointResult.Builder newBuilder( - sparqles.avro.EndpointResult other) { - return new sparqles.avro.EndpointResult.Builder(other); + /** + * Sets the value of the 'endpoint' field. + * + * @param value The value of 'endpoint'. + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder setEndpoint(sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; } - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return start; - case 2: - return end; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); } + } + return endpointBuilder; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - start = (java.lang.Long) value$; - break; - case 2: - end = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; } - /** Gets the value of the 'endpoint' field. */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; } /** - * Sets the value of the 'endpoint' field. + * Clears the value of the 'endpoint' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.EndpointResult.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'start' field. */ - public java.lang.Long getStart() { - return start; + /** + * Gets the value of the 'start' field. + * + * @return The value. + */ + public long getStart() { + return start; } /** * Sets the value of the 'start' field. * - * @param value the value to set. + * @param value The value of 'start'. + * @return This builder. */ - public void setStart(java.lang.Long value) { - this.start = value; + public sparqles.avro.EndpointResult.Builder setStart(long value) { + validate(fields()[1], value); + this.start = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'end' field. */ - public java.lang.Long getEnd() { - return end; + /** + * Checks whether the 'start' field has been set. + * + * @return True if the 'start' field has been set, false otherwise. + */ + public boolean hasStart() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'end' field. + * Clears the value of the 'start' field. * - * @param value the value to set. + * @return This builder. */ - public void setEnd(java.lang.Long value) { - this.end = value; + public sparqles.avro.EndpointResult.Builder clearStart() { + fieldSetFlags()[1] = false; + return this; } - /** RecordBuilder for EndpointResult instances. 
*/ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private sparqles.avro.Endpoint endpoint; - private long start; - private long end; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.EndpointResult.SCHEMA$); - } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.EndpointResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.start)) { - this.start = data().deepCopy(fields()[1].schema(), other.start); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.end)) { - this.end = data().deepCopy(fields()[2].schema(), other.end); - fieldSetFlags()[2] = true; - } - } - - /** Creates a Builder by copying an existing EndpointResult instance */ - private Builder(sparqles.avro.EndpointResult other) { - super(sparqles.avro.EndpointResult.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.start)) { - this.start = data().deepCopy(fields()[1].schema(), other.start); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.end)) { - this.end = data().deepCopy(fields()[2].schema(), other.end); - fieldSetFlags()[2] = true; - } - } - - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } - - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.EndpointResult.Builder setEndpoint(sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'endpoint' field has been set */ - public 
boolean hasEndpoint() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.EndpointResult.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'start' field */ - public java.lang.Long getStart() { - return start; - } - - /** Sets the value of the 'start' field */ - public sparqles.avro.EndpointResult.Builder setStart(long value) { - validate(fields()[1], value); - this.start = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Gets the value of the 'end' field. + * + * @return The value. + */ + public long getEnd() { + return end; + } - /** Checks whether the 'start' field has been set */ - public boolean hasStart() { - return fieldSetFlags()[1]; - } + /** + * Sets the value of the 'end' field. + * + * @param value The value of 'end'. + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder setEnd(long value) { + validate(fields()[2], value); + this.end = value; + fieldSetFlags()[2] = true; + return this; + } - /** Clears the value of the 'start' field */ - public sparqles.avro.EndpointResult.Builder clearStart() { - fieldSetFlags()[1] = false; - return this; - } + /** + * Checks whether the 'end' field has been set. + * + * @return True if the 'end' field has been set, false otherwise. + */ + public boolean hasEnd() { + return fieldSetFlags()[2]; + } - /** Gets the value of the 'end' field */ - public java.lang.Long getEnd() { - return end; - } + /** + * Clears the value of the 'end' field. + * + * @return This builder. 
+ */ + public sparqles.avro.EndpointResult.Builder clearEnd() { + fieldSetFlags()[2] = false; + return this; + } - /** Sets the value of the 'end' field */ - public sparqles.avro.EndpointResult.Builder setEnd(long value) { - validate(fields()[2], value); - this.end = value; - fieldSetFlags()[2] = true; - return this; + @Override + @SuppressWarnings("unchecked") + public EndpointResult build() { + try { + EndpointResult record = new EndpointResult(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } + record.start = fieldSetFlags()[1] ? this.start : (java.lang.Long) defaultValue(fields()[1]); + record.end = fieldSetFlags()[2] ? this.end : (java.lang.Long) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + 
this.endpoint.customEncode(out); + + out.writeLong(this.start); + + out.writeLong(this.end); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + + this.start = in.readLong(); + + this.end = in.readLong(); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; - /** Checks whether the 'end' field has been set */ - public boolean hasEnd() { - return fieldSetFlags()[2]; - } + case 1: + this.start = in.readLong(); + break; - /** Clears the value of the 'end' field */ - public sparqles.avro.EndpointResult.Builder clearEnd() { - fieldSetFlags()[2] = false; - return this; - } + case 2: + this.end = in.readLong(); + break; - @Override - public EndpointResult build() { - try { - EndpointResult record = new EndpointResult(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.start = - fieldSetFlags()[1] - ? this.start - : (java.lang.Long) defaultValue(fields()[1]); - record.end = - fieldSetFlags()[2] ? 
this.end : (java.lang.Long) defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/AvailabilityIndex.java b/backend/src/main/java/sparqles/avro/analytics/AvailabilityIndex.java index f325922d..7c3477de 100644 --- a/backend/src/main/java/sparqles/avro/analytics/AvailabilityIndex.java +++ b/backend/src/main/java/sparqles/avro/analytics/AvailabilityIndex.java @@ -5,225 +5,484 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class AvailabilityIndex extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"AvailabilityIndex\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"string\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}"); - @Deprecated public java.lang.CharSequence key; - @Deprecated public java.util.List values; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -8916363243407959834L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. 
- */ - public AvailabilityIndex() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"AvailabilityIndex\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"string\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. */ - public AvailabilityIndex( - java.lang.CharSequence key, - java.util.List values) { - this.key = key; - this.values = values; + /** + * Serializes this AvailabilityIndex to a ByteBuffer. 
+ * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a AvailabilityIndex from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a AvailabilityIndex instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static AvailabilityIndex fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.util.List values; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public AvailabilityIndex() {} + + /** + * All-args constructor. + * + * @param key The new value for key + * @param values The new value for values + */ + public AvailabilityIndex( + java.lang.CharSequence key, + java.util.List values) { + this.key = key; + this.values = values; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return values; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + values = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } - /** Creates a new AvailabilityIndex RecordBuilder */ - public static sparqles.avro.analytics.AvailabilityIndex.Builder newBuilder() { - return new sparqles.avro.analytics.AvailabilityIndex.Builder(); + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. + */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'values' field. + * + * @return The value of the 'values' field. + */ + public java.util.List getValues() { + return values; + } + + /** + * Sets the value of the 'values' field. + * + * @param value the value to set. + */ + public void setValues(java.util.List value) { + this.values = value; + } + + /** + * Creates a new AvailabilityIndex RecordBuilder. + * + * @return A new AvailabilityIndex RecordBuilder + */ + public static sparqles.avro.analytics.AvailabilityIndex.Builder newBuilder() { + return new sparqles.avro.analytics.AvailabilityIndex.Builder(); + } + + /** + * Creates a new AvailabilityIndex RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new AvailabilityIndex RecordBuilder + */ + public static sparqles.avro.analytics.AvailabilityIndex.Builder newBuilder( + sparqles.avro.analytics.AvailabilityIndex.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.AvailabilityIndex.Builder(); + } else { + return new sparqles.avro.analytics.AvailabilityIndex.Builder(other); + } + } + + /** + * Creates a new AvailabilityIndex RecordBuilder by copying an existing AvailabilityIndex + * instance. + * + * @param other The existing instance to copy. + * @return A new AvailabilityIndex RecordBuilder + */ + public static sparqles.avro.analytics.AvailabilityIndex.Builder newBuilder( + sparqles.avro.analytics.AvailabilityIndex other) { + if (other == null) { + return new sparqles.avro.analytics.AvailabilityIndex.Builder(); + } else { + return new sparqles.avro.analytics.AvailabilityIndex.Builder(other); } + } + + /** RecordBuilder for AvailabilityIndex instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.util.List values; - /** Creates a new AvailabilityIndex RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.AvailabilityIndex.Builder newBuilder( - sparqles.avro.analytics.AvailabilityIndex.Builder other) { - return new sparqles.avro.analytics.AvailabilityIndex.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new AvailabilityIndex RecordBuilder by copying an existing AvailabilityIndex - * instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.AvailabilityIndex.Builder newBuilder( - sparqles.avro.analytics.AvailabilityIndex other) { - return new sparqles.avro.analytics.AvailabilityIndex.Builder(other); + private Builder(sparqles.avro.analytics.AvailabilityIndex.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing AvailabilityIndex instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.AvailabilityIndex other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return key; - case 1: - return values; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'key' field. + * + * @return The value. + */ + public java.lang.CharSequence getKey() { + return key; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - key = (java.lang.CharSequence) value$; - break; - case 1: - values = - (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.AvailabilityIndex.Builder setKey(java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'key' field. */ - public java.lang.CharSequence getKey() { - return key; + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. + */ + public boolean hasKey() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'key' field. + * Clears the value of the 'key' field. * - * @param value the value to set. + * @return This builder. */ - public void setKey(java.lang.CharSequence value) { - this.key = value; + public sparqles.avro.analytics.AvailabilityIndex.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'values' field. */ + /** + * Gets the value of the 'values' field. + * + * @return The value. + */ public java.util.List getValues() { - return values; + return values; } /** * Sets the value of the 'values' field. * - * @param value the value to set. + * @param value The value of 'values'. + * @return This builder. */ - public void setValues( - java.util.List value) { - this.values = value; + public sparqles.avro.analytics.AvailabilityIndex.Builder setValues( + java.util.List value) { + validate(fields()[1], value); + this.values = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for AvailabilityIndex instances. 
*/ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'values' field has been set. + * + * @return True if the 'values' field has been set, false otherwise. + */ + public boolean hasValues() { + return fieldSetFlags()[1]; + } - private java.lang.CharSequence key; - private java.util.List values; + /** + * Clears the value of the 'values' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.AvailabilityIndex.Builder clearValues() { + values = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.AvailabilityIndex.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public AvailabilityIndex build() { + try { + AvailabilityIndex record = new AvailabilityIndex(); + record.key = + fieldSetFlags()[0] ? this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.values = + fieldSetFlags()[1] + ? 
this.values + : (java.util.List) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.AvailabilityIndex.Builder other) { - super(other); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.values)) { - this.values = data().deepCopy(fields()[1].schema(), other.values); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing AvailabilityIndex instance */ - private Builder(sparqles.avro.analytics.AvailabilityIndex other) { - super(sparqles.avro.analytics.AvailabilityIndex.SCHEMA$); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.values)) { - this.values = data().deepCopy(fields()[1].schema(), other.values); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'key' field */ - public java.lang.CharSequence getKey() { - return key; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'key' field */ - public sparqles.avro.analytics.AvailabilityIndex.Builder setKey( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.key = value; - 
fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'key' field has been set */ - public boolean hasKey() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'key' field */ - public sparqles.avro.analytics.AvailabilityIndex.Builder clearKey() { - key = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); - /** Gets the value of the 'values' field */ - public java.util.List getValues() { - return values; - } + long size0 = this.values.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexAvailabilityDataPoint e0 : this.values) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'values' field */ - public sparqles.avro.analytics.AvailabilityIndex.Builder setValues( - java.util.List value) { - validate(fields()[1], value); - this.values = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? 
(Utf8) this.key : null); - /** Checks whether the 'values' field has been set */ - public boolean hasValues() { - return fieldSetFlags()[1]; + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexAvailabilityDataPoint e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexAvailabilityDataPoint(); + } + e0.customDecode(in); + a0.add(e0); } + } - /** Clears the value of the 'values' field */ - public sparqles.avro.analytics.AvailabilityIndex.Builder clearValues() { - values = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; - @Override - public AvailabilityIndex build() { - try { - AvailabilityIndex record = new AvailabilityIndex(); - record.key = - fieldSetFlags()[0] - ? this.key - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.values = - fieldSetFlags()[1] - ? this.values - : (java.util.List< - sparqles.avro.analytics.IndexAvailabilityDataPoint>) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexAvailabilityDataPoint e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexAvailabilityDataPoint(); + } + e0.customDecode(in); + a0.add(e0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/AvailabilityView.java b/backend/src/main/java/sparqles/avro/analytics/AvailabilityView.java index ea1433dd..3cae1685 100644 --- a/backend/src/main/java/sparqles/avro/analytics/AvailabilityView.java +++ b/backend/src/main/java/sparqles/avro/analytics/AvailabilityView.java @@ -5,395 +5,746 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class AvailabilityView extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"AvailabilityView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"upNow\",\"type\":\"boolean\"},{\"name\":\"uptimeLast24h\",\"type\":\"double\"},{\"name\":\"uptimeLast7d\",\"type\":\"double\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); - 
@Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public boolean upNow; - @Deprecated public double uptimeLast24h; - @Deprecated public double uptimeLast7d; - @Deprecated public long lastUpdate; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 2061486749207014956L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"AvailabilityView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"upNow\",\"type\":\"boolean\"},{\"name\":\"uptimeLast24h\",\"type\":\"double\"},{\"name\":\"uptimeLast7d\",\"type\":\"double\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this AvailabilityView to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a AvailabilityView from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a AvailabilityView instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static AvailabilityView fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private boolean upNow; + private double uptimeLast24h; + private double uptimeLast7d; + private long lastUpdate; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public AvailabilityView() {} + + /** + * All-args constructor. 
+ * + * @param endpoint The new value for endpoint + * @param upNow The new value for upNow + * @param uptimeLast24h The new value for uptimeLast24h + * @param uptimeLast7d The new value for uptimeLast7d + * @param lastUpdate The new value for lastUpdate + */ + public AvailabilityView( + sparqles.avro.Endpoint endpoint, + java.lang.Boolean upNow, + java.lang.Double uptimeLast24h, + java.lang.Double uptimeLast7d, + java.lang.Long lastUpdate) { + this.endpoint = endpoint; + this.upNow = upNow; + this.uptimeLast24h = uptimeLast24h; + this.uptimeLast7d = uptimeLast7d; + this.lastUpdate = lastUpdate; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return upNow; + case 2: + return uptimeLast24h; + case 3: + return uptimeLast7d; + case 4: + return lastUpdate; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + upNow = (java.lang.Boolean) value$; + break; + case 2: + uptimeLast24h = (java.lang.Double) value$; + break; + case 3: + uptimeLast7d = (java.lang.Double) value$; + break; + case 4: + lastUpdate = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. 
+ * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'upNow' field. + * + * @return The value of the 'upNow' field. + */ + public boolean getUpNow() { + return upNow; + } + + /** + * Sets the value of the 'upNow' field. + * + * @param value the value to set. + */ + public void setUpNow(boolean value) { + this.upNow = value; + } + + /** + * Gets the value of the 'uptimeLast24h' field. + * + * @return The value of the 'uptimeLast24h' field. + */ + public double getUptimeLast24h() { + return uptimeLast24h; + } + + /** + * Sets the value of the 'uptimeLast24h' field. + * + * @param value the value to set. + */ + public void setUptimeLast24h(double value) { + this.uptimeLast24h = value; + } + + /** + * Gets the value of the 'uptimeLast7d' field. + * + * @return The value of the 'uptimeLast7d' field. + */ + public double getUptimeLast7d() { + return uptimeLast7d; + } + + /** + * Sets the value of the 'uptimeLast7d' field. + * + * @param value the value to set. + */ + public void setUptimeLast7d(double value) { + this.uptimeLast7d = value; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value of the 'lastUpdate' field. + */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value the value to set. + */ + public void setLastUpdate(long value) { + this.lastUpdate = value; + } + + /** + * Creates a new AvailabilityView RecordBuilder. + * + * @return A new AvailabilityView RecordBuilder + */ + public static sparqles.avro.analytics.AvailabilityView.Builder newBuilder() { + return new sparqles.avro.analytics.AvailabilityView.Builder(); + } + + /** + * Creates a new AvailabilityView RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new AvailabilityView RecordBuilder + */ + public static sparqles.avro.analytics.AvailabilityView.Builder newBuilder( + sparqles.avro.analytics.AvailabilityView.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.AvailabilityView.Builder(); + } else { + return new sparqles.avro.analytics.AvailabilityView.Builder(other); + } + } + + /** + * Creates a new AvailabilityView RecordBuilder by copying an existing AvailabilityView instance. + * + * @param other The existing instance to copy. + * @return A new AvailabilityView RecordBuilder + */ + public static sparqles.avro.analytics.AvailabilityView.Builder newBuilder( + sparqles.avro.analytics.AvailabilityView other) { + if (other == null) { + return new sparqles.avro.analytics.AvailabilityView.Builder(); + } else { + return new sparqles.avro.analytics.AvailabilityView.Builder(other); + } + } + + /** RecordBuilder for AvailabilityView instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private boolean upNow; + private double uptimeLast24h; + private double uptimeLast7d; + private long lastUpdate; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public AvailabilityView() {} + private Builder(sparqles.avro.analytics.AvailabilityView.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.upNow)) { + this.upNow = data().deepCopy(fields()[1].schema(), other.upNow); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.uptimeLast24h)) { + this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.uptimeLast7d)) { + this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + } - /** All-args constructor. */ - public AvailabilityView( - sparqles.avro.Endpoint endpoint, - java.lang.Boolean upNow, - java.lang.Double uptimeLast24h, - java.lang.Double uptimeLast7d, - java.lang.Long lastUpdate) { - this.endpoint = endpoint; - this.upNow = upNow; - this.uptimeLast24h = uptimeLast24h; - this.uptimeLast7d = uptimeLast7d; - this.lastUpdate = lastUpdate; + /** + * Creates a Builder by copying an existing AvailabilityView instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.AvailabilityView other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.upNow)) { + this.upNow = data().deepCopy(fields()[1].schema(), other.upNow); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.uptimeLast24h)) { + this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.uptimeLast7d)) { + this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); + fieldSetFlags()[4] = true; + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; } - /** Creates a new AvailabilityView RecordBuilder */ - public static sparqles.avro.analytics.AvailabilityView.Builder newBuilder() { - return new sparqles.avro.analytics.AvailabilityView.Builder(); + /** + * Sets the value of the 'endpoint' field. + * + * @param value The value of 'endpoint'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.AvailabilityView.Builder setEndpoint( + sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - /** Creates a new AvailabilityView RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.AvailabilityView.Builder newBuilder( - sparqles.avro.analytics.AvailabilityView.Builder other) { - return new sparqles.avro.analytics.AvailabilityView.Builder(other); + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; } /** - * Creates a new AvailabilityView RecordBuilder by copying an existing AvailabilityView instance + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. */ - public static sparqles.avro.analytics.AvailabilityView.Builder newBuilder( - sparqles.avro.analytics.AvailabilityView other) { - return new sparqles.avro.analytics.AvailabilityView.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return upNow; - case 2: - return uptimeLast24h; - case 3: - return uptimeLast7d; - case 4: - return lastUpdate; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); } + } + return endpointBuilder; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - upNow = (java.lang.Boolean) value$; - break; - case 2: - uptimeLast24h = (java.lang.Double) value$; - break; - case 3: - uptimeLast7d = (java.lang.Double) value$; - break; - case 4: - lastUpdate = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.AvailabilityView.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; } - /** Gets the value of the 'endpoint' field. */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; } /** - * Sets the value of the 'endpoint' field. + * Clears the value of the 'endpoint' field. * - * @param value the value to set. + * @return This builder. */ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.analytics.AvailabilityView.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'upNow' field. */ - public java.lang.Boolean getUpNow() { - return upNow; + /** + * Gets the value of the 'upNow' field. + * + * @return The value. + */ + public boolean getUpNow() { + return upNow; } /** * Sets the value of the 'upNow' field. * - * @param value the value to set. + * @param value The value of 'upNow'. + * @return This builder. 
*/ - public void setUpNow(java.lang.Boolean value) { - this.upNow = value; + public sparqles.avro.analytics.AvailabilityView.Builder setUpNow(boolean value) { + validate(fields()[1], value); + this.upNow = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'uptimeLast24h' field. */ - public java.lang.Double getUptimeLast24h() { - return uptimeLast24h; + /** + * Checks whether the 'upNow' field has been set. + * + * @return True if the 'upNow' field has been set, false otherwise. + */ + public boolean hasUpNow() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'upNow' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.AvailabilityView.Builder clearUpNow() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'uptimeLast24h' field. + * + * @return The value. + */ + public double getUptimeLast24h() { + return uptimeLast24h; } /** * Sets the value of the 'uptimeLast24h' field. * - * @param value the value to set. + * @param value The value of 'uptimeLast24h'. + * @return This builder. */ - public void setUptimeLast24h(java.lang.Double value) { - this.uptimeLast24h = value; + public sparqles.avro.analytics.AvailabilityView.Builder setUptimeLast24h(double value) { + validate(fields()[2], value); + this.uptimeLast24h = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'uptimeLast7d' field. */ - public java.lang.Double getUptimeLast7d() { - return uptimeLast7d; + /** + * Checks whether the 'uptimeLast24h' field has been set. + * + * @return True if the 'uptimeLast24h' field has been set, false otherwise. + */ + public boolean hasUptimeLast24h() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'uptimeLast24h' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.AvailabilityView.Builder clearUptimeLast24h() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'uptimeLast7d' field. + * + * @return The value. + */ + public double getUptimeLast7d() { + return uptimeLast7d; } /** * Sets the value of the 'uptimeLast7d' field. * - * @param value the value to set. + * @param value The value of 'uptimeLast7d'. + * @return This builder. */ - public void setUptimeLast7d(java.lang.Double value) { - this.uptimeLast7d = value; + public sparqles.avro.analytics.AvailabilityView.Builder setUptimeLast7d(double value) { + validate(fields()[3], value); + this.uptimeLast7d = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'lastUpdate' field. */ - public java.lang.Long getLastUpdate() { - return lastUpdate; + /** + * Checks whether the 'uptimeLast7d' field has been set. + * + * @return True if the 'uptimeLast7d' field has been set, false otherwise. + */ + public boolean hasUptimeLast7d() { + return fieldSetFlags()[3]; } /** - * Sets the value of the 'lastUpdate' field. + * Clears the value of the 'uptimeLast7d' field. * - * @param value the value to set. + * @return This builder. */ - public void setLastUpdate(java.lang.Long value) { - this.lastUpdate = value; + public sparqles.avro.analytics.AvailabilityView.Builder clearUptimeLast7d() { + fieldSetFlags()[3] = false; + return this; } - /** RecordBuilder for AvailabilityView instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value. + */ + public long getLastUpdate() { + return lastUpdate; + } - private sparqles.avro.Endpoint endpoint; - private boolean upNow; - private double uptimeLast24h; - private double uptimeLast7d; - private long lastUpdate; + /** + * Sets the value of the 'lastUpdate' field. 
+ * + * @param value The value of 'lastUpdate'. + * @return This builder. + */ + public sparqles.avro.analytics.AvailabilityView.Builder setLastUpdate(long value) { + validate(fields()[4], value); + this.lastUpdate = value; + fieldSetFlags()[4] = true; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.AvailabilityView.SCHEMA$); - } + /** + * Checks whether the 'lastUpdate' field has been set. + * + * @return True if the 'lastUpdate' field has been set, false otherwise. + */ + public boolean hasLastUpdate() { + return fieldSetFlags()[4]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.AvailabilityView.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.upNow)) { - this.upNow = data().deepCopy(fields()[1].schema(), other.upNow); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.uptimeLast24h)) { - this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.uptimeLast7d)) { - this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); - fieldSetFlags()[4] = true; - } - } + /** + * Clears the value of the 'lastUpdate' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.AvailabilityView.Builder clearLastUpdate() { + fieldSetFlags()[4] = false; + return this; + } - /** Creates a Builder by copying an existing AvailabilityView instance */ - private Builder(sparqles.avro.analytics.AvailabilityView other) { - super(sparqles.avro.analytics.AvailabilityView.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.upNow)) { - this.upNow = data().deepCopy(fields()[1].schema(), other.upNow); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.uptimeLast24h)) { - this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.uptimeLast7d)) { - this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); - fieldSetFlags()[4] = true; - } + @Override + @SuppressWarnings("unchecked") + public AvailabilityView build() { + try { + AvailabilityView record = new AvailabilityView(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } + record.upNow = + fieldSetFlags()[1] ? this.upNow : (java.lang.Boolean) defaultValue(fields()[1]); + record.uptimeLast24h = + fieldSetFlags()[2] ? this.uptimeLast24h : (java.lang.Double) defaultValue(fields()[2]); + record.uptimeLast7d = + fieldSetFlags()[3] ? 
this.uptimeLast7d : (java.lang.Double) defaultValue(fields()[3]); + record.lastUpdate = + fieldSetFlags()[4] ? this.lastUpdate : (java.lang.Long) defaultValue(fields()[4]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.analytics.AvailabilityView.Builder setEndpoint( - sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.analytics.AvailabilityView.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Gets the value of the 'upNow' field */ - public java.lang.Boolean getUpNow() { - return upNow; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Sets the value of the 'upNow' field */ - public sparqles.avro.analytics.AvailabilityView.Builder setUpNow(boolean value) { - 
validate(fields()[1], value); - this.upNow = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); - /** Checks whether the 'upNow' field has been set */ - public boolean hasUpNow() { - return fieldSetFlags()[1]; - } + out.writeBoolean(this.upNow); - /** Clears the value of the 'upNow' field */ - public sparqles.avro.analytics.AvailabilityView.Builder clearUpNow() { - fieldSetFlags()[1] = false; - return this; - } + out.writeDouble(this.uptimeLast24h); - /** Gets the value of the 'uptimeLast24h' field */ - public java.lang.Double getUptimeLast24h() { - return uptimeLast24h; - } + out.writeDouble(this.uptimeLast7d); - /** Sets the value of the 'uptimeLast24h' field */ - public sparqles.avro.analytics.AvailabilityView.Builder setUptimeLast24h(double value) { - validate(fields()[2], value); - this.uptimeLast24h = value; - fieldSetFlags()[2] = true; - return this; - } + out.writeLong(this.lastUpdate); + } - /** Checks whether the 'uptimeLast24h' field has been set */ - public boolean hasUptimeLast24h() { - return fieldSetFlags()[2]; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); - /** Clears the value of the 'uptimeLast24h' field */ - public sparqles.avro.analytics.AvailabilityView.Builder clearUptimeLast24h() { - fieldSetFlags()[2] = false; - return this; - } + this.upNow = in.readBoolean(); - /** Gets the value of the 'uptimeLast7d' field */ - public java.lang.Double getUptimeLast7d() { - return uptimeLast7d; - } + this.uptimeLast24h = in.readDouble(); - /** Sets the value of the 'uptimeLast7d' field */ - public 
sparqles.avro.analytics.AvailabilityView.Builder setUptimeLast7d(double value) { - validate(fields()[3], value); - this.uptimeLast7d = value; - fieldSetFlags()[3] = true; - return this; - } + this.uptimeLast7d = in.readDouble(); - /** Checks whether the 'uptimeLast7d' field has been set */ - public boolean hasUptimeLast7d() { - return fieldSetFlags()[3]; - } + this.lastUpdate = in.readLong(); - /** Clears the value of the 'uptimeLast7d' field */ - public sparqles.avro.analytics.AvailabilityView.Builder clearUptimeLast7d() { - fieldSetFlags()[3] = false; - return this; - } + } else { + for (int i = 0; i < 5; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; - /** Gets the value of the 'lastUpdate' field */ - public java.lang.Long getLastUpdate() { - return lastUpdate; - } + case 1: + this.upNow = in.readBoolean(); + break; - /** Sets the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.AvailabilityView.Builder setLastUpdate(long value) { - validate(fields()[4], value); - this.lastUpdate = value; - fieldSetFlags()[4] = true; - return this; - } + case 2: + this.uptimeLast24h = in.readDouble(); + break; - /** Checks whether the 'lastUpdate' field has been set */ - public boolean hasLastUpdate() { - return fieldSetFlags()[4]; - } + case 3: + this.uptimeLast7d = in.readDouble(); + break; - /** Clears the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.AvailabilityView.Builder clearLastUpdate() { - fieldSetFlags()[4] = false; - return this; - } + case 4: + this.lastUpdate = in.readLong(); + break; - @Override - public AvailabilityView build() { - try { - AvailabilityView record = new AvailabilityView(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.upNow = - fieldSetFlags()[1] - ? 
this.upNow - : (java.lang.Boolean) defaultValue(fields()[1]); - record.uptimeLast24h = - fieldSetFlags()[2] - ? this.uptimeLast24h - : (java.lang.Double) defaultValue(fields()[2]); - record.uptimeLast7d = - fieldSetFlags()[3] - ? this.uptimeLast7d - : (java.lang.Double) defaultValue(fields()[3]); - record.lastUpdate = - fieldSetFlags()[4] - ? this.lastUpdate - : (java.lang.Long) defaultValue(fields()[4]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/CalculationView.java b/backend/src/main/java/sparqles/avro/analytics/CalculationView.java new file mode 100644 index 00000000..e1b7715f --- /dev/null +++ b/backend/src/main/java/sparqles/avro/analytics/CalculationView.java @@ -0,0 +1,1009 @@ +/** + * Autogenerated by Avro + * + *

DO NOT EDIT DIRECTLY + */ +package sparqles.avro.analytics; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + +@org.apache.avro.specific.AvroGenerated +public class CalculationView extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -7721487622059393994L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"CalculationView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"VoID\",\"type\":\"boolean\"},{\"name\":\"VoIDPart\",\"type\":\"boolean\"},{\"name\":\"SD\",\"type\":\"boolean\"},{\"name\":\"SDPart\",\"type\":\"boolean\"},{\"name\":\"coherence\",\"type\":\"double\"},{\"name\":\"RS\",\"type\":\"double\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this CalculationView to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a CalculationView from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a CalculationView instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static CalculationView fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private boolean VoID; + private boolean VoIDPart; + private boolean SD; + private boolean SDPart; + private double coherence; + private double RS; + private long lastUpdate; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). 
+ */ + public CalculationView() {} + + /** + * All-args constructor. + * + * @param endpoint The new value for endpoint + * @param VoID The new value for VoID + * @param VoIDPart The new value for VoIDPart + * @param SD The new value for SD + * @param SDPart The new value for SDPart + * @param coherence The new value for coherence + * @param RS The new value for RS + * @param lastUpdate The new value for lastUpdate + */ + public CalculationView( + sparqles.avro.Endpoint endpoint, + java.lang.Boolean VoID, + java.lang.Boolean VoIDPart, + java.lang.Boolean SD, + java.lang.Boolean SDPart, + java.lang.Double coherence, + java.lang.Double RS, + java.lang.Long lastUpdate) { + this.endpoint = endpoint; + this.VoID = VoID; + this.VoIDPart = VoIDPart; + this.SD = SD; + this.SDPart = SDPart; + this.coherence = coherence; + this.RS = RS; + this.lastUpdate = lastUpdate; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return VoID; + case 2: + return VoIDPart; + case 3: + return SD; + case 4: + return SDPart; + case 5: + return coherence; + case 6: + return RS; + case 7: + return lastUpdate; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + VoID = (java.lang.Boolean) value$; + break; + case 2: + VoIDPart = (java.lang.Boolean) value$; + break; + case 3: + SD = (java.lang.Boolean) value$; + break; + case 4: + SDPart = (java.lang.Boolean) value$; + break; + case 5: + coherence = (java.lang.Double) value$; + break; + case 6: + RS = (java.lang.Double) value$; + break; + case 7: + lastUpdate = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value of the 'VoID' field. + */ + public boolean getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value the value to set. + */ + public void setVoID(boolean value) { + this.VoID = value; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value of the 'VoIDPart' field. + */ + public boolean getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value the value to set. + */ + public void setVoIDPart(boolean value) { + this.VoIDPart = value; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value of the 'SD' field. + */ + public boolean getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value the value to set. 
+ */ + public void setSD(boolean value) { + this.SD = value; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value of the 'SDPart' field. + */ + public boolean getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value the value to set. + */ + public void setSDPart(boolean value) { + this.SDPart = value; + } + + /** + * Gets the value of the 'coherence' field. + * + * @return The value of the 'coherence' field. + */ + public double getCoherence() { + return coherence; + } + + /** + * Sets the value of the 'coherence' field. + * + * @param value the value to set. + */ + public void setCoherence(double value) { + this.coherence = value; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value of the 'RS' field. + */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value the value to set. + */ + public void setRS(double value) { + this.RS = value; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value of the 'lastUpdate' field. + */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value the value to set. + */ + public void setLastUpdate(long value) { + this.lastUpdate = value; + } + + /** + * Creates a new CalculationView RecordBuilder. + * + * @return A new CalculationView RecordBuilder + */ + public static sparqles.avro.analytics.CalculationView.Builder newBuilder() { + return new sparqles.avro.analytics.CalculationView.Builder(); + } + + /** + * Creates a new CalculationView RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new CalculationView RecordBuilder + */ + public static sparqles.avro.analytics.CalculationView.Builder newBuilder( + sparqles.avro.analytics.CalculationView.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.CalculationView.Builder(); + } else { + return new sparqles.avro.analytics.CalculationView.Builder(other); + } + } + + /** + * Creates a new CalculationView RecordBuilder by copying an existing CalculationView instance. + * + * @param other The existing instance to copy. + * @return A new CalculationView RecordBuilder + */ + public static sparqles.avro.analytics.CalculationView.Builder newBuilder( + sparqles.avro.analytics.CalculationView other) { + if (other == null) { + return new sparqles.avro.analytics.CalculationView.Builder(); + } else { + return new sparqles.avro.analytics.CalculationView.Builder(other); + } + } + + /** RecordBuilder for CalculationView instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private boolean VoID; + private boolean VoIDPart; + private boolean SD; + private boolean SDPart; + private double coherence; + private double RS; + private long lastUpdate; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.CalculationView.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.VoID)) { + this.VoID = data().deepCopy(fields()[1].schema(), other.VoID); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[2].schema(), other.VoIDPart); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.SD)) { + this.SD = data().deepCopy(fields()[3].schema(), other.SD); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[4].schema(), other.SDPart); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.coherence)) { + this.coherence = data().deepCopy(fields()[5].schema(), other.coherence); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.RS)) { + this.RS = data().deepCopy(fields()[6].schema(), other.RS); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (isValidValue(fields()[7], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[7].schema(), other.lastUpdate); + fieldSetFlags()[7] = other.fieldSetFlags()[7]; + } + } + + /** + * Creates a Builder by copying an existing CalculationView instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.CalculationView other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.VoID)) { + this.VoID = data().deepCopy(fields()[1].schema(), other.VoID); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[2].schema(), other.VoIDPart); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.SD)) { + this.SD = data().deepCopy(fields()[3].schema(), other.SD); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[4].schema(), other.SDPart); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.coherence)) { + this.coherence = data().deepCopy(fields()[5].schema(), other.coherence); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.RS)) { + this.RS = data().deepCopy(fields()[6].schema(), other.RS); + fieldSetFlags()[6] = true; + } + if (isValidValue(fields()[7], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[7].schema(), other.lastUpdate); + fieldSetFlags()[7] = true; + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value The value of 'endpoint'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setEndpoint( + sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'endpoint' field has been set. 
+ * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); + } + } + return endpointBuilder; + } + + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; + } + + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; + } + + /** + * Clears the value of the 'endpoint' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value. + */ + public boolean getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value The value of 'VoID'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setVoID(boolean value) { + validate(fields()[1], value); + this.VoID = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'VoID' field has been set. + * + * @return True if the 'VoID' field has been set, false otherwise. 
+ */ + public boolean hasVoID() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'VoID' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder clearVoID() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value. + */ + public boolean getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value The value of 'VoIDPart'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setVoIDPart(boolean value) { + validate(fields()[2], value); + this.VoIDPart = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'VoIDPart' field has been set. + * + * @return True if the 'VoIDPart' field has been set, false otherwise. + */ + public boolean hasVoIDPart() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'VoIDPart' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder clearVoIDPart() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value. + */ + public boolean getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value The value of 'SD'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setSD(boolean value) { + validate(fields()[3], value); + this.SD = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'SD' field has been set. + * + * @return True if the 'SD' field has been set, false otherwise. + */ + public boolean hasSD() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'SD' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.CalculationView.Builder clearSD() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value. + */ + public boolean getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value The value of 'SDPart'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setSDPart(boolean value) { + validate(fields()[4], value); + this.SDPart = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'SDPart' field has been set. + * + * @return True if the 'SDPart' field has been set, false otherwise. + */ + public boolean hasSDPart() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'SDPart' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder clearSDPart() { + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'coherence' field. + * + * @return The value. + */ + public double getCoherence() { + return coherence; + } + + /** + * Sets the value of the 'coherence' field. + * + * @param value The value of 'coherence'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setCoherence(double value) { + validate(fields()[5], value); + this.coherence = value; + fieldSetFlags()[5] = true; + return this; + } + + /** + * Checks whether the 'coherence' field has been set. + * + * @return True if the 'coherence' field has been set, false otherwise. + */ + public boolean hasCoherence() { + return fieldSetFlags()[5]; + } + + /** + * Clears the value of the 'coherence' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder clearCoherence() { + fieldSetFlags()[5] = false; + return this; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value. 
+ */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value The value of 'RS'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setRS(double value) { + validate(fields()[6], value); + this.RS = value; + fieldSetFlags()[6] = true; + return this; + } + + /** + * Checks whether the 'RS' field has been set. + * + * @return True if the 'RS' field has been set, false otherwise. + */ + public boolean hasRS() { + return fieldSetFlags()[6]; + } + + /** + * Clears the value of the 'RS' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder clearRS() { + fieldSetFlags()[6] = false; + return this; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value. + */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value The value of 'lastUpdate'. + * @return This builder. + */ + public sparqles.avro.analytics.CalculationView.Builder setLastUpdate(long value) { + validate(fields()[7], value); + this.lastUpdate = value; + fieldSetFlags()[7] = true; + return this; + } + + /** + * Checks whether the 'lastUpdate' field has been set. + * + * @return True if the 'lastUpdate' field has been set, false otherwise. + */ + public boolean hasLastUpdate() { + return fieldSetFlags()[7]; + } + + /** + * Clears the value of the 'lastUpdate' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.CalculationView.Builder clearLastUpdate() { + fieldSetFlags()[7] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public CalculationView build() { + try { + CalculationView record = new CalculationView(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); + } + record.VoID = + fieldSetFlags()[1] ? this.VoID : (java.lang.Boolean) defaultValue(fields()[1]); + record.VoIDPart = + fieldSetFlags()[2] ? this.VoIDPart : (java.lang.Boolean) defaultValue(fields()[2]); + record.SD = fieldSetFlags()[3] ? this.SD : (java.lang.Boolean) defaultValue(fields()[3]); + record.SDPart = + fieldSetFlags()[4] ? this.SDPart : (java.lang.Boolean) defaultValue(fields()[4]); + record.coherence = + fieldSetFlags()[5] ? this.coherence : (java.lang.Double) defaultValue(fields()[5]); + record.RS = fieldSetFlags()[6] ? this.RS : (java.lang.Double) defaultValue(fields()[6]); + record.lastUpdate = + fieldSetFlags()[7] ? 
this.lastUpdate : (java.lang.Long) defaultValue(fields()[7]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); + + out.writeBoolean(this.VoID); + + out.writeBoolean(this.VoIDPart); + + out.writeBoolean(this.SD); + + out.writeBoolean(this.SDPart); + + out.writeDouble(this.coherence); + + out.writeDouble(this.RS); + + out.writeLong(this.lastUpdate); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + + this.VoID = in.readBoolean(); + + this.VoIDPart = in.readBoolean(); + + this.SD = in.readBoolean(); + + this.SDPart = in.readBoolean(); + + this.coherence = in.readDouble(); + + this.RS = in.readDouble(); + + this.lastUpdate = in.readLong(); + + } else { + for (int i = 0; i < 8; i++) { + switch (fieldOrder[i].pos()) { + 
case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; + + case 1: + this.VoID = in.readBoolean(); + break; + + case 2: + this.VoIDPart = in.readBoolean(); + break; + + case 3: + this.SD = in.readBoolean(); + break; + + case 4: + this.SDPart = in.readBoolean(); + break; + + case 5: + this.coherence = in.readDouble(); + break; + + case 6: + this.RS = in.readDouble(); + break; + + case 7: + this.lastUpdate = in.readLong(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/backend/src/main/java/sparqles/avro/analytics/DiscoverabilityView.java b/backend/src/main/java/sparqles/avro/analytics/DiscoverabilityView.java index e2b96661..cf6ccac6 100644 --- a/backend/src/main/java/sparqles/avro/analytics/DiscoverabilityView.java +++ b/backend/src/main/java/sparqles/avro/analytics/DiscoverabilityView.java @@ -5,398 +5,754 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class DiscoverabilityView extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"DiscoverabilityView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"VoID\",\"type\":\"boolean\"},{\"name\":\"SD\",\"type\":\"boolean\"},{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public boolean VoID; - @Deprecated public boolean SD; - @Deprecated public java.lang.CharSequence serverName; - @Deprecated public long lastUpdate; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 7160642562817123913L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"DiscoverabilityView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"VoID\",\"type\":\"boolean\"},{\"name\":\"SD\",\"type\":\"boolean\"},{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + 
private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this DiscoverabilityView to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a DiscoverabilityView from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a DiscoverabilityView instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static DiscoverabilityView fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private boolean VoID; + private boolean SD; + private java.lang.CharSequence serverName; + private long lastUpdate; + + /** + * Default constructor. 
Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public DiscoverabilityView() {} + + /** + * All-args constructor. + * + * @param endpoint The new value for endpoint + * @param VoID The new value for VoID + * @param SD The new value for SD + * @param serverName The new value for serverName + * @param lastUpdate The new value for lastUpdate + */ + public DiscoverabilityView( + sparqles.avro.Endpoint endpoint, + java.lang.Boolean VoID, + java.lang.Boolean SD, + java.lang.CharSequence serverName, + java.lang.Long lastUpdate) { + this.endpoint = endpoint; + this.VoID = VoID; + this.SD = SD; + this.serverName = serverName; + this.lastUpdate = lastUpdate; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return VoID; + case 2: + return SD; + case 3: + return serverName; + case 4: + return lastUpdate; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + VoID = (java.lang.Boolean) value$; + break; + case 2: + SD = (java.lang.Boolean) value$; + break; + case 3: + serverName = (java.lang.CharSequence) value$; + break; + case 4: + lastUpdate = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. 
+ */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value of the 'VoID' field. + */ + public boolean getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value the value to set. + */ + public void setVoID(boolean value) { + this.VoID = value; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value of the 'SD' field. + */ + public boolean getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value the value to set. + */ + public void setSD(boolean value) { + this.SD = value; + } + + /** + * Gets the value of the 'serverName' field. + * + * @return The value of the 'serverName' field. + */ + public java.lang.CharSequence getServerName() { + return serverName; + } + + /** + * Sets the value of the 'serverName' field. + * + * @param value the value to set. + */ + public void setServerName(java.lang.CharSequence value) { + this.serverName = value; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value of the 'lastUpdate' field. + */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value the value to set. + */ + public void setLastUpdate(long value) { + this.lastUpdate = value; + } + + /** + * Creates a new DiscoverabilityView RecordBuilder. + * + * @return A new DiscoverabilityView RecordBuilder + */ + public static sparqles.avro.analytics.DiscoverabilityView.Builder newBuilder() { + return new sparqles.avro.analytics.DiscoverabilityView.Builder(); + } + + /** + * Creates a new DiscoverabilityView RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new DiscoverabilityView RecordBuilder + */ + public static sparqles.avro.analytics.DiscoverabilityView.Builder newBuilder( + sparqles.avro.analytics.DiscoverabilityView.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.DiscoverabilityView.Builder(); + } else { + return new sparqles.avro.analytics.DiscoverabilityView.Builder(other); + } + } + + /** + * Creates a new DiscoverabilityView RecordBuilder by copying an existing DiscoverabilityView + * instance. + * + * @param other The existing instance to copy. + * @return A new DiscoverabilityView RecordBuilder + */ + public static sparqles.avro.analytics.DiscoverabilityView.Builder newBuilder( + sparqles.avro.analytics.DiscoverabilityView other) { + if (other == null) { + return new sparqles.avro.analytics.DiscoverabilityView.Builder(); + } else { + return new sparqles.avro.analytics.DiscoverabilityView.Builder(other); + } + } + + /** RecordBuilder for DiscoverabilityView instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private boolean VoID; + private boolean SD; + private java.lang.CharSequence serverName; + private long lastUpdate; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public DiscoverabilityView() {} + private Builder(sparqles.avro.analytics.DiscoverabilityView.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.VoID)) { + this.VoID = data().deepCopy(fields()[1].schema(), other.VoID); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.SD)) { + this.SD = data().deepCopy(fields()[2].schema(), other.SD); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.serverName)) { + this.serverName = data().deepCopy(fields()[3].schema(), other.serverName); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + } - /** All-args constructor. */ - public DiscoverabilityView( - sparqles.avro.Endpoint endpoint, - java.lang.Boolean VoID, - java.lang.Boolean SD, - java.lang.CharSequence serverName, - java.lang.Long lastUpdate) { - this.endpoint = endpoint; - this.VoID = VoID; - this.SD = SD; - this.serverName = serverName; - this.lastUpdate = lastUpdate; + /** + * Creates a Builder by copying an existing DiscoverabilityView instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.DiscoverabilityView other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.VoID)) { + this.VoID = data().deepCopy(fields()[1].schema(), other.VoID); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.SD)) { + this.SD = data().deepCopy(fields()[2].schema(), other.SD); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.serverName)) { + this.serverName = data().deepCopy(fields()[3].schema(), other.serverName); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); + fieldSetFlags()[4] = true; + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; } - /** Creates a new DiscoverabilityView RecordBuilder */ - public static sparqles.avro.analytics.DiscoverabilityView.Builder newBuilder() { - return new sparqles.avro.analytics.DiscoverabilityView.Builder(); + /** + * Sets the value of the 'endpoint' field. + * + * @param value The value of 'endpoint'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.DiscoverabilityView.Builder setEndpoint( + sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - /** Creates a new DiscoverabilityView RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.DiscoverabilityView.Builder newBuilder( - sparqles.avro.analytics.DiscoverabilityView.Builder other) { - return new sparqles.avro.analytics.DiscoverabilityView.Builder(other); + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; } /** - * Creates a new DiscoverabilityView RecordBuilder by copying an existing DiscoverabilityView - * instance + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. */ - public static sparqles.avro.analytics.DiscoverabilityView.Builder newBuilder( - sparqles.avro.analytics.DiscoverabilityView other) { - return new sparqles.avro.analytics.DiscoverabilityView.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return VoID; - case 2: - return SD; - case 3: - return serverName; - case 4: - return lastUpdate; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); } + } + return endpointBuilder; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - VoID = (java.lang.Boolean) value$; - break; - case 2: - SD = (java.lang.Boolean) value$; - break; - case 3: - serverName = (java.lang.CharSequence) value$; - break; - case 4: - lastUpdate = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.DiscoverabilityView.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; } - /** Gets the value of the 'endpoint' field. */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; } /** - * Sets the value of the 'endpoint' field. + * Clears the value of the 'endpoint' field. * - * @param value the value to set. + * @return This builder. */ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.analytics.DiscoverabilityView.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'VoID' field. */ - public java.lang.Boolean getVoID() { - return VoID; + /** + * Gets the value of the 'VoID' field. + * + * @return The value. + */ + public boolean getVoID() { + return VoID; } /** * Sets the value of the 'VoID' field. * - * @param value the value to set. + * @param value The value of 'VoID'. + * @return This builder. 
*/ - public void setVoID(java.lang.Boolean value) { - this.VoID = value; + public sparqles.avro.analytics.DiscoverabilityView.Builder setVoID(boolean value) { + validate(fields()[1], value); + this.VoID = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'SD' field. */ - public java.lang.Boolean getSD() { - return SD; + /** + * Checks whether the 'VoID' field has been set. + * + * @return True if the 'VoID' field has been set, false otherwise. + */ + public boolean hasVoID() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'VoID' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.DiscoverabilityView.Builder clearVoID() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value. + */ + public boolean getSD() { + return SD; } /** * Sets the value of the 'SD' field. * - * @param value the value to set. + * @param value The value of 'SD'. + * @return This builder. */ - public void setSD(java.lang.Boolean value) { - this.SD = value; + public sparqles.avro.analytics.DiscoverabilityView.Builder setSD(boolean value) { + validate(fields()[2], value); + this.SD = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'serverName' field. */ + /** + * Checks whether the 'SD' field has been set. + * + * @return True if the 'SD' field has been set, false otherwise. + */ + public boolean hasSD() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'SD' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.DiscoverabilityView.Builder clearSD() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'serverName' field. + * + * @return The value. + */ public java.lang.CharSequence getServerName() { - return serverName; + return serverName; } /** * Sets the value of the 'serverName' field. * - * @param value the value to set. 
+ * @param value The value of 'serverName'. + * @return This builder. */ - public void setServerName(java.lang.CharSequence value) { - this.serverName = value; + public sparqles.avro.analytics.DiscoverabilityView.Builder setServerName( + java.lang.CharSequence value) { + validate(fields()[3], value); + this.serverName = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'lastUpdate' field. */ - public java.lang.Long getLastUpdate() { - return lastUpdate; + /** + * Checks whether the 'serverName' field has been set. + * + * @return True if the 'serverName' field has been set, false otherwise. + */ + public boolean hasServerName() { + return fieldSetFlags()[3]; } /** - * Sets the value of the 'lastUpdate' field. + * Clears the value of the 'serverName' field. * - * @param value the value to set. + * @return This builder. */ - public void setLastUpdate(java.lang.Long value) { - this.lastUpdate = value; + public sparqles.avro.analytics.DiscoverabilityView.Builder clearServerName() { + serverName = null; + fieldSetFlags()[3] = false; + return this; } - /** RecordBuilder for DiscoverabilityView instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value. + */ + public long getLastUpdate() { + return lastUpdate; + } - private sparqles.avro.Endpoint endpoint; - private boolean VoID; - private boolean SD; - private java.lang.CharSequence serverName; - private long lastUpdate; + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value The value of 'lastUpdate'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.DiscoverabilityView.Builder setLastUpdate(long value) { + validate(fields()[4], value); + this.lastUpdate = value; + fieldSetFlags()[4] = true; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.DiscoverabilityView.SCHEMA$); - } + /** + * Checks whether the 'lastUpdate' field has been set. + * + * @return True if the 'lastUpdate' field has been set, false otherwise. + */ + public boolean hasLastUpdate() { + return fieldSetFlags()[4]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.DiscoverabilityView.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.VoID)) { - this.VoID = data().deepCopy(fields()[1].schema(), other.VoID); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.SD)) { - this.SD = data().deepCopy(fields()[2].schema(), other.SD); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.serverName)) { - this.serverName = data().deepCopy(fields()[3].schema(), other.serverName); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); - fieldSetFlags()[4] = true; - } - } + /** + * Clears the value of the 'lastUpdate' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.DiscoverabilityView.Builder clearLastUpdate() { + fieldSetFlags()[4] = false; + return this; + } - /** Creates a Builder by copying an existing DiscoverabilityView instance */ - private Builder(sparqles.avro.analytics.DiscoverabilityView other) { - super(sparqles.avro.analytics.DiscoverabilityView.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.VoID)) { - this.VoID = data().deepCopy(fields()[1].schema(), other.VoID); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.SD)) { - this.SD = data().deepCopy(fields()[2].schema(), other.SD); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.serverName)) { - this.serverName = data().deepCopy(fields()[3].schema(), other.serverName); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[4].schema(), other.lastUpdate); - fieldSetFlags()[4] = true; - } + @Override + @SuppressWarnings("unchecked") + public DiscoverabilityView build() { + try { + DiscoverabilityView record = new DiscoverabilityView(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } + record.VoID = + fieldSetFlags()[1] ? this.VoID : (java.lang.Boolean) defaultValue(fields()[1]); + record.SD = fieldSetFlags()[2] ? this.SD : (java.lang.Boolean) defaultValue(fields()[2]); + record.serverName = + fieldSetFlags()[3] + ? this.serverName + : (java.lang.CharSequence) defaultValue(fields()[3]); + record.lastUpdate = + fieldSetFlags()[4] ? 
this.lastUpdate : (java.lang.Long) defaultValue(fields()[4]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder setEndpoint( - sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Gets the value of the 'VoID' field */ - public java.lang.Boolean getVoID() { - return VoID; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Sets the value of the 'VoID' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder setVoID(boolean value) { - validate(fields()[1], value); - this.VoID = value; - fieldSetFlags()[1] = true; - return this; - } + @Override 
+ public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); - /** Checks whether the 'VoID' field has been set */ - public boolean hasVoID() { - return fieldSetFlags()[1]; - } + out.writeBoolean(this.VoID); - /** Clears the value of the 'VoID' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder clearVoID() { - fieldSetFlags()[1] = false; - return this; - } + out.writeBoolean(this.SD); - /** Gets the value of the 'SD' field */ - public java.lang.Boolean getSD() { - return SD; - } + out.writeString(this.serverName); - /** Sets the value of the 'SD' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder setSD(boolean value) { - validate(fields()[2], value); - this.SD = value; - fieldSetFlags()[2] = true; - return this; - } + out.writeLong(this.lastUpdate); + } - /** Checks whether the 'SD' field has been set */ - public boolean hasSD() { - return fieldSetFlags()[2]; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); - /** Clears the value of the 'SD' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder clearSD() { - fieldSetFlags()[2] = false; - return this; - } + this.VoID = in.readBoolean(); - /** Gets the value of the 'serverName' field */ - public java.lang.CharSequence getServerName() { - return serverName; - } + this.SD = in.readBoolean(); - /** Sets the value of the 'serverName' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder setServerName( - java.lang.CharSequence value) { - validate(fields()[3], value); - this.serverName = value; - fieldSetFlags()[3] = true; - return this; - } + this.serverName = + in.readString(this.serverName instanceof Utf8 ? 
(Utf8) this.serverName : null); - /** Checks whether the 'serverName' field has been set */ - public boolean hasServerName() { - return fieldSetFlags()[3]; - } + this.lastUpdate = in.readLong(); - /** Clears the value of the 'serverName' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder clearServerName() { - serverName = null; - fieldSetFlags()[3] = false; - return this; - } + } else { + for (int i = 0; i < 5; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; - /** Gets the value of the 'lastUpdate' field */ - public java.lang.Long getLastUpdate() { - return lastUpdate; - } + case 1: + this.VoID = in.readBoolean(); + break; - /** Sets the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder setLastUpdate(long value) { - validate(fields()[4], value); - this.lastUpdate = value; - fieldSetFlags()[4] = true; - return this; - } + case 2: + this.SD = in.readBoolean(); + break; - /** Checks whether the 'lastUpdate' field has been set */ - public boolean hasLastUpdate() { - return fieldSetFlags()[4]; - } + case 3: + this.serverName = + in.readString(this.serverName instanceof Utf8 ? (Utf8) this.serverName : null); + break; - /** Clears the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.DiscoverabilityView.Builder clearLastUpdate() { - fieldSetFlags()[4] = false; - return this; - } + case 4: + this.lastUpdate = in.readLong(); + break; - @Override - public DiscoverabilityView build() { - try { - DiscoverabilityView record = new DiscoverabilityView(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.VoID = - fieldSetFlags()[1] - ? this.VoID - : (java.lang.Boolean) defaultValue(fields()[1]); - record.SD = - fieldSetFlags()[2] - ? 
this.SD - : (java.lang.Boolean) defaultValue(fields()[2]); - record.serverName = - fieldSetFlags()[3] - ? this.serverName - : (java.lang.CharSequence) defaultValue(fields()[3]); - record.lastUpdate = - fieldSetFlags()[4] - ? this.lastUpdate - : (java.lang.Long) defaultValue(fields()[4]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPView.java b/backend/src/main/java/sparqles/avro/analytics/EPView.java index e29db70a..939b55eb 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPView.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPView.java @@ -5,405 +5,1166 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class EPView extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"EPView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"availability\",\"type\":{\"type\":\"record\",\"name\":\"EPViewAvailability\",\"fields\":[{\"name\":\"upNow\",\"type\":\"boolean\"},{\"name\":\"testRuns\",\"type\":\"int\"},{\"name\":\"uptimeLast24h\",\"type\":\"double\"},{\"name\":\"uptimeLast7d\",\"type\":\"double\"},{\"name\":\"uptimeLast31d\",\"type\":\"double\"},{\"name\":\"uptimeOverall\",\"type\":\"double\"},{\"name\":\"data\",\"type\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}}]}},{\"name\":\"performance\",\"type\":{\"type\":\"record\",\"name\":\"EPViewPerformance\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"ask\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}}]}}},{\"name\":\"join\",\"type\":{\"type\":\"array\",\"items\":\"EPViewPerformanceData\"}}]}},{\"name\":\"interoperability\",\"type\":{\"type\":\"record\",\"name\":\"EPViewInteroperability\",\
"fields\":[{\"name\":\"SPARQL1Features\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewInteroperabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}},{\"name\":\"SPARQL11Features\",\"type\":{\"type\":\"array\",\"items\":\"EPViewInteroperabilityData\"}}]}},{\"name\":\"discoverability\",\"type\":{\"type\":\"record\",\"name\":\"EPViewDiscoverability\",\"fields\":[{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"VoIDDescription\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewDiscoverabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"}]}}},{\"name\":\"SDDescription\",\"type\":{\"type\":\"array\",\"items\":\"EPViewDiscoverabilityData\"}}]}}]}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public sparqles.avro.analytics.EPViewAvailability availability; - @Deprecated public sparqles.avro.analytics.EPViewPerformance performance; - @Deprecated public sparqles.avro.analytics.EPViewInteroperability interoperability; - @Deprecated public sparqles.avro.analytics.EPViewDiscoverability discoverability; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPView() {} - - /** All-args constructor. 
*/ - public EPView( - sparqles.avro.Endpoint endpoint, - sparqles.avro.analytics.EPViewAvailability availability, - sparqles.avro.analytics.EPViewPerformance performance, - sparqles.avro.analytics.EPViewInteroperability interoperability, - sparqles.avro.analytics.EPViewDiscoverability discoverability) { - this.endpoint = endpoint; - this.availability = availability; - this.performance = performance; - this.interoperability = interoperability; - this.discoverability = discoverability; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new EPView RecordBuilder */ - public static sparqles.avro.analytics.EPView.Builder newBuilder() { - return new sparqles.avro.analytics.EPView.Builder(); - } - - /** Creates a new EPView RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPView.Builder newBuilder( - sparqles.avro.analytics.EPView.Builder other) { - return new sparqles.avro.analytics.EPView.Builder(other); - } - - /** Creates a new EPView RecordBuilder by copying an existing EPView instance */ - public static sparqles.avro.analytics.EPView.Builder newBuilder( - sparqles.avro.analytics.EPView other) { - return new sparqles.avro.analytics.EPView.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return availability; - case 2: - return performance; - case 3: - return interoperability; - case 4: - return discoverability; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 4059786559800603384L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"availability\",\"type\":{\"type\":\"record\",\"name\":\"EPViewAvailability\",\"fields\":[{\"name\":\"upNow\",\"type\":\"boolean\"},{\"name\":\"testRuns\",\"type\":\"int\"},{\"name\":\"uptimeLast24h\",\"type\":\"double\"},{\"name\":\"uptimeLast7d\",\"type\":\"double\"},{\"name\":\"uptimeLast31d\",\"type\":\"double\"},{\"name\":\"uptimeOverall\",\"type\":\"double\"},{\"name\":\"data\",\"type\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}}]}},{\"name\":\"performance\",\"type\":{\"type\":\"record\",\"name\":\"EPViewPerformance\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"ask\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceData\",\"fields\":[{\"na
me\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}}]}}},{\"name\":\"join\",\"type\":{\"type\":\"array\",\"items\":\"EPViewPerformanceData\"}}]}},{\"name\":\"interoperability\",\"type\":{\"type\":\"record\",\"name\":\"EPViewInteroperability\",\"fields\":[{\"name\":\"SPARQL1Features\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewInteroperabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}},{\"name\":\"SPARQL11Features\",\"type\":{\"type\":\"array\",\"items\":\"EPViewInteroperabilityData\"}}]}},{\"name\":\"discoverability\",\"type\":{\"type\":\"record\",\"name\":\"EPViewDiscoverability\",\"fields\":[{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"VoIDDescription\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewDiscoverabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"}]}}},{\"name\":\"SDDescription\",\"type\":{\"type\":\"array\",\"items\":\"EPViewDiscoverabilityData\"}}]}},{\"name\":\"calculation\",\"type\":{\"type\":\"record\",\"name\":\"EPViewCalculation\",\"fields\":[{\"name\":\"triples\",\"type\":\"long\"},{\"name\":\"entities\",\"type\":\"long\"},{\"name\":\"classes\",\"type\":\"long\"},{\"name\":\"properties\",\"type\":\"long\"},{\"name\":\"distinctSubjects\",\"type\":\"long\"},{\"name\":\"distinctObjects\",\"type\":\"long\"},{\"name\":\"exampleResources\",\"type\":{\"type\":\"array\",\"items\":\"string\"}},{\"name\":\"VoID\",\"type\":\"string\"},{\"name\":\"VoIDPart\",\"type\":\"boolean\"},{\"name\":\"SD\",\"type\":\"string\"},{\"
name\":\"SDPart\",\"type\":\"boolean\"},{\"name\":\"coherence\",\"type\":\"double\"},{\"name\":\"RS\",\"type\":\"double\"}]},\"default\":{\"triples\":-1,\"entities\":-1,\"classes\":-1,\"properties\":-1,\"distinctSubjects\":-1,\"distinctObjects\":-1,\"exampleResources\":[],\"VoID\":\"n/a\",\"VoIDPart\":false,\"SD\":\"n/a\",\"SDPart\":false,\"coherence\":-1.0,\"RS\":-1.0}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPView to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPView from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPView instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPView fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.analytics.EPViewAvailability availability; + private sparqles.avro.analytics.EPViewPerformance performance; + private sparqles.avro.analytics.EPViewInteroperability interoperability; + private sparqles.avro.analytics.EPViewDiscoverability discoverability; + private sparqles.avro.analytics.EPViewCalculation calculation; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPView() {} + + /** + * All-args constructor. 
+ * + * @param endpoint The new value for endpoint + * @param availability The new value for availability + * @param performance The new value for performance + * @param interoperability The new value for interoperability + * @param discoverability The new value for discoverability + * @param calculation The new value for calculation + */ + public EPView( + sparqles.avro.Endpoint endpoint, + sparqles.avro.analytics.EPViewAvailability availability, + sparqles.avro.analytics.EPViewPerformance performance, + sparqles.avro.analytics.EPViewInteroperability interoperability, + sparqles.avro.analytics.EPViewDiscoverability discoverability, + sparqles.avro.analytics.EPViewCalculation calculation) { + this.endpoint = endpoint; + this.availability = availability; + this.performance = performance; + this.interoperability = interoperability; + this.discoverability = discoverability; + this.calculation = calculation; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return availability; + case 2: + return performance; + case 3: + return interoperability; + case 4: + return discoverability; + case 5: + return calculation; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - availability = (sparqles.avro.analytics.EPViewAvailability) value$; - break; - case 2: - performance = (sparqles.avro.analytics.EPViewPerformance) value$; - break; - case 3: - interoperability = (sparqles.avro.analytics.EPViewInteroperability) value$; - break; - case 4: - discoverability = (sparqles.avro.analytics.EPViewDiscoverability) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + availability = (sparqles.avro.analytics.EPViewAvailability) value$; + break; + case 2: + performance = (sparqles.avro.analytics.EPViewPerformance) value$; + break; + case 3: + interoperability = (sparqles.avro.analytics.EPViewInteroperability) value$; + break; + case 4: + discoverability = (sparqles.avro.analytics.EPViewDiscoverability) value$; + break; + case 5: + calculation = (sparqles.avro.analytics.EPViewCalculation) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'availability' field. + * + * @return The value of the 'availability' field. 
+ */ + public sparqles.avro.analytics.EPViewAvailability getAvailability() { + return availability; + } + + /** + * Sets the value of the 'availability' field. + * + * @param value the value to set. + */ + public void setAvailability(sparqles.avro.analytics.EPViewAvailability value) { + this.availability = value; + } + + /** + * Gets the value of the 'performance' field. + * + * @return The value of the 'performance' field. + */ + public sparqles.avro.analytics.EPViewPerformance getPerformance() { + return performance; + } + + /** + * Sets the value of the 'performance' field. + * + * @param value the value to set. + */ + public void setPerformance(sparqles.avro.analytics.EPViewPerformance value) { + this.performance = value; + } + + /** + * Gets the value of the 'interoperability' field. + * + * @return The value of the 'interoperability' field. + */ + public sparqles.avro.analytics.EPViewInteroperability getInteroperability() { + return interoperability; + } + + /** + * Sets the value of the 'interoperability' field. + * + * @param value the value to set. + */ + public void setInteroperability(sparqles.avro.analytics.EPViewInteroperability value) { + this.interoperability = value; + } + + /** + * Gets the value of the 'discoverability' field. + * + * @return The value of the 'discoverability' field. + */ + public sparqles.avro.analytics.EPViewDiscoverability getDiscoverability() { + return discoverability; + } + + /** + * Sets the value of the 'discoverability' field. + * + * @param value the value to set. + */ + public void setDiscoverability(sparqles.avro.analytics.EPViewDiscoverability value) { + this.discoverability = value; + } + + /** + * Gets the value of the 'calculation' field. + * + * @return The value of the 'calculation' field. + */ + public sparqles.avro.analytics.EPViewCalculation getCalculation() { + return calculation; + } + + /** + * Sets the value of the 'calculation' field. + * + * @param value the value to set. 
+ */ + public void setCalculation(sparqles.avro.analytics.EPViewCalculation value) { + this.calculation = value; + } - /** Gets the value of the 'endpoint' field. */ + /** + * Creates a new EPView RecordBuilder. + * + * @return A new EPView RecordBuilder + */ + public static sparqles.avro.analytics.EPView.Builder newBuilder() { + return new sparqles.avro.analytics.EPView.Builder(); + } + + /** + * Creates a new EPView RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPView RecordBuilder + */ + public static sparqles.avro.analytics.EPView.Builder newBuilder( + sparqles.avro.analytics.EPView.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPView.Builder(); + } else { + return new sparqles.avro.analytics.EPView.Builder(other); + } + } + + /** + * Creates a new EPView RecordBuilder by copying an existing EPView instance. + * + * @param other The existing instance to copy. + * @return A new EPView RecordBuilder + */ + public static sparqles.avro.analytics.EPView.Builder newBuilder( + sparqles.avro.analytics.EPView other) { + if (other == null) { + return new sparqles.avro.analytics.EPView.Builder(); + } else { + return new sparqles.avro.analytics.EPView.Builder(other); + } + } + + /** RecordBuilder for EPView instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private sparqles.avro.analytics.EPViewAvailability availability; + private sparqles.avro.analytics.EPViewAvailability.Builder availabilityBuilder; + private sparqles.avro.analytics.EPViewPerformance performance; + private sparqles.avro.analytics.EPViewPerformance.Builder performanceBuilder; + private sparqles.avro.analytics.EPViewInteroperability interoperability; + private sparqles.avro.analytics.EPViewInteroperability.Builder interoperabilityBuilder; + private sparqles.avro.analytics.EPViewDiscoverability discoverability; + private sparqles.avro.analytics.EPViewDiscoverability.Builder discoverabilityBuilder; + private sparqles.avro.analytics.EPViewCalculation calculation; + private sparqles.avro.analytics.EPViewCalculation.Builder calculationBuilder; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.EPView.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.availability)) { + this.availability = data().deepCopy(fields()[1].schema(), other.availability); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (other.hasAvailabilityBuilder()) { + this.availabilityBuilder = + sparqles.avro.analytics.EPViewAvailability.newBuilder(other.getAvailabilityBuilder()); + } + if (isValidValue(fields()[2], other.performance)) { + this.performance = data().deepCopy(fields()[2].schema(), other.performance); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (other.hasPerformanceBuilder()) { + this.performanceBuilder = + sparqles.avro.analytics.EPViewPerformance.newBuilder(other.getPerformanceBuilder()); + } + if (isValidValue(fields()[3], other.interoperability)) { + this.interoperability = data().deepCopy(fields()[3].schema(), other.interoperability); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (other.hasInteroperabilityBuilder()) { + this.interoperabilityBuilder = + sparqles.avro.analytics.EPViewInteroperability.newBuilder( + other.getInteroperabilityBuilder()); + } + if (isValidValue(fields()[4], other.discoverability)) { + this.discoverability = data().deepCopy(fields()[4].schema(), other.discoverability); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (other.hasDiscoverabilityBuilder()) { + this.discoverabilityBuilder = + sparqles.avro.analytics.EPViewDiscoverability.newBuilder( + other.getDiscoverabilityBuilder()); + } + if (isValidValue(fields()[5], other.calculation)) { + this.calculation = data().deepCopy(fields()[5].schema(), other.calculation); + fieldSetFlags()[5] = 
other.fieldSetFlags()[5]; + } + if (other.hasCalculationBuilder()) { + this.calculationBuilder = + sparqles.avro.analytics.EPViewCalculation.newBuilder(other.getCalculationBuilder()); + } + } + + /** + * Creates a Builder by copying an existing EPView instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPView other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.availability)) { + this.availability = data().deepCopy(fields()[1].schema(), other.availability); + fieldSetFlags()[1] = true; + } + this.availabilityBuilder = null; + if (isValidValue(fields()[2], other.performance)) { + this.performance = data().deepCopy(fields()[2].schema(), other.performance); + fieldSetFlags()[2] = true; + } + this.performanceBuilder = null; + if (isValidValue(fields()[3], other.interoperability)) { + this.interoperability = data().deepCopy(fields()[3].schema(), other.interoperability); + fieldSetFlags()[3] = true; + } + this.interoperabilityBuilder = null; + if (isValidValue(fields()[4], other.discoverability)) { + this.discoverability = data().deepCopy(fields()[4].schema(), other.discoverability); + fieldSetFlags()[4] = true; + } + this.discoverabilityBuilder = null; + if (isValidValue(fields()[5], other.calculation)) { + this.calculation = data().deepCopy(fields()[5].schema(), other.calculation); + fieldSetFlags()[5] = true; + } + this.calculationBuilder = null; + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + return endpoint; } /** * Sets the value of the 'endpoint' field. * - * @param value the value to set. + * @param value The value of 'endpoint'. + * @return This builder. 
*/ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.analytics.EPView.Builder setEndpoint(sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'availability' field. */ + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); + } + } + return endpointBuilder; + } + + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; + } + + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; + } + + /** + * Clears the value of the 'endpoint' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'availability' field. + * + * @return The value. 
+ */ public sparqles.avro.analytics.EPViewAvailability getAvailability() { - return availability; + return availability; } /** * Sets the value of the 'availability' field. * - * @param value the value to set. + * @param value The value of 'availability'. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setAvailability( + sparqles.avro.analytics.EPViewAvailability value) { + validate(fields()[1], value); + this.availabilityBuilder = null; + this.availability = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'availability' field has been set. + * + * @return True if the 'availability' field has been set, false otherwise. + */ + public boolean hasAvailability() { + return fieldSetFlags()[1]; + } + + /** + * Gets the Builder instance for the 'availability' field and creates one if it doesn't exist + * yet. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailability.Builder getAvailabilityBuilder() { + if (availabilityBuilder == null) { + if (hasAvailability()) { + setAvailabilityBuilder( + sparqles.avro.analytics.EPViewAvailability.newBuilder(availability)); + } else { + setAvailabilityBuilder(sparqles.avro.analytics.EPViewAvailability.newBuilder()); + } + } + return availabilityBuilder; + } + + /** + * Sets the Builder instance for the 'availability' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setAvailabilityBuilder( + sparqles.avro.analytics.EPViewAvailability.Builder value) { + clearAvailability(); + availabilityBuilder = value; + return this; + } + + /** + * Checks whether the 'availability' field has an active Builder instance + * + * @return True if the 'availability' field has an active Builder instance + */ + public boolean hasAvailabilityBuilder() { + return availabilityBuilder != null; + } + + /** + * Clears the value of the 'availability' field. 
+ * + * @return This builder. */ - public void setAvailability(sparqles.avro.analytics.EPViewAvailability value) { - this.availability = value; + public sparqles.avro.analytics.EPView.Builder clearAvailability() { + availability = null; + availabilityBuilder = null; + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'performance' field. */ + /** + * Gets the value of the 'performance' field. + * + * @return The value. + */ public sparqles.avro.analytics.EPViewPerformance getPerformance() { - return performance; + return performance; } /** * Sets the value of the 'performance' field. * - * @param value the value to set. + * @param value The value of 'performance'. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setPerformance( + sparqles.avro.analytics.EPViewPerformance value) { + validate(fields()[2], value); + this.performanceBuilder = null; + this.performance = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'performance' field has been set. + * + * @return True if the 'performance' field has been set, false otherwise. */ - public void setPerformance(sparqles.avro.analytics.EPViewPerformance value) { - this.performance = value; + public boolean hasPerformance() { + return fieldSetFlags()[2]; } - /** Gets the value of the 'interoperability' field. */ + /** + * Gets the Builder instance for the 'performance' field and creates one if it doesn't exist + * yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewPerformance.Builder getPerformanceBuilder() { + if (performanceBuilder == null) { + if (hasPerformance()) { + setPerformanceBuilder(sparqles.avro.analytics.EPViewPerformance.newBuilder(performance)); + } else { + setPerformanceBuilder(sparqles.avro.analytics.EPViewPerformance.newBuilder()); + } + } + return performanceBuilder; + } + + /** + * Sets the Builder instance for the 'performance' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setPerformanceBuilder( + sparqles.avro.analytics.EPViewPerformance.Builder value) { + clearPerformance(); + performanceBuilder = value; + return this; + } + + /** + * Checks whether the 'performance' field has an active Builder instance + * + * @return True if the 'performance' field has an active Builder instance + */ + public boolean hasPerformanceBuilder() { + return performanceBuilder != null; + } + + /** + * Clears the value of the 'performance' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder clearPerformance() { + performance = null; + performanceBuilder = null; + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'interoperability' field. + * + * @return The value. + */ public sparqles.avro.analytics.EPViewInteroperability getInteroperability() { - return interoperability; + return interoperability; } /** * Sets the value of the 'interoperability' field. * - * @param value the value to set. + * @param value The value of 'interoperability'. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setInteroperability( + sparqles.avro.analytics.EPViewInteroperability value) { + validate(fields()[3], value); + this.interoperabilityBuilder = null; + this.interoperability = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'interoperability' field has been set. 
+ * + * @return True if the 'interoperability' field has been set, false otherwise. */ - public void setInteroperability(sparqles.avro.analytics.EPViewInteroperability value) { - this.interoperability = value; + public boolean hasInteroperability() { + return fieldSetFlags()[3]; } - /** Gets the value of the 'discoverability' field. */ + /** + * Gets the Builder instance for the 'interoperability' field and creates one if it doesn't + * exist yet. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewInteroperability.Builder getInteroperabilityBuilder() { + if (interoperabilityBuilder == null) { + if (hasInteroperability()) { + setInteroperabilityBuilder( + sparqles.avro.analytics.EPViewInteroperability.newBuilder(interoperability)); + } else { + setInteroperabilityBuilder(sparqles.avro.analytics.EPViewInteroperability.newBuilder()); + } + } + return interoperabilityBuilder; + } + + /** + * Sets the Builder instance for the 'interoperability' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setInteroperabilityBuilder( + sparqles.avro.analytics.EPViewInteroperability.Builder value) { + clearInteroperability(); + interoperabilityBuilder = value; + return this; + } + + /** + * Checks whether the 'interoperability' field has an active Builder instance + * + * @return True if the 'interoperability' field has an active Builder instance + */ + public boolean hasInteroperabilityBuilder() { + return interoperabilityBuilder != null; + } + + /** + * Clears the value of the 'interoperability' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder clearInteroperability() { + interoperability = null; + interoperabilityBuilder = null; + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'discoverability' field. + * + * @return The value. 
+ */ public sparqles.avro.analytics.EPViewDiscoverability getDiscoverability() { - return discoverability; + return discoverability; } /** * Sets the value of the 'discoverability' field. * - * @param value the value to set. + * @param value The value of 'discoverability'. + * @return This builder. */ - public void setDiscoverability(sparqles.avro.analytics.EPViewDiscoverability value) { - this.discoverability = value; + public sparqles.avro.analytics.EPView.Builder setDiscoverability( + sparqles.avro.analytics.EPViewDiscoverability value) { + validate(fields()[4], value); + this.discoverabilityBuilder = null; + this.discoverability = value; + fieldSetFlags()[4] = true; + return this; } - /** RecordBuilder for EPView instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private sparqles.avro.Endpoint endpoint; - private sparqles.avro.analytics.EPViewAvailability availability; - private sparqles.avro.analytics.EPViewPerformance performance; - private sparqles.avro.analytics.EPViewInteroperability interoperability; - private sparqles.avro.analytics.EPViewDiscoverability discoverability; + /** + * Checks whether the 'discoverability' field has been set. + * + * @return True if the 'discoverability' field has been set, false otherwise. + */ + public boolean hasDiscoverability() { + return fieldSetFlags()[4]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPView.SCHEMA$); + /** + * Gets the Builder instance for the 'discoverability' field and creates one if it doesn't exist + * yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewDiscoverability.Builder getDiscoverabilityBuilder() { + if (discoverabilityBuilder == null) { + if (hasDiscoverability()) { + setDiscoverabilityBuilder( + sparqles.avro.analytics.EPViewDiscoverability.newBuilder(discoverability)); + } else { + setDiscoverabilityBuilder(sparqles.avro.analytics.EPViewDiscoverability.newBuilder()); } + } + return discoverabilityBuilder; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPView.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.availability)) { - this.availability = data().deepCopy(fields()[1].schema(), other.availability); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.performance)) { - this.performance = data().deepCopy(fields()[2].schema(), other.performance); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.interoperability)) { - this.interoperability = - data().deepCopy(fields()[3].schema(), other.interoperability); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.discoverability)) { - this.discoverability = data().deepCopy(fields()[4].schema(), other.discoverability); - fieldSetFlags()[4] = true; - } - } + /** + * Sets the Builder instance for the 'discoverability' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPView.Builder setDiscoverabilityBuilder( + sparqles.avro.analytics.EPViewDiscoverability.Builder value) { + clearDiscoverability(); + discoverabilityBuilder = value; + return this; + } - /** Creates a Builder by copying an existing EPView instance */ - private Builder(sparqles.avro.analytics.EPView other) { - super(sparqles.avro.analytics.EPView.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.availability)) { - this.availability = data().deepCopy(fields()[1].schema(), other.availability); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.performance)) { - this.performance = data().deepCopy(fields()[2].schema(), other.performance); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.interoperability)) { - this.interoperability = - data().deepCopy(fields()[3].schema(), other.interoperability); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.discoverability)) { - this.discoverability = data().deepCopy(fields()[4].schema(), other.discoverability); - fieldSetFlags()[4] = true; - } - } + /** + * Checks whether the 'discoverability' field has an active Builder instance + * + * @return True if the 'discoverability' field has an active Builder instance + */ + public boolean hasDiscoverabilityBuilder() { + return discoverabilityBuilder != null; + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + /** + * Clears the value of the 'discoverability' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPView.Builder clearDiscoverability() { + discoverability = null; + discoverabilityBuilder = null; + fieldSetFlags()[4] = false; + return this; + } - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.analytics.EPView.Builder setEndpoint(sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + /** + * Gets the value of the 'calculation' field. + * + * @return The value. + */ + public sparqles.avro.analytics.EPViewCalculation getCalculation() { + return calculation; + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + /** + * Sets the value of the 'calculation' field. + * + * @param value The value of 'calculation'. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setCalculation( + sparqles.avro.analytics.EPViewCalculation value) { + validate(fields()[5], value); + this.calculationBuilder = null; + this.calculation = value; + fieldSetFlags()[5] = true; + return this; + } - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.analytics.EPView.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + /** + * Checks whether the 'calculation' field has been set. + * + * @return True if the 'calculation' field has been set, false otherwise. + */ + public boolean hasCalculation() { + return fieldSetFlags()[5]; + } - /** Gets the value of the 'availability' field */ - public sparqles.avro.analytics.EPViewAvailability getAvailability() { - return availability; + /** + * Gets the Builder instance for the 'calculation' field and creates one if it doesn't exist + * yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewCalculation.Builder getCalculationBuilder() { + if (calculationBuilder == null) { + if (hasCalculation()) { + setCalculationBuilder(sparqles.avro.analytics.EPViewCalculation.newBuilder(calculation)); + } else { + setCalculationBuilder(sparqles.avro.analytics.EPViewCalculation.newBuilder()); } + } + return calculationBuilder; + } - /** Sets the value of the 'availability' field */ - public sparqles.avro.analytics.EPView.Builder setAvailability( - sparqles.avro.analytics.EPViewAvailability value) { - validate(fields()[1], value); - this.availability = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Sets the Builder instance for the 'calculation' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.EPView.Builder setCalculationBuilder( + sparqles.avro.analytics.EPViewCalculation.Builder value) { + clearCalculation(); + calculationBuilder = value; + return this; + } - /** Checks whether the 'availability' field has been set */ - public boolean hasAvailability() { - return fieldSetFlags()[1]; - } + /** + * Checks whether the 'calculation' field has an active Builder instance + * + * @return True if the 'calculation' field has an active Builder instance + */ + public boolean hasCalculationBuilder() { + return calculationBuilder != null; + } - /** Clears the value of the 'availability' field */ - public sparqles.avro.analytics.EPView.Builder clearAvailability() { - availability = null; - fieldSetFlags()[1] = false; - return this; - } + /** + * Clears the value of the 'calculation' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPView.Builder clearCalculation() { + calculation = null; + calculationBuilder = null; + fieldSetFlags()[5] = false; + return this; + } - /** Gets the value of the 'performance' field */ - public sparqles.avro.analytics.EPViewPerformance getPerformance() { - return performance; + @Override + @SuppressWarnings("unchecked") + public EPView build() { + try { + EPView record = new EPView(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } - - /** Sets the value of the 'performance' field */ - public sparqles.avro.analytics.EPView.Builder setPerformance( - sparqles.avro.analytics.EPViewPerformance value) { - validate(fields()[2], value); - this.performance = value; - fieldSetFlags()[2] = true; - return this; + if (availabilityBuilder != null) { + try { + record.availability = this.availabilityBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("availability")); + throw e; + } + } else { + record.availability = + fieldSetFlags()[1] + ? this.availability + : (sparqles.avro.analytics.EPViewAvailability) defaultValue(fields()[1]); } - - /** Checks whether the 'performance' field has been set */ - public boolean hasPerformance() { - return fieldSetFlags()[2]; + if (performanceBuilder != null) { + try { + record.performance = this.performanceBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("performance")); + throw e; + } + } else { + record.performance = + fieldSetFlags()[2] + ? 
this.performance + : (sparqles.avro.analytics.EPViewPerformance) defaultValue(fields()[2]); } - - /** Clears the value of the 'performance' field */ - public sparqles.avro.analytics.EPView.Builder clearPerformance() { - performance = null; - fieldSetFlags()[2] = false; - return this; + if (interoperabilityBuilder != null) { + try { + record.interoperability = this.interoperabilityBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("interoperability")); + throw e; + } + } else { + record.interoperability = + fieldSetFlags()[3] + ? this.interoperability + : (sparqles.avro.analytics.EPViewInteroperability) defaultValue(fields()[3]); } - - /** Gets the value of the 'interoperability' field */ - public sparqles.avro.analytics.EPViewInteroperability getInteroperability() { - return interoperability; + if (discoverabilityBuilder != null) { + try { + record.discoverability = this.discoverabilityBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("discoverability")); + throw e; + } + } else { + record.discoverability = + fieldSetFlags()[4] + ? this.discoverability + : (sparqles.avro.analytics.EPViewDiscoverability) defaultValue(fields()[4]); } - - /** Sets the value of the 'interoperability' field */ - public sparqles.avro.analytics.EPView.Builder setInteroperability( - sparqles.avro.analytics.EPViewInteroperability value) { - validate(fields()[3], value); - this.interoperability = value; - fieldSetFlags()[3] = true; - return this; + if (calculationBuilder != null) { + try { + record.calculation = this.calculationBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("calculation")); + throw e; + } + } else { + record.calculation = + fieldSetFlags()[5] + ? 
this.calculation + : (sparqles.avro.analytics.EPViewCalculation) defaultValue(fields()[5]); } + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Checks whether the 'interoperability' field has been set */ - public boolean hasInteroperability() { - return fieldSetFlags()[3]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Clears the value of the 'interoperability' field */ - public sparqles.avro.analytics.EPView.Builder clearInteroperability() { - interoperability = null; - fieldSetFlags()[3] = false; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'discoverability' field */ - public sparqles.avro.analytics.EPViewDiscoverability getDiscoverability() { - return discoverability; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'discoverability' field */ - public sparqles.avro.analytics.EPView.Builder setDiscoverability( - sparqles.avro.analytics.EPViewDiscoverability value) { - validate(fields()[4], value); - this.discoverability = value; - fieldSetFlags()[4] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'discoverability' field has been set */ - public boolean hasDiscoverability() { - return fieldSetFlags()[4]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'discoverability' field */ - public 
sparqles.avro.analytics.EPView.Builder clearDiscoverability() { - discoverability = null; - fieldSetFlags()[4] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); + + this.availability.customEncode(out); + + this.performance.customEncode(out); + + this.interoperability.customEncode(out); + + this.discoverability.customEncode(out); + + this.calculation.customEncode(out); + } - @Override - public EPView build() { - try { - EPView record = new EPView(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.availability = - fieldSetFlags()[1] - ? this.availability - : (sparqles.avro.analytics.EPViewAvailability) - defaultValue(fields()[1]); - record.performance = - fieldSetFlags()[2] - ? this.performance - : (sparqles.avro.analytics.EPViewPerformance) - defaultValue(fields()[2]); - record.interoperability = - fieldSetFlags()[3] - ? this.interoperability - : (sparqles.avro.analytics.EPViewInteroperability) - defaultValue(fields()[3]); - record.discoverability = - fieldSetFlags()[4] - ? 
this.discoverability - : (sparqles.avro.analytics.EPViewDiscoverability) - defaultValue(fields()[4]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + + if (this.availability == null) { + this.availability = new sparqles.avro.analytics.EPViewAvailability(); + } + this.availability.customDecode(in); + + if (this.performance == null) { + this.performance = new sparqles.avro.analytics.EPViewPerformance(); + } + this.performance.customDecode(in); + + if (this.interoperability == null) { + this.interoperability = new sparqles.avro.analytics.EPViewInteroperability(); + } + this.interoperability.customDecode(in); + + if (this.discoverability == null) { + this.discoverability = new sparqles.avro.analytics.EPViewDiscoverability(); + } + this.discoverability.customDecode(in); + + if (this.calculation == null) { + this.calculation = new sparqles.avro.analytics.EPViewCalculation(); + } + this.calculation.customDecode(in); + + } else { + for (int i = 0; i < 6; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; + + case 1: + if (this.availability == null) { + this.availability = new sparqles.avro.analytics.EPViewAvailability(); } + this.availability.customDecode(in); + break; + + case 2: + if (this.performance == null) { + this.performance = new sparqles.avro.analytics.EPViewPerformance(); + } + this.performance.customDecode(in); + break; + + case 3: + if (this.interoperability == null) { + this.interoperability = new sparqles.avro.analytics.EPViewInteroperability(); + } + 
this.interoperability.customDecode(in); + break; + + case 4: + if (this.discoverability == null) { + this.discoverability = new sparqles.avro.analytics.EPViewDiscoverability(); + } + this.discoverability.customDecode(in); + break; + + case 5: + if (this.calculation == null) { + this.calculation = new sparqles.avro.analytics.EPViewCalculation(); + } + this.calculation.customDecode(in); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewAvailability.java b/backend/src/main/java/sparqles/avro/analytics/EPViewAvailability.java index 63aa07b3..5b84690a 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewAvailability.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewAvailability.java @@ -5,515 +5,923 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class EPViewAvailability extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"EPViewAvailability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"upNow\",\"type\":\"boolean\"},{\"name\":\"testRuns\",\"type\":\"int\"},{\"name\":\"uptimeLast24h\",\"type\":\"double\"},{\"name\":\"uptimeLast7d\",\"type\":\"double\"},{\"name\":\"uptimeLast31d\",\"type\":\"double\"},{\"name\":\"uptimeOverall\",\"type\":\"double\"},{\"name\":\"data\",\"type\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}}]}"); - @Deprecated public boolean upNow; - @Deprecated public int testRuns; - @Deprecated public double uptimeLast24h; - @Deprecated public double uptimeLast7d; - @Deprecated public double uptimeLast31d; - @Deprecated public double uptimeOverall; - @Deprecated public sparqles.avro.analytics.EPViewAvailabilityData data; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewAvailability() {} - - /** All-args constructor. 
*/ - public EPViewAvailability( - java.lang.Boolean upNow, - java.lang.Integer testRuns, - java.lang.Double uptimeLast24h, - java.lang.Double uptimeLast7d, - java.lang.Double uptimeLast31d, - java.lang.Double uptimeOverall, - sparqles.avro.analytics.EPViewAvailabilityData data) { - this.upNow = upNow; - this.testRuns = testRuns; - this.uptimeLast24h = uptimeLast24h; - this.uptimeLast7d = uptimeLast7d; - this.uptimeLast31d = uptimeLast31d; - this.uptimeOverall = uptimeOverall; - this.data = data; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new EPViewAvailability RecordBuilder */ - public static sparqles.avro.analytics.EPViewAvailability.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewAvailability.Builder(); - } - - /** Creates a new EPViewAvailability RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewAvailability.Builder newBuilder( - sparqles.avro.analytics.EPViewAvailability.Builder other) { - return new sparqles.avro.analytics.EPViewAvailability.Builder(other); - } - - /** - * Creates a new EPViewAvailability RecordBuilder by copying an existing EPViewAvailability - * instance - */ - public static sparqles.avro.analytics.EPViewAvailability.Builder newBuilder( - sparqles.avro.analytics.EPViewAvailability other) { - return new sparqles.avro.analytics.EPViewAvailability.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return upNow; - case 1: - return testRuns; - case 2: - return uptimeLast24h; - case 3: - return uptimeLast7d; - case 4: - return uptimeLast31d; - case 5: - return uptimeOverall; - case 6: - return data; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 6992869734652967820L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewAvailability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"upNow\",\"type\":\"boolean\"},{\"name\":\"testRuns\",\"type\":\"int\"},{\"name\":\"uptimeLast24h\",\"type\":\"double\"},{\"name\":\"uptimeLast7d\",\"type\":\"double\"},{\"name\":\"uptimeLast31d\",\"type\":\"double\"},{\"name\":\"uptimeOverall\",\"type\":\"double\"},{\"name\":\"data\",\"type\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewAvailability to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewAvailability from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewAvailability instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewAvailability fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private boolean upNow; + private int testRuns; + private double uptimeLast24h; + private double uptimeLast7d; + private double uptimeLast31d; + private double uptimeOverall; + private sparqles.avro.analytics.EPViewAvailabilityData data; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewAvailability() {} + + /** + * All-args constructor. 
+ * + * @param upNow The new value for upNow + * @param testRuns The new value for testRuns + * @param uptimeLast24h The new value for uptimeLast24h + * @param uptimeLast7d The new value for uptimeLast7d + * @param uptimeLast31d The new value for uptimeLast31d + * @param uptimeOverall The new value for uptimeOverall + * @param data The new value for data + */ + public EPViewAvailability( + java.lang.Boolean upNow, + java.lang.Integer testRuns, + java.lang.Double uptimeLast24h, + java.lang.Double uptimeLast7d, + java.lang.Double uptimeLast31d, + java.lang.Double uptimeOverall, + sparqles.avro.analytics.EPViewAvailabilityData data) { + this.upNow = upNow; + this.testRuns = testRuns; + this.uptimeLast24h = uptimeLast24h; + this.uptimeLast7d = uptimeLast7d; + this.uptimeLast31d = uptimeLast31d; + this.uptimeOverall = uptimeOverall; + this.data = data; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return upNow; + case 1: + return testRuns; + case 2: + return uptimeLast24h; + case 3: + return uptimeLast7d; + case 4: + return uptimeLast31d; + case 5: + return uptimeOverall; + case 6: + return data; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + upNow = (java.lang.Boolean) value$; + break; + case 1: + testRuns = (java.lang.Integer) value$; + break; + case 2: + uptimeLast24h = (java.lang.Double) value$; + break; + case 3: + uptimeLast7d = (java.lang.Double) value$; + break; + case 4: + uptimeLast31d = (java.lang.Double) value$; + break; + case 5: + uptimeOverall = (java.lang.Double) value$; + break; + case 6: + data = (sparqles.avro.analytics.EPViewAvailabilityData) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'upNow' field. + * + * @return The value of the 'upNow' field. + */ + public boolean getUpNow() { + return upNow; + } + + /** + * Sets the value of the 'upNow' field. + * + * @param value the value to set. + */ + public void setUpNow(boolean value) { + this.upNow = value; + } + + /** + * Gets the value of the 'testRuns' field. + * + * @return The value of the 'testRuns' field. + */ + public int getTestRuns() { + return testRuns; + } + + /** + * Sets the value of the 'testRuns' field. + * + * @param value the value to set. + */ + public void setTestRuns(int value) { + this.testRuns = value; + } + + /** + * Gets the value of the 'uptimeLast24h' field. + * + * @return The value of the 'uptimeLast24h' field. + */ + public double getUptimeLast24h() { + return uptimeLast24h; + } + + /** + * Sets the value of the 'uptimeLast24h' field. + * + * @param value the value to set. + */ + public void setUptimeLast24h(double value) { + this.uptimeLast24h = value; + } + + /** + * Gets the value of the 'uptimeLast7d' field. + * + * @return The value of the 'uptimeLast7d' field. + */ + public double getUptimeLast7d() { + return uptimeLast7d; + } + + /** + * Sets the value of the 'uptimeLast7d' field. + * + * @param value the value to set. 
+ */ + public void setUptimeLast7d(double value) { + this.uptimeLast7d = value; + } + + /** + * Gets the value of the 'uptimeLast31d' field. + * + * @return The value of the 'uptimeLast31d' field. + */ + public double getUptimeLast31d() { + return uptimeLast31d; + } + + /** + * Sets the value of the 'uptimeLast31d' field. + * + * @param value the value to set. + */ + public void setUptimeLast31d(double value) { + this.uptimeLast31d = value; + } + + /** + * Gets the value of the 'uptimeOverall' field. + * + * @return The value of the 'uptimeOverall' field. + */ + public double getUptimeOverall() { + return uptimeOverall; + } + + /** + * Sets the value of the 'uptimeOverall' field. + * + * @param value the value to set. + */ + public void setUptimeOverall(double value) { + this.uptimeOverall = value; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value of the 'data' field. + */ + public sparqles.avro.analytics.EPViewAvailabilityData getData() { + return data; + } + + /** + * Sets the value of the 'data' field. + * + * @param value the value to set. + */ + public void setData(sparqles.avro.analytics.EPViewAvailabilityData value) { + this.data = value; + } + + /** + * Creates a new EPViewAvailability RecordBuilder. + * + * @return A new EPViewAvailability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailability.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewAvailability.Builder(); + } + + /** + * Creates a new EPViewAvailability RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new EPViewAvailability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailability.Builder newBuilder( + sparqles.avro.analytics.EPViewAvailability.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewAvailability.Builder(); + } else { + return new sparqles.avro.analytics.EPViewAvailability.Builder(other); + } + } + + /** + * Creates a new EPViewAvailability RecordBuilder by copying an existing EPViewAvailability + * instance. + * + * @param other The existing instance to copy. + * @return A new EPViewAvailability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailability.Builder newBuilder( + sparqles.avro.analytics.EPViewAvailability other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewAvailability.Builder(); + } else { + return new sparqles.avro.analytics.EPViewAvailability.Builder(other); + } + } + + /** RecordBuilder for EPViewAvailability instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private boolean upNow; + private int testRuns; + private double uptimeLast24h; + private double uptimeLast7d; + private double uptimeLast31d; + private double uptimeOverall; + private sparqles.avro.analytics.EPViewAvailabilityData data; + private sparqles.avro.analytics.EPViewAvailabilityData.Builder dataBuilder; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - upNow = (java.lang.Boolean) value$; - break; - case 1: - testRuns = (java.lang.Integer) value$; - break; - case 2: - uptimeLast24h = (java.lang.Double) value$; - break; - case 3: - uptimeLast7d = (java.lang.Double) value$; - break; - case 4: - uptimeLast31d = (java.lang.Double) value$; - break; - case 5: - uptimeOverall = (java.lang.Double) value$; - break; - case 6: - data = (sparqles.avro.analytics.EPViewAvailabilityData) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.analytics.EPViewAvailability.Builder other) { + super(other); + if (isValidValue(fields()[0], other.upNow)) { + this.upNow = data().deepCopy(fields()[0].schema(), other.upNow); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.testRuns)) { + this.testRuns = data().deepCopy(fields()[1].schema(), other.testRuns); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.uptimeLast24h)) { + this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.uptimeLast7d)) { + this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.uptimeLast31d)) { + this.uptimeLast31d = data().deepCopy(fields()[4].schema(), other.uptimeLast31d); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.uptimeOverall)) { + this.uptimeOverall = data().deepCopy(fields()[5].schema(), other.uptimeOverall); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if 
(isValidValue(fields()[6], other.data)) { + this.data = data().deepCopy(fields()[6].schema(), other.data); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (other.hasDataBuilder()) { + this.dataBuilder = + sparqles.avro.analytics.EPViewAvailabilityData.newBuilder(other.getDataBuilder()); + } } - /** Gets the value of the 'upNow' field. */ - public java.lang.Boolean getUpNow() { - return upNow; + /** + * Creates a Builder by copying an existing EPViewAvailability instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewAvailability other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.upNow)) { + this.upNow = data().deepCopy(fields()[0].schema(), other.upNow); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.testRuns)) { + this.testRuns = data().deepCopy(fields()[1].schema(), other.testRuns); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.uptimeLast24h)) { + this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.uptimeLast7d)) { + this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.uptimeLast31d)) { + this.uptimeLast31d = data().deepCopy(fields()[4].schema(), other.uptimeLast31d); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.uptimeOverall)) { + this.uptimeOverall = data().deepCopy(fields()[5].schema(), other.uptimeOverall); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.data)) { + this.data = data().deepCopy(fields()[6].schema(), other.data); + fieldSetFlags()[6] = true; + } + this.dataBuilder = null; + } + + /** + * Gets the value of the 'upNow' field. + * + * @return The value. + */ + public boolean getUpNow() { + return upNow; } /** * Sets the value of the 'upNow' field. 
* - * @param value the value to set. + * @param value The value of 'upNow'. + * @return This builder. */ - public void setUpNow(java.lang.Boolean value) { - this.upNow = value; + public sparqles.avro.analytics.EPViewAvailability.Builder setUpNow(boolean value) { + validate(fields()[0], value); + this.upNow = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'testRuns' field. */ - public java.lang.Integer getTestRuns() { - return testRuns; + /** + * Checks whether the 'upNow' field has been set. + * + * @return True if the 'upNow' field has been set, false otherwise. + */ + public boolean hasUpNow() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'upNow' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailability.Builder clearUpNow() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'testRuns' field. + * + * @return The value. + */ + public int getTestRuns() { + return testRuns; } /** * Sets the value of the 'testRuns' field. * - * @param value the value to set. + * @param value The value of 'testRuns'. + * @return This builder. */ - public void setTestRuns(java.lang.Integer value) { - this.testRuns = value; + public sparqles.avro.analytics.EPViewAvailability.Builder setTestRuns(int value) { + validate(fields()[1], value); + this.testRuns = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'uptimeLast24h' field. */ - public java.lang.Double getUptimeLast24h() { - return uptimeLast24h; + /** + * Checks whether the 'testRuns' field has been set. + * + * @return True if the 'testRuns' field has been set, false otherwise. + */ + public boolean hasTestRuns() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'testRuns' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewAvailability.Builder clearTestRuns() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'uptimeLast24h' field. + * + * @return The value. + */ + public double getUptimeLast24h() { + return uptimeLast24h; } /** * Sets the value of the 'uptimeLast24h' field. * - * @param value the value to set. + * @param value The value of 'uptimeLast24h'. + * @return This builder. */ - public void setUptimeLast24h(java.lang.Double value) { - this.uptimeLast24h = value; + public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeLast24h(double value) { + validate(fields()[2], value); + this.uptimeLast24h = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'uptimeLast7d' field. */ - public java.lang.Double getUptimeLast7d() { - return uptimeLast7d; + /** + * Checks whether the 'uptimeLast24h' field has been set. + * + * @return True if the 'uptimeLast24h' field has been set, false otherwise. + */ + public boolean hasUptimeLast24h() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'uptimeLast24h' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeLast24h() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'uptimeLast7d' field. + * + * @return The value. + */ + public double getUptimeLast7d() { + return uptimeLast7d; } /** * Sets the value of the 'uptimeLast7d' field. * - * @param value the value to set. + * @param value The value of 'uptimeLast7d'. + * @return This builder. */ - public void setUptimeLast7d(java.lang.Double value) { - this.uptimeLast7d = value; + public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeLast7d(double value) { + validate(fields()[3], value); + this.uptimeLast7d = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'uptimeLast31d' field. 
*/ - public java.lang.Double getUptimeLast31d() { - return uptimeLast31d; + /** + * Checks whether the 'uptimeLast7d' field has been set. + * + * @return True if the 'uptimeLast7d' field has been set, false otherwise. + */ + public boolean hasUptimeLast7d() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'uptimeLast7d' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeLast7d() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'uptimeLast31d' field. + * + * @return The value. + */ + public double getUptimeLast31d() { + return uptimeLast31d; } /** * Sets the value of the 'uptimeLast31d' field. * - * @param value the value to set. + * @param value The value of 'uptimeLast31d'. + * @return This builder. */ - public void setUptimeLast31d(java.lang.Double value) { - this.uptimeLast31d = value; + public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeLast31d(double value) { + validate(fields()[4], value); + this.uptimeLast31d = value; + fieldSetFlags()[4] = true; + return this; } - /** Gets the value of the 'uptimeOverall' field. */ - public java.lang.Double getUptimeOverall() { - return uptimeOverall; + /** + * Checks whether the 'uptimeLast31d' field has been set. + * + * @return True if the 'uptimeLast31d' field has been set, false otherwise. + */ + public boolean hasUptimeLast31d() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'uptimeLast31d' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeLast31d() { + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'uptimeOverall' field. + * + * @return The value. + */ + public double getUptimeOverall() { + return uptimeOverall; } /** * Sets the value of the 'uptimeOverall' field. * - * @param value the value to set. + * @param value The value of 'uptimeOverall'. 
+ * @return This builder. */ - public void setUptimeOverall(java.lang.Double value) { - this.uptimeOverall = value; + public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeOverall(double value) { + validate(fields()[5], value); + this.uptimeOverall = value; + fieldSetFlags()[5] = true; + return this; } - /** Gets the value of the 'data' field. */ - public sparqles.avro.analytics.EPViewAvailabilityData getData() { - return data; + /** + * Checks whether the 'uptimeOverall' field has been set. + * + * @return True if the 'uptimeOverall' field has been set, false otherwise. + */ + public boolean hasUptimeOverall() { + return fieldSetFlags()[5]; } /** - * Sets the value of the 'data' field. + * Clears the value of the 'uptimeOverall' field. * - * @param value the value to set. + * @return This builder. */ - public void setData(sparqles.avro.analytics.EPViewAvailabilityData value) { - this.data = value; + public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeOverall() { + fieldSetFlags()[5] = false; + return this; } - /** RecordBuilder for EPViewAvailability instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Gets the value of the 'data' field. + * + * @return The value. + */ + public sparqles.avro.analytics.EPViewAvailabilityData getData() { + return data; + } - private boolean upNow; - private int testRuns; - private double uptimeLast24h; - private double uptimeLast7d; - private double uptimeLast31d; - private double uptimeOverall; - private sparqles.avro.analytics.EPViewAvailabilityData data; + /** + * Sets the value of the 'data' field. + * + * @param value The value of 'data'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewAvailability.Builder setData( + sparqles.avro.analytics.EPViewAvailabilityData value) { + validate(fields()[6], value); + this.dataBuilder = null; + this.data = value; + fieldSetFlags()[6] = true; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewAvailability.SCHEMA$); - } + /** + * Checks whether the 'data' field has been set. + * + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[6]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewAvailability.Builder other) { - super(other); - if (isValidValue(fields()[0], other.upNow)) { - this.upNow = data().deepCopy(fields()[0].schema(), other.upNow); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.testRuns)) { - this.testRuns = data().deepCopy(fields()[1].schema(), other.testRuns); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.uptimeLast24h)) { - this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.uptimeLast7d)) { - this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.uptimeLast31d)) { - this.uptimeLast31d = data().deepCopy(fields()[4].schema(), other.uptimeLast31d); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.uptimeOverall)) { - this.uptimeOverall = data().deepCopy(fields()[5].schema(), other.uptimeOverall); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.data)) { - this.data = data().deepCopy(fields()[6].schema(), other.data); - fieldSetFlags()[6] = true; - } - } + /** + * Gets the Builder instance for the 'data' field and creates one if it doesn't exist yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewAvailabilityData.Builder getDataBuilder() { + if (dataBuilder == null) { + if (hasData()) { + setDataBuilder(sparqles.avro.analytics.EPViewAvailabilityData.newBuilder(data)); + } else { + setDataBuilder(sparqles.avro.analytics.EPViewAvailabilityData.newBuilder()); + } + } + return dataBuilder; + } - /** Creates a Builder by copying an existing EPViewAvailability instance */ - private Builder(sparqles.avro.analytics.EPViewAvailability other) { - super(sparqles.avro.analytics.EPViewAvailability.SCHEMA$); - if (isValidValue(fields()[0], other.upNow)) { - this.upNow = data().deepCopy(fields()[0].schema(), other.upNow); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.testRuns)) { - this.testRuns = data().deepCopy(fields()[1].schema(), other.testRuns); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.uptimeLast24h)) { - this.uptimeLast24h = data().deepCopy(fields()[2].schema(), other.uptimeLast24h); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.uptimeLast7d)) { - this.uptimeLast7d = data().deepCopy(fields()[3].schema(), other.uptimeLast7d); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.uptimeLast31d)) { - this.uptimeLast31d = data().deepCopy(fields()[4].schema(), other.uptimeLast31d); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.uptimeOverall)) { - this.uptimeOverall = data().deepCopy(fields()[5].schema(), other.uptimeOverall); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.data)) { - this.data = data().deepCopy(fields()[6].schema(), other.data); - fieldSetFlags()[6] = true; - } - } + /** + * Sets the Builder instance for the 'data' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewAvailability.Builder setDataBuilder( + sparqles.avro.analytics.EPViewAvailabilityData.Builder value) { + clearData(); + dataBuilder = value; + return this; + } - /** Gets the value of the 'upNow' field */ - public java.lang.Boolean getUpNow() { - return upNow; - } + /** + * Checks whether the 'data' field has an active Builder instance + * + * @return True if the 'data' field has an active Builder instance + */ + public boolean hasDataBuilder() { + return dataBuilder != null; + } - /** Sets the value of the 'upNow' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setUpNow(boolean value) { - validate(fields()[0], value); - this.upNow = value; - fieldSetFlags()[0] = true; - return this; - } + /** + * Clears the value of the 'data' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailability.Builder clearData() { + data = null; + dataBuilder = null; + fieldSetFlags()[6] = false; + return this; + } - /** Checks whether the 'upNow' field has been set */ - public boolean hasUpNow() { - return fieldSetFlags()[0]; - } + @Override + @SuppressWarnings("unchecked") + public EPViewAvailability build() { + try { + EPViewAvailability record = new EPViewAvailability(); + record.upNow = + fieldSetFlags()[0] ? this.upNow : (java.lang.Boolean) defaultValue(fields()[0]); + record.testRuns = + fieldSetFlags()[1] ? this.testRuns : (java.lang.Integer) defaultValue(fields()[1]); + record.uptimeLast24h = + fieldSetFlags()[2] ? this.uptimeLast24h : (java.lang.Double) defaultValue(fields()[2]); + record.uptimeLast7d = + fieldSetFlags()[3] ? this.uptimeLast7d : (java.lang.Double) defaultValue(fields()[3]); + record.uptimeLast31d = + fieldSetFlags()[4] ? this.uptimeLast31d : (java.lang.Double) defaultValue(fields()[4]); + record.uptimeOverall = + fieldSetFlags()[5] ? 
this.uptimeOverall : (java.lang.Double) defaultValue(fields()[5]); + if (dataBuilder != null) { + try { + record.data = this.dataBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("data")); + throw e; + } + } else { + record.data = + fieldSetFlags()[6] + ? this.data + : (sparqles.avro.analytics.EPViewAvailabilityData) defaultValue(fields()[6]); + } + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Clears the value of the 'upNow' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder clearUpNow() { - fieldSetFlags()[0] = false; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Gets the value of the 'testRuns' field */ - public java.lang.Integer getTestRuns() { - return testRuns; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Sets the value of the 'testRuns' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setTestRuns(int value) { - validate(fields()[1], value); - this.testRuns = value; - fieldSetFlags()[1] = true; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Checks whether the 'testRuns' field has been set */ - public boolean hasTestRuns() { - return fieldSetFlags()[1]; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Clears the value of the 'testRuns' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder 
clearTestRuns() { - fieldSetFlags()[1] = false; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Gets the value of the 'uptimeLast24h' field */ - public java.lang.Double getUptimeLast24h() { - return uptimeLast24h; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeBoolean(this.upNow); - /** Sets the value of the 'uptimeLast24h' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeLast24h(double value) { - validate(fields()[2], value); - this.uptimeLast24h = value; - fieldSetFlags()[2] = true; - return this; - } + out.writeInt(this.testRuns); - /** Checks whether the 'uptimeLast24h' field has been set */ - public boolean hasUptimeLast24h() { - return fieldSetFlags()[2]; - } + out.writeDouble(this.uptimeLast24h); - /** Clears the value of the 'uptimeLast24h' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeLast24h() { - fieldSetFlags()[2] = false; - return this; - } + out.writeDouble(this.uptimeLast7d); - /** Gets the value of the 'uptimeLast7d' field */ - public java.lang.Double getUptimeLast7d() { - return uptimeLast7d; - } + out.writeDouble(this.uptimeLast31d); - /** Sets the value of the 'uptimeLast7d' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeLast7d(double value) { - validate(fields()[3], value); - this.uptimeLast7d = value; - fieldSetFlags()[3] = true; - return this; - } + out.writeDouble(this.uptimeOverall); - /** Checks whether the 'uptimeLast7d' field has been set */ - public boolean hasUptimeLast7d() { - return fieldSetFlags()[3]; - } + this.data.customEncode(out); + } - /** Clears the value of the 'uptimeLast7d' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeLast7d() { - fieldSetFlags()[3] = false; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { 
+ org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.upNow = in.readBoolean(); - /** Gets the value of the 'uptimeLast31d' field */ - public java.lang.Double getUptimeLast31d() { - return uptimeLast31d; - } + this.testRuns = in.readInt(); - /** Sets the value of the 'uptimeLast31d' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeLast31d(double value) { - validate(fields()[4], value); - this.uptimeLast31d = value; - fieldSetFlags()[4] = true; - return this; - } + this.uptimeLast24h = in.readDouble(); - /** Checks whether the 'uptimeLast31d' field has been set */ - public boolean hasUptimeLast31d() { - return fieldSetFlags()[4]; - } + this.uptimeLast7d = in.readDouble(); - /** Clears the value of the 'uptimeLast31d' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeLast31d() { - fieldSetFlags()[4] = false; - return this; - } + this.uptimeLast31d = in.readDouble(); - /** Gets the value of the 'uptimeOverall' field */ - public java.lang.Double getUptimeOverall() { - return uptimeOverall; - } + this.uptimeOverall = in.readDouble(); - /** Sets the value of the 'uptimeOverall' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setUptimeOverall(double value) { - validate(fields()[5], value); - this.uptimeOverall = value; - fieldSetFlags()[5] = true; - return this; - } + if (this.data == null) { + this.data = new sparqles.avro.analytics.EPViewAvailabilityData(); + } + this.data.customDecode(in); - /** Checks whether the 'uptimeOverall' field has been set */ - public boolean hasUptimeOverall() { - return fieldSetFlags()[5]; - } + } else { + for (int i = 0; i < 7; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.upNow = in.readBoolean(); + break; - /** Clears the value of the 'uptimeOverall' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder clearUptimeOverall() { - fieldSetFlags()[5] = false; - return this; - } + case 
1: + this.testRuns = in.readInt(); + break; - /** Gets the value of the 'data' field */ - public sparqles.avro.analytics.EPViewAvailabilityData getData() { - return data; - } + case 2: + this.uptimeLast24h = in.readDouble(); + break; - /** Sets the value of the 'data' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder setData( - sparqles.avro.analytics.EPViewAvailabilityData value) { - validate(fields()[6], value); - this.data = value; - fieldSetFlags()[6] = true; - return this; - } + case 3: + this.uptimeLast7d = in.readDouble(); + break; - /** Checks whether the 'data' field has been set */ - public boolean hasData() { - return fieldSetFlags()[6]; - } + case 4: + this.uptimeLast31d = in.readDouble(); + break; - /** Clears the value of the 'data' field */ - public sparqles.avro.analytics.EPViewAvailability.Builder clearData() { - data = null; - fieldSetFlags()[6] = false; - return this; - } + case 5: + this.uptimeOverall = in.readDouble(); + break; - @Override - public EPViewAvailability build() { - try { - EPViewAvailability record = new EPViewAvailability(); - record.upNow = - fieldSetFlags()[0] - ? this.upNow - : (java.lang.Boolean) defaultValue(fields()[0]); - record.testRuns = - fieldSetFlags()[1] - ? this.testRuns - : (java.lang.Integer) defaultValue(fields()[1]); - record.uptimeLast24h = - fieldSetFlags()[2] - ? this.uptimeLast24h - : (java.lang.Double) defaultValue(fields()[2]); - record.uptimeLast7d = - fieldSetFlags()[3] - ? this.uptimeLast7d - : (java.lang.Double) defaultValue(fields()[3]); - record.uptimeLast31d = - fieldSetFlags()[4] - ? this.uptimeLast31d - : (java.lang.Double) defaultValue(fields()[4]); - record.uptimeOverall = - fieldSetFlags()[5] - ? this.uptimeOverall - : (java.lang.Double) defaultValue(fields()[5]); - record.data = - fieldSetFlags()[6] - ? 
this.data - : (sparqles.avro.analytics.EPViewAvailabilityData) - defaultValue(fields()[6]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 6: + if (this.data == null) { + this.data = new sparqles.avro.analytics.EPViewAvailabilityData(); } + this.data.customDecode(in); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityData.java b/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityData.java index bca6e8e0..4034f03e 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityData.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityData.java @@ -5,227 +5,487 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class EPViewAvailabilityData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"EPViewAvailabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}"); - @Deprecated public java.lang.CharSequence key; - @Deprecated public java.util.List values; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2275224574189506323L; - /** - * Default constructor. 
Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewAvailabilityData() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewAvailabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. 
*/ - public EPViewAvailabilityData( - java.lang.CharSequence key, - java.util.List values) { - this.key = key; - this.values = values; + /** + * Serializes this EPViewAvailabilityData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewAvailabilityData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewAvailabilityData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewAvailabilityData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.util.List values; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewAvailabilityData() {} + + /** + * All-args constructor. + * + * @param key The new value for key + * @param values The new value for values + */ + public EPViewAvailabilityData( + java.lang.CharSequence key, + java.util.List values) { + this.key = key; + this.values = values; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return values; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + values = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } - /** Creates a new EPViewAvailabilityData RecordBuilder */ - public static sparqles.avro.analytics.EPViewAvailabilityData.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(); + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. + */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'values' field. + * + * @return The value of the 'values' field. + */ + public java.util.List getValues() { + return values; + } + + /** + * Sets the value of the 'values' field. + * + * @param value the value to set. + */ + public void setValues(java.util.List value) { + this.values = value; + } + + /** + * Creates a new EPViewAvailabilityData RecordBuilder. + * + * @return A new EPViewAvailabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailabilityData.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(); + } + + /** + * Creates a new EPViewAvailabilityData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new EPViewAvailabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailabilityData.Builder newBuilder( + sparqles.avro.analytics.EPViewAvailabilityData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(other); + } + } + + /** + * Creates a new EPViewAvailabilityData RecordBuilder by copying an existing + * EPViewAvailabilityData instance. + * + * @param other The existing instance to copy. + * @return A new EPViewAvailabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailabilityData.Builder newBuilder( + sparqles.avro.analytics.EPViewAvailabilityData other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(other); } + } + + /** RecordBuilder for EPViewAvailabilityData instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.util.List values; - /** Creates a new EPViewAvailabilityData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewAvailabilityData.Builder newBuilder( - sparqles.avro.analytics.EPViewAvailabilityData.Builder other) { - return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new EPViewAvailabilityData RecordBuilder by copying an existing - * EPViewAvailabilityData instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.EPViewAvailabilityData.Builder newBuilder( - sparqles.avro.analytics.EPViewAvailabilityData other) { - return new sparqles.avro.analytics.EPViewAvailabilityData.Builder(other); + private Builder(sparqles.avro.analytics.EPViewAvailabilityData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing EPViewAvailabilityData instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewAvailabilityData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return key; - case 1: - return values; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'key' field. + * + * @return The value. + */ + public java.lang.CharSequence getKey() { + return key; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - key = (java.lang.CharSequence) value$; - break; - case 1: - values = - (java.util.List) - value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailabilityData.Builder setKey( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'key' field. */ - public java.lang.CharSequence getKey() { - return key; + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. + */ + public boolean hasKey() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'key' field. + * Clears the value of the 'key' field. * - * @param value the value to set. + * @return This builder. */ - public void setKey(java.lang.CharSequence value) { - this.key = value; + public sparqles.avro.analytics.EPViewAvailabilityData.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'values' field. */ + /** + * Gets the value of the 'values' field. + * + * @return The value. + */ public java.util.List getValues() { - return values; + return values; } /** * Sets the value of the 'values' field. * - * @param value the value to set. + * @param value The value of 'values'. + * @return This builder. */ - public void setValues( - java.util.List value) { - this.values = value; + public sparqles.avro.analytics.EPViewAvailabilityData.Builder setValues( + java.util.List value) { + validate(fields()[1], value); + this.values = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for EPViewAvailabilityData instances. 
*/ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'values' field has been set. + * + * @return True if the 'values' field has been set, false otherwise. + */ + public boolean hasValues() { + return fieldSetFlags()[1]; + } - private java.lang.CharSequence key; - private java.util.List values; + /** + * Clears the value of the 'values' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailabilityData.Builder clearValues() { + values = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewAvailabilityData.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public EPViewAvailabilityData build() { + try { + EPViewAvailabilityData record = new EPViewAvailabilityData(); + record.key = + fieldSetFlags()[0] ? this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.values = + fieldSetFlags()[1] + ? 
this.values + : (java.util.List) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewAvailabilityData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.values)) { - this.values = data().deepCopy(fields()[1].schema(), other.values); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing EPViewAvailabilityData instance */ - private Builder(sparqles.avro.analytics.EPViewAvailabilityData other) { - super(sparqles.avro.analytics.EPViewAvailabilityData.SCHEMA$); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.values)) { - this.values = data().deepCopy(fields()[1].schema(), other.values); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'key' field */ - public java.lang.CharSequence getKey() { - return key; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'key' field */ - public sparqles.avro.analytics.EPViewAvailabilityData.Builder setKey( - java.lang.CharSequence value) { - validate(fields()[0], value); - 
this.key = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'key' field has been set */ - public boolean hasKey() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'key' field */ - public sparqles.avro.analytics.EPViewAvailabilityData.Builder clearKey() { - key = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); - /** Gets the value of the 'values' field */ - public java.util.List getValues() { - return values; - } + long size0 = this.values.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.EPViewAvailabilityDataPoint e0 : this.values) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'values' field */ - public sparqles.avro.analytics.EPViewAvailabilityData.Builder setValues( - java.util.List value) { - validate(fields()[1], value); - this.values = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? 
(Utf8) this.key : null); - /** Checks whether the 'values' field has been set */ - public boolean hasValues() { - return fieldSetFlags()[1]; + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewAvailabilityDataPoint e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewAvailabilityDataPoint(); + } + e0.customDecode(in); + a0.add(e0); } + } - /** Clears the value of the 'values' field */ - public sparqles.avro.analytics.EPViewAvailabilityData.Builder clearValues() { - values = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; - @Override - public EPViewAvailabilityData build() { - try { - EPViewAvailabilityData record = new EPViewAvailabilityData(); - record.key = - fieldSetFlags()[0] - ? this.key - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.values = - fieldSetFlags()[1] - ? this.values - : (java.util.List< - sparqles.avro.analytics - .EPViewAvailabilityDataPoint>) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewAvailabilityDataPoint e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewAvailabilityDataPoint(); + } + e0.customDecode(in); + a0.add(e0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityDataPoint.java b/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityDataPoint.java index f0ed3ebe..c6ba9446 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityDataPoint.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewAvailabilityDataPoint.java @@ -5,210 +5,422 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class EPViewAvailabilityDataPoint extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}"); - @Deprecated public long x; - @Deprecated public double y; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 7637027059026360805L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. 
- */ - public EPViewAvailabilityDataPoint() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewAvailabilityDataPoint\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"x\",\"type\":\"long\"},{\"name\":\"y\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewAvailabilityDataPoint to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewAvailabilityDataPoint from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewAvailabilityDataPoint instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewAvailabilityDataPoint fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private long x; + private double y; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewAvailabilityDataPoint() {} + + /** + * All-args constructor. + * + * @param x The new value for x + * @param y The new value for y + */ + public EPViewAvailabilityDataPoint(java.lang.Long x, java.lang.Double y) { + this.x = x; + this.y = y; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return x; + case 1: + return y; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } - /** All-args constructor. */ - public EPViewAvailabilityDataPoint(java.lang.Long x, java.lang.Double y) { - this.x = x; - this.y = y; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + x = (java.lang.Long) value$; + break; + case 1: + y = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'x' field. + * + * @return The value of the 'x' field. 
+ */ + public long getX() { + return x; + } + + /** + * Sets the value of the 'x' field. + * + * @param value the value to set. + */ + public void setX(long value) { + this.x = value; + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Gets the value of the 'y' field. + * + * @return The value of the 'y' field. + */ + public double getY() { + return y; + } + + /** + * Sets the value of the 'y' field. + * + * @param value the value to set. + */ + public void setY(double value) { + this.y = value; + } + + /** + * Creates a new EPViewAvailabilityDataPoint RecordBuilder. + * + * @return A new EPViewAvailabilityDataPoint RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(); + } + + /** + * Creates a new EPViewAvailabilityDataPoint RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewAvailabilityDataPoint RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder newBuilder( + sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(); + } else { + return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(other); } + } - /** Creates a new EPViewAvailabilityDataPoint RecordBuilder */ - public static sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(); + /** + * Creates a new EPViewAvailabilityDataPoint RecordBuilder by copying an existing + * EPViewAvailabilityDataPoint instance. + * + * @param other The existing instance to copy. 
+ * @return A new EPViewAvailabilityDataPoint RecordBuilder + */ + public static sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder newBuilder( + sparqles.avro.analytics.EPViewAvailabilityDataPoint other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(); + } else { + return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(other); } + } + + /** RecordBuilder for EPViewAvailabilityDataPoint instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { - /** Creates a new EPViewAvailabilityDataPoint RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder newBuilder( - sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder other) { - return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(other); + private long x; + private double y; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new EPViewAvailabilityDataPoint RecordBuilder by copying an existing - * EPViewAvailabilityDataPoint instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder newBuilder( - sparqles.avro.analytics.EPViewAvailabilityDataPoint other) { - return new sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder(other); + private Builder(sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder other) { + super(other); + if (isValidValue(fields()[0], other.x)) { + this.x = data().deepCopy(fields()[0].schema(), other.x); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.y)) { + this.y = data().deepCopy(fields()[1].schema(), other.y); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing EPViewAvailabilityDataPoint instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewAvailabilityDataPoint other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.x)) { + this.x = data().deepCopy(fields()[0].schema(), other.x); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.y)) { + this.y = data().deepCopy(fields()[1].schema(), other.y); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return x; - case 1: - return y; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'x' field. + * + * @return The value. + */ + public long getX() { + return x; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - x = (java.lang.Long) value$; - break; - case 1: - y = (java.lang.Double) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'x' field. 
+ * + * @param value The value of 'x'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder setX(long value) { + validate(fields()[0], value); + this.x = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'x' field. */ - public java.lang.Long getX() { - return x; + /** + * Checks whether the 'x' field has been set. + * + * @return True if the 'x' field has been set, false otherwise. + */ + public boolean hasX() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'x' field. + * Clears the value of the 'x' field. * - * @param value the value to set. + * @return This builder. */ - public void setX(java.lang.Long value) { - this.x = value; + public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder clearX() { + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'y' field. */ - public java.lang.Double getY() { - return y; + /** + * Gets the value of the 'y' field. + * + * @return The value. + */ + public double getY() { + return y; } /** * Sets the value of the 'y' field. * - * @param value the value to set. + * @param value The value of 'y'. + * @return This builder. */ - public void setY(java.lang.Double value) { - this.y = value; + public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder setY(double value) { + validate(fields()[1], value); + this.y = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for EPViewAvailabilityDataPoint instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'y' field has been set. + * + * @return True if the 'y' field has been set, false otherwise. + */ + public boolean hasY() { + return fieldSetFlags()[1]; + } - private long x; - private double y; + /** + * Clears the value of the 'y' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder clearY() { + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewAvailabilityDataPoint.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public EPViewAvailabilityDataPoint build() { + try { + EPViewAvailabilityDataPoint record = new EPViewAvailabilityDataPoint(); + record.x = fieldSetFlags()[0] ? this.x : (java.lang.Long) defaultValue(fields()[0]); + record.y = fieldSetFlags()[1] ? this.y : (java.lang.Double) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder other) { - super(other); - if (isValidValue(fields()[0], other.x)) { - this.x = data().deepCopy(fields()[0].schema(), other.x); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.y)) { - this.y = data().deepCopy(fields()[1].schema(), other.y); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing EPViewAvailabilityDataPoint instance */ - private Builder(sparqles.avro.analytics.EPViewAvailabilityDataPoint other) { - super(sparqles.avro.analytics.EPViewAvailabilityDataPoint.SCHEMA$); - if (isValidValue(fields()[0], other.x)) { - this.x = data().deepCopy(fields()[0].schema(), other.x); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.y)) { - this.y = data().deepCopy(fields()[1].schema(), other.y); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws 
java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'x' field */ - public java.lang.Long getX() { - return x; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'x' field */ - public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder setX(long value) { - validate(fields()[0], value); - this.x = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'x' field has been set */ - public boolean hasX() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'x' field */ - public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder clearX() { - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeLong(this.x); - /** Gets the value of the 'y' field */ - public java.lang.Double getY() { - return y; - } + out.writeDouble(this.y); + } - /** Sets the value of the 'y' field */ - public sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder setY(double value) { - validate(fields()[1], value); - this.y = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.x = in.readLong(); - /** Checks whether the 'y' field has been set */ - public boolean hasY() { - return fieldSetFlags()[1]; - } + this.y = in.readDouble(); - /** Clears the value of the 'y' field */ - public 
sparqles.avro.analytics.EPViewAvailabilityDataPoint.Builder clearY() { - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.x = in.readLong(); + break; + + case 1: + this.y = in.readDouble(); + break; - @Override - public EPViewAvailabilityDataPoint build() { - try { - EPViewAvailabilityDataPoint record = new EPViewAvailabilityDataPoint(); - record.x = fieldSetFlags()[0] ? this.x : (java.lang.Long) defaultValue(fields()[0]); - record.y = - fieldSetFlags()[1] ? this.y : (java.lang.Double) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewCalculation.java b/backend/src/main/java/sparqles/avro/analytics/EPViewCalculation.java new file mode 100644 index 00000000..418611b2 --- /dev/null +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewCalculation.java @@ -0,0 +1,1437 @@ +/** + * Autogenerated by Avro + * + *

DO NOT EDIT DIRECTLY + */ +package sparqles.avro.analytics; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + +@org.apache.avro.specific.AvroGenerated +public class EPViewCalculation extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -6130252355681190278L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewCalculation\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"triples\",\"type\":\"long\"},{\"name\":\"entities\",\"type\":\"long\"},{\"name\":\"classes\",\"type\":\"long\"},{\"name\":\"properties\",\"type\":\"long\"},{\"name\":\"distinctSubjects\",\"type\":\"long\"},{\"name\":\"distinctObjects\",\"type\":\"long\"},{\"name\":\"exampleResources\",\"type\":{\"type\":\"array\",\"items\":\"string\"}},{\"name\":\"VoID\",\"type\":\"string\"},{\"name\":\"VoIDPart\",\"type\":\"boolean\"},{\"name\":\"SD\",\"type\":\"string\"},{\"name\":\"SDPart\",\"type\":\"boolean\"},{\"name\":\"coherence\",\"type\":\"double\"},{\"name\":\"RS\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewCalculation to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewCalculation from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewCalculation instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewCalculation fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private long triples; + private long entities; + private long classes; + private long properties; + private long distinctSubjects; + private long distinctObjects; + private java.util.List exampleResources; + private java.lang.CharSequence VoID; + private boolean VoIDPart; + private java.lang.CharSequence SD; + private boolean SDPart; + private double coherence; + private double RS; + + /** + * Default constructor. 
Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewCalculation() {} + + /** + * All-args constructor. + * + * @param triples The new value for triples + * @param entities The new value for entities + * @param classes The new value for classes + * @param properties The new value for properties + * @param distinctSubjects The new value for distinctSubjects + * @param distinctObjects The new value for distinctObjects + * @param exampleResources The new value for exampleResources + * @param VoID The new value for VoID + * @param VoIDPart The new value for VoIDPart + * @param SD The new value for SD + * @param SDPart The new value for SDPart + * @param coherence The new value for coherence + * @param RS The new value for RS + */ + public EPViewCalculation( + java.lang.Long triples, + java.lang.Long entities, + java.lang.Long classes, + java.lang.Long properties, + java.lang.Long distinctSubjects, + java.lang.Long distinctObjects, + java.util.List exampleResources, + java.lang.CharSequence VoID, + java.lang.Boolean VoIDPart, + java.lang.CharSequence SD, + java.lang.Boolean SDPart, + java.lang.Double coherence, + java.lang.Double RS) { + this.triples = triples; + this.entities = entities; + this.classes = classes; + this.properties = properties; + this.distinctSubjects = distinctSubjects; + this.distinctObjects = distinctObjects; + this.exampleResources = exampleResources; + this.VoID = VoID; + this.VoIDPart = VoIDPart; + this.SD = SD; + this.SDPart = SDPart; + this.coherence = coherence; + this.RS = RS; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return triples; + case 1: + return entities; + case 2: + return classes; + case 3: + return properties; + case 4: + return distinctSubjects; + case 5: + return distinctObjects; + case 6: + return exampleResources; + case 7: + return VoID; + case 8: + return VoIDPart; + case 9: + return SD; + case 10: + return SDPart; + case 11: + return coherence; + case 12: + return RS; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + triples = (java.lang.Long) value$; + break; + case 1: + entities = (java.lang.Long) value$; + break; + case 2: + classes = (java.lang.Long) value$; + break; + case 3: + properties = (java.lang.Long) value$; + break; + case 4: + distinctSubjects = (java.lang.Long) value$; + break; + case 5: + distinctObjects = (java.lang.Long) value$; + break; + case 6: + exampleResources = (java.util.List) value$; + break; + case 7: + VoID = (java.lang.CharSequence) value$; + break; + case 8: + VoIDPart = (java.lang.Boolean) value$; + break; + case 9: + SD = (java.lang.CharSequence) value$; + break; + case 10: + SDPart = (java.lang.Boolean) value$; + break; + case 11: + coherence = (java.lang.Double) value$; + break; + case 12: + RS = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'triples' field. + * + * @return The value of the 'triples' field. + */ + public long getTriples() { + return triples; + } + + /** + * Sets the value of the 'triples' field. + * + * @param value the value to set. + */ + public void setTriples(long value) { + this.triples = value; + } + + /** + * Gets the value of the 'entities' field. 
+ * + * @return The value of the 'entities' field. + */ + public long getEntities() { + return entities; + } + + /** + * Sets the value of the 'entities' field. + * + * @param value the value to set. + */ + public void setEntities(long value) { + this.entities = value; + } + + /** + * Gets the value of the 'classes' field. + * + * @return The value of the 'classes' field. + */ + public long getClasses() { + return classes; + } + + /** + * Sets the value of the 'classes' field. + * + * @param value the value to set. + */ + public void setClasses(long value) { + this.classes = value; + } + + /** + * Gets the value of the 'properties' field. + * + * @return The value of the 'properties' field. + */ + public long getProperties() { + return properties; + } + + /** + * Sets the value of the 'properties' field. + * + * @param value the value to set. + */ + public void setProperties(long value) { + this.properties = value; + } + + /** + * Gets the value of the 'distinctSubjects' field. + * + * @return The value of the 'distinctSubjects' field. + */ + public long getDistinctSubjects() { + return distinctSubjects; + } + + /** + * Sets the value of the 'distinctSubjects' field. + * + * @param value the value to set. + */ + public void setDistinctSubjects(long value) { + this.distinctSubjects = value; + } + + /** + * Gets the value of the 'distinctObjects' field. + * + * @return The value of the 'distinctObjects' field. + */ + public long getDistinctObjects() { + return distinctObjects; + } + + /** + * Sets the value of the 'distinctObjects' field. + * + * @param value the value to set. + */ + public void setDistinctObjects(long value) { + this.distinctObjects = value; + } + + /** + * Gets the value of the 'exampleResources' field. + * + * @return The value of the 'exampleResources' field. + */ + public java.util.List getExampleResources() { + return exampleResources; + } + + /** + * Sets the value of the 'exampleResources' field. + * + * @param value the value to set. 
+ */ + public void setExampleResources(java.util.List value) { + this.exampleResources = value; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value of the 'VoID' field. + */ + public java.lang.CharSequence getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value the value to set. + */ + public void setVoID(java.lang.CharSequence value) { + this.VoID = value; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value of the 'VoIDPart' field. + */ + public boolean getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value the value to set. + */ + public void setVoIDPart(boolean value) { + this.VoIDPart = value; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value of the 'SD' field. + */ + public java.lang.CharSequence getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value the value to set. + */ + public void setSD(java.lang.CharSequence value) { + this.SD = value; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value of the 'SDPart' field. + */ + public boolean getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value the value to set. + */ + public void setSDPart(boolean value) { + this.SDPart = value; + } + + /** + * Gets the value of the 'coherence' field. + * + * @return The value of the 'coherence' field. + */ + public double getCoherence() { + return coherence; + } + + /** + * Sets the value of the 'coherence' field. + * + * @param value the value to set. + */ + public void setCoherence(double value) { + this.coherence = value; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value of the 'RS' field. + */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value the value to set. 
+ */ + public void setRS(double value) { + this.RS = value; + } + + /** + * Creates a new EPViewCalculation RecordBuilder. + * + * @return A new EPViewCalculation RecordBuilder + */ + public static sparqles.avro.analytics.EPViewCalculation.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewCalculation.Builder(); + } + + /** + * Creates a new EPViewCalculation RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewCalculation RecordBuilder + */ + public static sparqles.avro.analytics.EPViewCalculation.Builder newBuilder( + sparqles.avro.analytics.EPViewCalculation.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewCalculation.Builder(); + } else { + return new sparqles.avro.analytics.EPViewCalculation.Builder(other); + } + } + + /** + * Creates a new EPViewCalculation RecordBuilder by copying an existing EPViewCalculation + * instance. + * + * @param other The existing instance to copy. + * @return A new EPViewCalculation RecordBuilder + */ + public static sparqles.avro.analytics.EPViewCalculation.Builder newBuilder( + sparqles.avro.analytics.EPViewCalculation other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewCalculation.Builder(); + } else { + return new sparqles.avro.analytics.EPViewCalculation.Builder(other); + } + } + + /** RecordBuilder for EPViewCalculation instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private long triples; + private long entities; + private long classes; + private long properties; + private long distinctSubjects; + private long distinctObjects; + private java.util.List exampleResources; + private java.lang.CharSequence VoID; + private boolean VoIDPart; + private java.lang.CharSequence SD; + private boolean SDPart; + private double coherence; + private double RS; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.analytics.EPViewCalculation.Builder other) { + super(other); + if (isValidValue(fields()[0], other.triples)) { + this.triples = data().deepCopy(fields()[0].schema(), other.triples); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.entities)) { + this.entities = data().deepCopy(fields()[1].schema(), other.entities); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.classes)) { + this.classes = data().deepCopy(fields()[2].schema(), other.classes); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.properties)) { + this.properties = data().deepCopy(fields()[3].schema(), other.properties); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.distinctSubjects)) { + this.distinctSubjects = data().deepCopy(fields()[4].schema(), other.distinctSubjects); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.distinctObjects)) { + this.distinctObjects = data().deepCopy(fields()[5].schema(), other.distinctObjects); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if 
(isValidValue(fields()[6], other.exampleResources)) { + this.exampleResources = data().deepCopy(fields()[6].schema(), other.exampleResources); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (isValidValue(fields()[7], other.VoID)) { + this.VoID = data().deepCopy(fields()[7].schema(), other.VoID); + fieldSetFlags()[7] = other.fieldSetFlags()[7]; + } + if (isValidValue(fields()[8], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[8].schema(), other.VoIDPart); + fieldSetFlags()[8] = other.fieldSetFlags()[8]; + } + if (isValidValue(fields()[9], other.SD)) { + this.SD = data().deepCopy(fields()[9].schema(), other.SD); + fieldSetFlags()[9] = other.fieldSetFlags()[9]; + } + if (isValidValue(fields()[10], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[10].schema(), other.SDPart); + fieldSetFlags()[10] = other.fieldSetFlags()[10]; + } + if (isValidValue(fields()[11], other.coherence)) { + this.coherence = data().deepCopy(fields()[11].schema(), other.coherence); + fieldSetFlags()[11] = other.fieldSetFlags()[11]; + } + if (isValidValue(fields()[12], other.RS)) { + this.RS = data().deepCopy(fields()[12].schema(), other.RS); + fieldSetFlags()[12] = other.fieldSetFlags()[12]; + } + } + + /** + * Creates a Builder by copying an existing EPViewCalculation instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.EPViewCalculation other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.triples)) { + this.triples = data().deepCopy(fields()[0].schema(), other.triples); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.entities)) { + this.entities = data().deepCopy(fields()[1].schema(), other.entities); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.classes)) { + this.classes = data().deepCopy(fields()[2].schema(), other.classes); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.properties)) { + this.properties = data().deepCopy(fields()[3].schema(), other.properties); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.distinctSubjects)) { + this.distinctSubjects = data().deepCopy(fields()[4].schema(), other.distinctSubjects); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.distinctObjects)) { + this.distinctObjects = data().deepCopy(fields()[5].schema(), other.distinctObjects); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.exampleResources)) { + this.exampleResources = data().deepCopy(fields()[6].schema(), other.exampleResources); + fieldSetFlags()[6] = true; + } + if (isValidValue(fields()[7], other.VoID)) { + this.VoID = data().deepCopy(fields()[7].schema(), other.VoID); + fieldSetFlags()[7] = true; + } + if (isValidValue(fields()[8], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[8].schema(), other.VoIDPart); + fieldSetFlags()[8] = true; + } + if (isValidValue(fields()[9], other.SD)) { + this.SD = data().deepCopy(fields()[9].schema(), other.SD); + fieldSetFlags()[9] = true; + } + if (isValidValue(fields()[10], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[10].schema(), other.SDPart); + fieldSetFlags()[10] = true; + } + if (isValidValue(fields()[11], other.coherence)) { + this.coherence = data().deepCopy(fields()[11].schema(), other.coherence); + 
fieldSetFlags()[11] = true; + } + if (isValidValue(fields()[12], other.RS)) { + this.RS = data().deepCopy(fields()[12].schema(), other.RS); + fieldSetFlags()[12] = true; + } + } + + /** + * Gets the value of the 'triples' field. + * + * @return The value. + */ + public long getTriples() { + return triples; + } + + /** + * Sets the value of the 'triples' field. + * + * @param value The value of 'triples'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setTriples(long value) { + validate(fields()[0], value); + this.triples = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'triples' field has been set. + * + * @return True if the 'triples' field has been set, false otherwise. + */ + public boolean hasTriples() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'triples' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearTriples() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'entities' field. + * + * @return The value. + */ + public long getEntities() { + return entities; + } + + /** + * Sets the value of the 'entities' field. + * + * @param value The value of 'entities'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setEntities(long value) { + validate(fields()[1], value); + this.entities = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'entities' field has been set. + * + * @return True if the 'entities' field has been set, false otherwise. + */ + public boolean hasEntities() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'entities' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearEntities() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'classes' field. 
+ * + * @return The value. + */ + public long getClasses() { + return classes; + } + + /** + * Sets the value of the 'classes' field. + * + * @param value The value of 'classes'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setClasses(long value) { + validate(fields()[2], value); + this.classes = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'classes' field has been set. + * + * @return True if the 'classes' field has been set, false otherwise. + */ + public boolean hasClasses() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'classes' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearClasses() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'properties' field. + * + * @return The value. + */ + public long getProperties() { + return properties; + } + + /** + * Sets the value of the 'properties' field. + * + * @param value The value of 'properties'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setProperties(long value) { + validate(fields()[3], value); + this.properties = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'properties' field has been set. + * + * @return True if the 'properties' field has been set, false otherwise. + */ + public boolean hasProperties() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'properties' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearProperties() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'distinctSubjects' field. + * + * @return The value. + */ + public long getDistinctSubjects() { + return distinctSubjects; + } + + /** + * Sets the value of the 'distinctSubjects' field. + * + * @param value The value of 'distinctSubjects'. 
+ * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setDistinctSubjects(long value) { + validate(fields()[4], value); + this.distinctSubjects = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'distinctSubjects' field has been set. + * + * @return True if the 'distinctSubjects' field has been set, false otherwise. + */ + public boolean hasDistinctSubjects() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'distinctSubjects' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearDistinctSubjects() { + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'distinctObjects' field. + * + * @return The value. + */ + public long getDistinctObjects() { + return distinctObjects; + } + + /** + * Sets the value of the 'distinctObjects' field. + * + * @param value The value of 'distinctObjects'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setDistinctObjects(long value) { + validate(fields()[5], value); + this.distinctObjects = value; + fieldSetFlags()[5] = true; + return this; + } + + /** + * Checks whether the 'distinctObjects' field has been set. + * + * @return True if the 'distinctObjects' field has been set, false otherwise. + */ + public boolean hasDistinctObjects() { + return fieldSetFlags()[5]; + } + + /** + * Clears the value of the 'distinctObjects' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearDistinctObjects() { + fieldSetFlags()[5] = false; + return this; + } + + /** + * Gets the value of the 'exampleResources' field. + * + * @return The value. + */ + public java.util.List getExampleResources() { + return exampleResources; + } + + /** + * Sets the value of the 'exampleResources' field. + * + * @param value The value of 'exampleResources'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewCalculation.Builder setExampleResources( + java.util.List value) { + validate(fields()[6], value); + this.exampleResources = value; + fieldSetFlags()[6] = true; + return this; + } + + /** + * Checks whether the 'exampleResources' field has been set. + * + * @return True if the 'exampleResources' field has been set, false otherwise. + */ + public boolean hasExampleResources() { + return fieldSetFlags()[6]; + } + + /** + * Clears the value of the 'exampleResources' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearExampleResources() { + exampleResources = null; + fieldSetFlags()[6] = false; + return this; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value. + */ + public java.lang.CharSequence getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value The value of 'VoID'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setVoID(java.lang.CharSequence value) { + validate(fields()[7], value); + this.VoID = value; + fieldSetFlags()[7] = true; + return this; + } + + /** + * Checks whether the 'VoID' field has been set. + * + * @return True if the 'VoID' field has been set, false otherwise. + */ + public boolean hasVoID() { + return fieldSetFlags()[7]; + } + + /** + * Clears the value of the 'VoID' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearVoID() { + VoID = null; + fieldSetFlags()[7] = false; + return this; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value. + */ + public boolean getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value The value of 'VoIDPart'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewCalculation.Builder setVoIDPart(boolean value) { + validate(fields()[8], value); + this.VoIDPart = value; + fieldSetFlags()[8] = true; + return this; + } + + /** + * Checks whether the 'VoIDPart' field has been set. + * + * @return True if the 'VoIDPart' field has been set, false otherwise. + */ + public boolean hasVoIDPart() { + return fieldSetFlags()[8]; + } + + /** + * Clears the value of the 'VoIDPart' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearVoIDPart() { + fieldSetFlags()[8] = false; + return this; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value. + */ + public java.lang.CharSequence getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value The value of 'SD'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setSD(java.lang.CharSequence value) { + validate(fields()[9], value); + this.SD = value; + fieldSetFlags()[9] = true; + return this; + } + + /** + * Checks whether the 'SD' field has been set. + * + * @return True if the 'SD' field has been set, false otherwise. + */ + public boolean hasSD() { + return fieldSetFlags()[9]; + } + + /** + * Clears the value of the 'SD' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearSD() { + SD = null; + fieldSetFlags()[9] = false; + return this; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value. + */ + public boolean getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value The value of 'SDPart'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setSDPart(boolean value) { + validate(fields()[10], value); + this.SDPart = value; + fieldSetFlags()[10] = true; + return this; + } + + /** + * Checks whether the 'SDPart' field has been set. 
+ * + * @return True if the 'SDPart' field has been set, false otherwise. + */ + public boolean hasSDPart() { + return fieldSetFlags()[10]; + } + + /** + * Clears the value of the 'SDPart' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearSDPart() { + fieldSetFlags()[10] = false; + return this; + } + + /** + * Gets the value of the 'coherence' field. + * + * @return The value. + */ + public double getCoherence() { + return coherence; + } + + /** + * Sets the value of the 'coherence' field. + * + * @param value The value of 'coherence'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setCoherence(double value) { + validate(fields()[11], value); + this.coherence = value; + fieldSetFlags()[11] = true; + return this; + } + + /** + * Checks whether the 'coherence' field has been set. + * + * @return True if the 'coherence' field has been set, false otherwise. + */ + public boolean hasCoherence() { + return fieldSetFlags()[11]; + } + + /** + * Clears the value of the 'coherence' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearCoherence() { + fieldSetFlags()[11] = false; + return this; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value. + */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value The value of 'RS'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewCalculation.Builder setRS(double value) { + validate(fields()[12], value); + this.RS = value; + fieldSetFlags()[12] = true; + return this; + } + + /** + * Checks whether the 'RS' field has been set. + * + * @return True if the 'RS' field has been set, false otherwise. + */ + public boolean hasRS() { + return fieldSetFlags()[12]; + } + + /** + * Clears the value of the 'RS' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.EPViewCalculation.Builder clearRS() { + fieldSetFlags()[12] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public EPViewCalculation build() { + try { + EPViewCalculation record = new EPViewCalculation(); + record.triples = + fieldSetFlags()[0] ? this.triples : (java.lang.Long) defaultValue(fields()[0]); + record.entities = + fieldSetFlags()[1] ? this.entities : (java.lang.Long) defaultValue(fields()[1]); + record.classes = + fieldSetFlags()[2] ? this.classes : (java.lang.Long) defaultValue(fields()[2]); + record.properties = + fieldSetFlags()[3] ? this.properties : (java.lang.Long) defaultValue(fields()[3]); + record.distinctSubjects = + fieldSetFlags()[4] ? this.distinctSubjects : (java.lang.Long) defaultValue(fields()[4]); + record.distinctObjects = + fieldSetFlags()[5] ? this.distinctObjects : (java.lang.Long) defaultValue(fields()[5]); + record.exampleResources = + fieldSetFlags()[6] + ? this.exampleResources + : (java.util.List) defaultValue(fields()[6]); + record.VoID = + fieldSetFlags()[7] ? this.VoID : (java.lang.CharSequence) defaultValue(fields()[7]); + record.VoIDPart = + fieldSetFlags()[8] ? this.VoIDPart : (java.lang.Boolean) defaultValue(fields()[8]); + record.SD = + fieldSetFlags()[9] ? this.SD : (java.lang.CharSequence) defaultValue(fields()[9]); + record.SDPart = + fieldSetFlags()[10] ? this.SDPart : (java.lang.Boolean) defaultValue(fields()[10]); + record.coherence = + fieldSetFlags()[11] ? this.coherence : (java.lang.Double) defaultValue(fields()[11]); + record.RS = fieldSetFlags()[12] ? 
this.RS : (java.lang.Double) defaultValue(fields()[12]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeLong(this.triples); + + out.writeLong(this.entities); + + out.writeLong(this.classes); + + out.writeLong(this.properties); + + out.writeLong(this.distinctSubjects); + + out.writeLong(this.distinctObjects); + + long size0 = this.exampleResources.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (java.lang.CharSequence e0 : this.exampleResources) { + actualSize0++; + out.startItem(); + out.writeString(e0); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + out.writeString(this.VoID); + + out.writeBoolean(this.VoIDPart); + + out.writeString(this.SD); + + out.writeBoolean(this.SDPart); + + out.writeDouble(this.coherence); + + out.writeDouble(this.RS); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder 
in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.triples = in.readLong(); + + this.entities = in.readLong(); + + this.classes = in.readLong(); + + this.properties = in.readLong(); + + this.distinctSubjects = in.readLong(); + + this.distinctObjects = in.readLong(); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.exampleResources; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("exampleResources").schema()); + this.exampleResources = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readString(e0 instanceof Utf8 ? (Utf8) e0 : null); + a0.add(e0); + } + } + + this.VoID = in.readString(this.VoID instanceof Utf8 ? (Utf8) this.VoID : null); + + this.VoIDPart = in.readBoolean(); + + this.SD = in.readString(this.SD instanceof Utf8 ? (Utf8) this.SD : null); + + this.SDPart = in.readBoolean(); + + this.coherence = in.readDouble(); + + this.RS = in.readDouble(); + + } else { + for (int i = 0; i < 13; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.triples = in.readLong(); + break; + + case 1: + this.entities = in.readLong(); + break; + + case 2: + this.classes = in.readLong(); + break; + + case 3: + this.properties = in.readLong(); + break; + + case 4: + this.distinctSubjects = in.readLong(); + break; + + case 5: + this.distinctObjects = in.readLong(); + break; + + case 6: + long size0 = in.readArrayStart(); + java.util.List a0 = this.exampleResources; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("exampleResources").schema()); + this.exampleResources = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readString(e0 instanceof Utf8 ? (Utf8) e0 : null); + a0.add(e0); + } + } + break; + + case 7: + this.VoID = in.readString(this.VoID instanceof Utf8 ? (Utf8) this.VoID : null); + break; + + case 8: + this.VoIDPart = in.readBoolean(); + break; + + case 9: + this.SD = in.readString(this.SD instanceof Utf8 ? (Utf8) this.SD : null); + break; + + case 10: + this.SDPart = in.readBoolean(); + break; + + case 11: + this.coherence = in.readDouble(); + break; + + case 12: + this.RS = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverability.java b/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverability.java index 0f08664e..572e000a 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverability.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverability.java @@ -5,296 +5,642 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class EPViewDiscoverability extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"EPViewDiscoverability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"VoIDDescription\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewDiscoverabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"}]}}},{\"name\":\"SDDescription\",\"type\":{\"type\":\"array\",\"items\":\"EPViewDiscoverabilityData\"}}]}"); - @Deprecated public java.lang.CharSequence serverName; - - @Deprecated - public java.util.List VoIDDescription; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -5360144233249899222L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewDiscoverability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"VoIDDescription\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewDiscoverabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"}]}}},{\"name\":\"SDDescription\",\"type\":{\"type\":\"array\",\"items\":\"EPViewDiscoverabilityData\"}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewDiscoverability to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewDiscoverability from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewDiscoverability instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewDiscoverability fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence serverName; + private java.util.List VoIDDescription; + private java.util.List SDDescription; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewDiscoverability() {} + + /** + * All-args constructor. 
+ * + * @param serverName The new value for serverName + * @param VoIDDescription The new value for VoIDDescription + * @param SDDescription The new value for SDDescription + */ + public EPViewDiscoverability( + java.lang.CharSequence serverName, + java.util.List VoIDDescription, + java.util.List SDDescription) { + this.serverName = serverName; + this.VoIDDescription = VoIDDescription; + this.SDDescription = SDDescription; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return serverName; + case 1: + return VoIDDescription; + case 2: + return SDDescription; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + serverName = (java.lang.CharSequence) value$; + break; + case 1: + VoIDDescription = + (java.util.List) value$; + break; + case 2: + SDDescription = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'serverName' field. + * + * @return The value of the 'serverName' field. + */ + public java.lang.CharSequence getServerName() { + return serverName; + } + + /** + * Sets the value of the 'serverName' field. + * + * @param value the value to set. + */ + public void setServerName(java.lang.CharSequence value) { + this.serverName = value; + } + + /** + * Gets the value of the 'VoIDDescription' field. + * + * @return The value of the 'VoIDDescription' field. 
+ */ + public java.util.List getVoIDDescription() { + return VoIDDescription; + } + + /** + * Sets the value of the 'VoIDDescription' field. + * + * @param value the value to set. + */ + public void setVoIDDescription( + java.util.List value) { + this.VoIDDescription = value; + } + + /** + * Gets the value of the 'SDDescription' field. + * + * @return The value of the 'SDDescription' field. + */ + public java.util.List getSDDescription() { + return SDDescription; + } + + /** + * Sets the value of the 'SDDescription' field. + * + * @param value the value to set. + */ + public void setSDDescription( + java.util.List value) { + this.SDDescription = value; + } + + /** + * Creates a new EPViewDiscoverability RecordBuilder. + * + * @return A new EPViewDiscoverability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewDiscoverability.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewDiscoverability.Builder(); + } + + /** + * Creates a new EPViewDiscoverability RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewDiscoverability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewDiscoverability.Builder newBuilder( + sparqles.avro.analytics.EPViewDiscoverability.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewDiscoverability.Builder(); + } else { + return new sparqles.avro.analytics.EPViewDiscoverability.Builder(other); + } + } + + /** + * Creates a new EPViewDiscoverability RecordBuilder by copying an existing EPViewDiscoverability + * instance. + * + * @param other The existing instance to copy. 
+ * @return A new EPViewDiscoverability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewDiscoverability.Builder newBuilder( + sparqles.avro.analytics.EPViewDiscoverability other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewDiscoverability.Builder(); + } else { + return new sparqles.avro.analytics.EPViewDiscoverability.Builder(other); + } + } - @Deprecated - public java.util.List SDDescription; + /** RecordBuilder for EPViewDiscoverability instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewDiscoverability() {} - - /** All-args constructor. */ - public EPViewDiscoverability( - java.lang.CharSequence serverName, - java.util.List VoIDDescription, - java.util.List SDDescription) { - this.serverName = serverName; - this.VoIDDescription = VoIDDescription; - this.SDDescription = SDDescription; - } + private java.lang.CharSequence serverName; + private java.util.List VoIDDescription; + private java.util.List SDDescription; - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - /** Creates a new EPViewDiscoverability RecordBuilder */ - public static sparqles.avro.analytics.EPViewDiscoverability.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewDiscoverability.Builder(); + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.EPViewDiscoverability.Builder other) { + super(other); + if (isValidValue(fields()[0], other.serverName)) { + this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.VoIDDescription)) { + this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.SDDescription)) { + this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - /** Creates a new EPViewDiscoverability RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewDiscoverability.Builder newBuilder( - sparqles.avro.analytics.EPViewDiscoverability.Builder other) { - return new sparqles.avro.analytics.EPViewDiscoverability.Builder(other); + /** + * Creates a Builder by copying an existing EPViewDiscoverability instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewDiscoverability other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.serverName)) { + this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.VoIDDescription)) { + this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.SDDescription)) { + this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); + fieldSetFlags()[2] = true; + } } /** - * Creates a new EPViewDiscoverability RecordBuilder by copying an existing - * EPViewDiscoverability instance + * Gets the value of the 'serverName' field. + * + * @return The value. 
*/ - public static sparqles.avro.analytics.EPViewDiscoverability.Builder newBuilder( - sparqles.avro.analytics.EPViewDiscoverability other) { - return new sparqles.avro.analytics.EPViewDiscoverability.Builder(other); + public java.lang.CharSequence getServerName() { + return serverName; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Sets the value of the 'serverName' field. + * + * @param value The value of 'serverName'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewDiscoverability.Builder setServerName( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.serverName = value; + fieldSetFlags()[0] = true; + return this; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return serverName; - case 1: - return VoIDDescription; - case 2: - return SDDescription; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Checks whether the 'serverName' field has been set. + * + * @return True if the 'serverName' field has been set, false otherwise. + */ + public boolean hasServerName() { + return fieldSetFlags()[0]; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - serverName = (java.lang.CharSequence) value$; - break; - case 1: - VoIDDescription = - (java.util.List) value$; - break; - case 2: - SDDescription = - (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Clears the value of the 'serverName' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewDiscoverability.Builder clearServerName() { + serverName = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'serverName' field. 
*/ - public java.lang.CharSequence getServerName() { - return serverName; + /** + * Gets the value of the 'VoIDDescription' field. + * + * @return The value. + */ + public java.util.List getVoIDDescription() { + return VoIDDescription; } /** - * Sets the value of the 'serverName' field. + * Sets the value of the 'VoIDDescription' field. * - * @param value the value to set. + * @param value The value of 'VoIDDescription'. + * @return This builder. */ - public void setServerName(java.lang.CharSequence value) { - this.serverName = value; + public sparqles.avro.analytics.EPViewDiscoverability.Builder setVoIDDescription( + java.util.List value) { + validate(fields()[1], value); + this.VoIDDescription = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'VoIDDescription' field. */ - public java.util.List getVoIDDescription() { - return VoIDDescription; + /** + * Checks whether the 'VoIDDescription' field has been set. + * + * @return True if the 'VoIDDescription' field has been set, false otherwise. + */ + public boolean hasVoIDDescription() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'VoIDDescription' field. + * Clears the value of the 'VoIDDescription' field. * - * @param value the value to set. + * @return This builder. */ - public void setVoIDDescription( - java.util.List value) { - this.VoIDDescription = value; + public sparqles.avro.analytics.EPViewDiscoverability.Builder clearVoIDDescription() { + VoIDDescription = null; + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'SDDescription' field. */ + /** + * Gets the value of the 'SDDescription' field. + * + * @return The value. + */ public java.util.List getSDDescription() { - return SDDescription; + return SDDescription; } /** * Sets the value of the 'SDDescription' field. * - * @param value the value to set. + * @param value The value of 'SDDescription'. + * @return This builder. 
*/ - public void setSDDescription( - java.util.List value) { - this.SDDescription = value; + public sparqles.avro.analytics.EPViewDiscoverability.Builder setSDDescription( + java.util.List value) { + validate(fields()[2], value); + this.SDDescription = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for EPViewDiscoverability instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'SDDescription' field has been set. + * + * @return True if the 'SDDescription' field has been set, false otherwise. + */ + public boolean hasSDDescription() { + return fieldSetFlags()[2]; + } - private java.lang.CharSequence serverName; - private java.util.List VoIDDescription; - private java.util.List SDDescription; + /** + * Clears the value of the 'SDDescription' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewDiscoverability.Builder clearSDDescription() { + SDDescription = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewDiscoverability.SCHEMA$); + @Override + @SuppressWarnings("unchecked") + public EPViewDiscoverability build() { + try { + EPViewDiscoverability record = new EPViewDiscoverability(); + record.serverName = + fieldSetFlags()[0] + ? this.serverName + : (java.lang.CharSequence) defaultValue(fields()[0]); + record.VoIDDescription = + fieldSetFlags()[1] + ? this.VoIDDescription + : (java.util.List) + defaultValue(fields()[1]); + record.SDDescription = + fieldSetFlags()[2] + ? 
this.SDDescription + : (java.util.List) + defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.serverName); + + long size0 = this.VoIDDescription.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.EPViewDiscoverabilityData e0 : this.VoIDDescription) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + long size1 = this.SDDescription.size(); + out.writeArrayStart(); + out.setItemCount(size1); + long actualSize1 = 0; + for (sparqles.avro.analytics.EPViewDiscoverabilityData e1 : this.SDDescription) { + actualSize1++; + out.startItem(); + e1.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize1 != size1) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size1 + ", but element 
count was " + actualSize1 + "."); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.serverName = + in.readString(this.serverName instanceof Utf8 ? (Utf8) this.serverName : null); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.VoIDDescription; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("VoIDDescription").schema()); + this.VoIDDescription = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewDiscoverabilityData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewDiscoverabilityData(); + } + e0.customDecode(in); + a0.add(e0); } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewDiscoverability.Builder other) { - super(other); - if (isValidValue(fields()[0], other.serverName)) { - this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.VoIDDescription)) { - this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.SDDescription)) { - this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); - fieldSetFlags()[2] = true; - } + } + + long size1 = in.readArrayStart(); + java.util.List a1 = this.SDDescription; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("SDDescription").schema()); + this.SDDescription = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? 
(SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.EPViewDiscoverabilityData e1 = (ga1 != null ? ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.EPViewDiscoverabilityData(); + } + e1.customDecode(in); + a1.add(e1); } - - /** Creates a Builder by copying an existing EPViewDiscoverability instance */ - private Builder(sparqles.avro.analytics.EPViewDiscoverability other) { - super(sparqles.avro.analytics.EPViewDiscoverability.SCHEMA$); - if (isValidValue(fields()[0], other.serverName)) { - this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.VoIDDescription)) { - this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); - fieldSetFlags()[1] = true; + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.serverName = + in.readString(this.serverName instanceof Utf8 ? (Utf8) this.serverName : null); + break; + + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = + this.VoIDDescription; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("VoIDDescription").schema()); + this.VoIDDescription = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewDiscoverabilityData e0 = + (ga0 != null ? 
ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewDiscoverabilityData(); + } + e0.customDecode(in); + a0.add(e0); + } } - if (isValidValue(fields()[2], other.SDDescription)) { - this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); - fieldSetFlags()[2] = true; + break; + + case 2: + long size1 = in.readArrayStart(); + java.util.List a1 = + this.SDDescription; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("SDDescription").schema()); + this.SDDescription = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.EPViewDiscoverabilityData e1 = + (ga1 != null ? ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.EPViewDiscoverabilityData(); + } + e1.customDecode(in); + a1.add(e1); + } } - } - - /** Gets the value of the 'serverName' field */ - public java.lang.CharSequence getServerName() { - return serverName; - } + break; - /** Sets the value of the 'serverName' field */ - public sparqles.avro.analytics.EPViewDiscoverability.Builder setServerName( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.serverName = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'serverName' field has been set */ - public boolean hasServerName() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'serverName' field */ - public sparqles.avro.analytics.EPViewDiscoverability.Builder clearServerName() { - serverName = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'VoIDDescription' field */ - public java.util.List - getVoIDDescription() { - return VoIDDescription; - } - - /** Sets the value of the 'VoIDDescription' field */ - public sparqles.avro.analytics.EPViewDiscoverability.Builder 
setVoIDDescription( - java.util.List value) { - validate(fields()[1], value); - this.VoIDDescription = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'VoIDDescription' field has been set */ - public boolean hasVoIDDescription() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'VoIDDescription' field */ - public sparqles.avro.analytics.EPViewDiscoverability.Builder clearVoIDDescription() { - VoIDDescription = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'SDDescription' field */ - public java.util.List - getSDDescription() { - return SDDescription; - } - - /** Sets the value of the 'SDDescription' field */ - public sparqles.avro.analytics.EPViewDiscoverability.Builder setSDDescription( - java.util.List value) { - validate(fields()[2], value); - this.SDDescription = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'SDDescription' field has been set */ - public boolean hasSDDescription() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'SDDescription' field */ - public sparqles.avro.analytics.EPViewDiscoverability.Builder clearSDDescription() { - SDDescription = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public EPViewDiscoverability build() { - try { - EPViewDiscoverability record = new EPViewDiscoverability(); - record.serverName = - fieldSetFlags()[0] - ? this.serverName - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.VoIDDescription = - fieldSetFlags()[1] - ? this.VoIDDescription - : (java.util.List< - sparqles.avro.analytics.EPViewDiscoverabilityData>) - defaultValue(fields()[1]); - record.SDDescription = - fieldSetFlags()[2] - ? 
this.SDDescription - : (java.util.List< - sparqles.avro.analytics.EPViewDiscoverabilityData>) - defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverabilityData.java b/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverabilityData.java index d4a17501..b4c09302 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverabilityData.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewDiscoverabilityData.java @@ -5,217 +5,425 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class EPViewDiscoverabilityData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"EPViewDiscoverabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"}]}"); - @Deprecated public java.lang.CharSequence label; - @Deprecated public boolean value; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 5532018829358910439L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. 
- */ - public EPViewDiscoverabilityData() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewDiscoverabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. */ - public EPViewDiscoverabilityData(java.lang.CharSequence label, java.lang.Boolean value) { - this.label = label; - this.value = value; + /** + * Serializes this EPViewDiscoverabilityData to a ByteBuffer. 
+ * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewDiscoverabilityData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewDiscoverabilityData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewDiscoverabilityData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private boolean value; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewDiscoverabilityData() {} + + /** + * All-args constructor. + * + * @param label The new value for label + * @param value The new value for value + */ + public EPViewDiscoverabilityData(java.lang.CharSequence label, java.lang.Boolean value) { + this.label = label; + this.value = value; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Boolean) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new EPViewDiscoverabilityData RecordBuilder */ - public static sparqles.avro.analytics.EPViewDiscoverabilityData.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(); + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public boolean getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. + */ + public void setValue(boolean value) { + this.value = value; + } + + /** + * Creates a new EPViewDiscoverabilityData RecordBuilder. + * + * @return A new EPViewDiscoverabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewDiscoverabilityData.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(); + } + + /** + * Creates a new EPViewDiscoverabilityData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new EPViewDiscoverabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewDiscoverabilityData.Builder newBuilder( + sparqles.avro.analytics.EPViewDiscoverabilityData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(other); } + } + + /** + * Creates a new EPViewDiscoverabilityData RecordBuilder by copying an existing + * EPViewDiscoverabilityData instance. + * + * @param other The existing instance to copy. + * @return A new EPViewDiscoverabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewDiscoverabilityData.Builder newBuilder( + sparqles.avro.analytics.EPViewDiscoverabilityData other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(other); + } + } + + /** RecordBuilder for EPViewDiscoverabilityData instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private boolean value; - /** Creates a new EPViewDiscoverabilityData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewDiscoverabilityData.Builder newBuilder( - sparqles.avro.analytics.EPViewDiscoverabilityData.Builder other) { - return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new EPViewDiscoverabilityData RecordBuilder by copying an existing - * EPViewDiscoverabilityData instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.EPViewDiscoverabilityData.Builder newBuilder( - sparqles.avro.analytics.EPViewDiscoverabilityData other) { - return new sparqles.avro.analytics.EPViewDiscoverabilityData.Builder(other); + private Builder(sparqles.avro.analytics.EPViewDiscoverabilityData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing EPViewDiscoverabilityData instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewDiscoverabilityData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return label; - case 1: - return value; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'label' field. + * + * @return The value. + */ + public java.lang.CharSequence getLabel() { + return label; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - label = (java.lang.CharSequence) value$; - break; - case 1: - value = (java.lang.Boolean) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'label' field. */ - public java.lang.CharSequence getLabel() { - return label; + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. + */ + public boolean hasLabel() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'label' field. + * Clears the value of the 'label' field. * - * @param value the value to set. + * @return This builder. */ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'value' field. */ - public java.lang.Boolean getValue() { - return value; + /** + * Gets the value of the 'value' field. + * + * @return The value. + */ + public boolean getValue() { + return value; } /** * Sets the value of the 'value' field. * - * @param value the value to set. + * @param value The value of 'value'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder setValue(boolean value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'value' field has been set. 
+ * + * @return True if the 'value' field has been set, false otherwise. */ - public void setValue(java.lang.Boolean value) { - this.value = value; + public boolean hasValue() { + return fieldSetFlags()[1]; } - /** RecordBuilder for EPViewDiscoverabilityData instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Clears the value of the 'value' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; + } - private java.lang.CharSequence label; - private boolean value; + @Override + @SuppressWarnings("unchecked") + public EPViewDiscoverabilityData build() { + try { + EPViewDiscoverabilityData record = new EPViewDiscoverabilityData(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? 
this.value : (java.lang.Boolean) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewDiscoverabilityData.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewDiscoverabilityData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing EPViewDiscoverabilityData instance */ - private Builder(sparqles.avro.analytics.EPViewDiscoverabilityData other) { - super(sparqles.avro.analytics.EPViewDiscoverabilityData.SCHEMA$); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + @Override + public void 
readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'label' field */ - public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder setLabel( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.label = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); - /** Clears the value of the 'label' field */ - public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder clearLabel() { - label = null; - fieldSetFlags()[0] = false; - return this; - } + out.writeBoolean(this.value); + } - /** Gets the value of the 'value' field */ - public java.lang.Boolean getValue() { - return value; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); - /** Sets the value of the 'value' field */ - public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder setValue(boolean value) { - validate(fields()[1], value); - this.value = value; - fieldSetFlags()[1] = true; - return this; - } + this.value = in.readBoolean(); - /** Checks whether the 'value' field has been set */ - public boolean hasValue() { - return fieldSetFlags()[1]; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? 
(Utf8) this.label : null); + break; - /** Clears the value of the 'value' field */ - public sparqles.avro.analytics.EPViewDiscoverabilityData.Builder clearValue() { - fieldSetFlags()[1] = false; - return this; - } + case 1: + this.value = in.readBoolean(); + break; - @Override - public EPViewDiscoverabilityData build() { - try { - EPViewDiscoverabilityData record = new EPViewDiscoverabilityData(); - record.label = - fieldSetFlags()[0] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.value = - fieldSetFlags()[1] - ? this.value - : (java.lang.Boolean) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperability.java b/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperability.java index 8e791542..5c69c04c 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperability.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperability.java @@ -5,238 +5,550 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class EPViewInteroperability extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"EPViewInteroperability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"SPARQL1Features\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewInteroperabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}},{\"name\":\"SPARQL11Features\",\"type\":{\"type\":\"array\",\"items\":\"EPViewInteroperabilityData\"}}]}"); + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -8563136173410211251L; - @Deprecated - public java.util.List SPARQL1Features; + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewInteroperability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"SPARQL1Features\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewInteroperabilityData\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}},{\"name\":\"SPARQL11Features\",\"type\":{\"type\":\"array\",\"items\":\"EPViewInteroperabilityData\"}}]}"); - @Deprecated - public java.util.List SPARQL11Features; + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewInteroperability() {} + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewInteroperability to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewInteroperability from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewInteroperability instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewInteroperability fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.util.List SPARQL1Features; + private java.util.List SPARQL11Features; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewInteroperability() {} + + /** + * All-args constructor. 
+ * + * @param SPARQL1Features The new value for SPARQL1Features + * @param SPARQL11Features The new value for SPARQL11Features + */ + public EPViewInteroperability( + java.util.List SPARQL1Features, + java.util.List SPARQL11Features) { + this.SPARQL1Features = SPARQL1Features; + this.SPARQL11Features = SPARQL11Features; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return SPARQL1Features; + case 1: + return SPARQL11Features; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } - /** All-args constructor. */ - public EPViewInteroperability( - java.util.List SPARQL1Features, - java.util.List SPARQL11Features) { - this.SPARQL1Features = SPARQL1Features; - this.SPARQL11Features = SPARQL11Features; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + SPARQL1Features = + (java.util.List) value$; + break; + case 1: + SPARQL11Features = + (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Gets the value of the 'SPARQL1Features' field. + * + * @return The value of the 'SPARQL1Features' field. + */ + public java.util.List getSPARQL1Features() { + return SPARQL1Features; + } + + /** + * Sets the value of the 'SPARQL1Features' field. + * + * @param value the value to set. + */ + public void setSPARQL1Features( + java.util.List value) { + this.SPARQL1Features = value; + } + + /** + * Gets the value of the 'SPARQL11Features' field. 
+ * + * @return The value of the 'SPARQL11Features' field. + */ + public java.util.List getSPARQL11Features() { + return SPARQL11Features; + } + + /** + * Sets the value of the 'SPARQL11Features' field. + * + * @param value the value to set. + */ + public void setSPARQL11Features( + java.util.List value) { + this.SPARQL11Features = value; + } + + /** + * Creates a new EPViewInteroperability RecordBuilder. + * + * @return A new EPViewInteroperability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewInteroperability.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewInteroperability.Builder(); + } + + /** + * Creates a new EPViewInteroperability RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewInteroperability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewInteroperability.Builder newBuilder( + sparqles.avro.analytics.EPViewInteroperability.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewInteroperability.Builder(); + } else { + return new sparqles.avro.analytics.EPViewInteroperability.Builder(other); } + } - /** Creates a new EPViewInteroperability RecordBuilder */ - public static sparqles.avro.analytics.EPViewInteroperability.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewInteroperability.Builder(); + /** + * Creates a new EPViewInteroperability RecordBuilder by copying an existing + * EPViewInteroperability instance. + * + * @param other The existing instance to copy. 
+ * @return A new EPViewInteroperability RecordBuilder + */ + public static sparqles.avro.analytics.EPViewInteroperability.Builder newBuilder( + sparqles.avro.analytics.EPViewInteroperability other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewInteroperability.Builder(); + } else { + return new sparqles.avro.analytics.EPViewInteroperability.Builder(other); } + } - /** Creates a new EPViewInteroperability RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewInteroperability.Builder newBuilder( - sparqles.avro.analytics.EPViewInteroperability.Builder other) { - return new sparqles.avro.analytics.EPViewInteroperability.Builder(other); + /** RecordBuilder for EPViewInteroperability instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.util.List SPARQL1Features; + private java.util.List SPARQL11Features; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new EPViewInteroperability RecordBuilder by copying an existing - * EPViewInteroperability instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.EPViewInteroperability.Builder newBuilder( - sparqles.avro.analytics.EPViewInteroperability other) { - return new sparqles.avro.analytics.EPViewInteroperability.Builder(other); + private Builder(sparqles.avro.analytics.EPViewInteroperability.Builder other) { + super(other); + if (isValidValue(fields()[0], other.SPARQL1Features)) { + this.SPARQL1Features = data().deepCopy(fields()[0].schema(), other.SPARQL1Features); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.SPARQL11Features)) { + this.SPARQL11Features = data().deepCopy(fields()[1].schema(), other.SPARQL11Features); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing EPViewInteroperability instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewInteroperability other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.SPARQL1Features)) { + this.SPARQL1Features = data().deepCopy(fields()[0].schema(), other.SPARQL1Features); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.SPARQL11Features)) { + this.SPARQL11Features = data().deepCopy(fields()[1].schema(), other.SPARQL11Features); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return SPARQL1Features; - case 1: - return SPARQL11Features; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'SPARQL1Features' field. + * + * @return The value. + */ + public java.util.List getSPARQL1Features() { + return SPARQL1Features; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - SPARQL1Features = - (java.util.List) value$; - break; - case 1: - SPARQL11Features = - (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'SPARQL1Features' field. + * + * @param value The value of 'SPARQL1Features'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewInteroperability.Builder setSPARQL1Features( + java.util.List value) { + validate(fields()[0], value); + this.SPARQL1Features = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'SPARQL1Features' field. */ - public java.util.List getSPARQL1Features() { - return SPARQL1Features; + /** + * Checks whether the 'SPARQL1Features' field has been set. + * + * @return True if the 'SPARQL1Features' field has been set, false otherwise. + */ + public boolean hasSPARQL1Features() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'SPARQL1Features' field. + * Clears the value of the 'SPARQL1Features' field. * - * @param value the value to set. + * @return This builder. */ - public void setSPARQL1Features( - java.util.List value) { - this.SPARQL1Features = value; + public sparqles.avro.analytics.EPViewInteroperability.Builder clearSPARQL1Features() { + SPARQL1Features = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'SPARQL11Features' field. */ + /** + * Gets the value of the 'SPARQL11Features' field. + * + * @return The value. + */ public java.util.List - getSPARQL11Features() { - return SPARQL11Features; + getSPARQL11Features() { + return SPARQL11Features; } /** * Sets the value of the 'SPARQL11Features' field. * - * @param value the value to set. + * @param value The value of 'SPARQL11Features'. + * @return This builder. 
*/ - public void setSPARQL11Features( - java.util.List value) { - this.SPARQL11Features = value; + public sparqles.avro.analytics.EPViewInteroperability.Builder setSPARQL11Features( + java.util.List value) { + validate(fields()[1], value); + this.SPARQL11Features = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for EPViewInteroperability instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'SPARQL11Features' field has been set. + * + * @return True if the 'SPARQL11Features' field has been set, false otherwise. + */ + public boolean hasSPARQL11Features() { + return fieldSetFlags()[1]; + } - private java.util.List SPARQL1Features; - private java.util.List SPARQL11Features; + /** + * Clears the value of the 'SPARQL11Features' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewInteroperability.Builder clearSPARQL11Features() { + SPARQL11Features = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewInteroperability.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public EPViewInteroperability build() { + try { + EPViewInteroperability record = new EPViewInteroperability(); + record.SPARQL1Features = + fieldSetFlags()[0] + ? this.SPARQL1Features + : (java.util.List) + defaultValue(fields()[0]); + record.SPARQL11Features = + fieldSetFlags()[1] + ? 
this.SPARQL11Features + : (java.util.List) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewInteroperability.Builder other) { - super(other); - if (isValidValue(fields()[0], other.SPARQL1Features)) { - this.SPARQL1Features = data().deepCopy(fields()[0].schema(), other.SPARQL1Features); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.SPARQL11Features)) { - this.SPARQL11Features = - data().deepCopy(fields()[1].schema(), other.SPARQL11Features); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing EPViewInteroperability instance */ - private Builder(sparqles.avro.analytics.EPViewInteroperability other) { - super(sparqles.avro.analytics.EPViewInteroperability.SCHEMA$); - if (isValidValue(fields()[0], other.SPARQL1Features)) { - this.SPARQL1Features = data().deepCopy(fields()[0].schema(), other.SPARQL1Features); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.SPARQL11Features)) { - this.SPARQL11Features = - data().deepCopy(fields()[1].schema(), other.SPARQL11Features); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'SPARQL1Features' field */ - public java.util.List - getSPARQL1Features() { - return SPARQL1Features; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets 
the value of the 'SPARQL1Features' field */ - public sparqles.avro.analytics.EPViewInteroperability.Builder setSPARQL1Features( - java.util.List value) { - validate(fields()[0], value); - this.SPARQL1Features = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'SPARQL1Features' field has been set */ - public boolean hasSPARQL1Features() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'SPARQL1Features' field */ - public sparqles.avro.analytics.EPViewInteroperability.Builder clearSPARQL1Features() { - SPARQL1Features = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + long size0 = this.SPARQL1Features.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.EPViewInteroperabilityData e0 : this.SPARQL1Features) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); - /** Gets the value of the 'SPARQL11Features' field */ - public java.util.List - getSPARQL11Features() { - return SPARQL11Features; - } + long size1 = this.SPARQL11Features.size(); + out.writeArrayStart(); + out.setItemCount(size1); + long actualSize1 = 0; + for (sparqles.avro.analytics.EPViewInteroperabilityData e1 : this.SPARQL11Features) { + actualSize1++; + out.startItem(); + e1.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize1 != size1) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size1 + ", but element count was " + 
actualSize1 + "."); + } - /** Sets the value of the 'SPARQL11Features' field */ - public sparqles.avro.analytics.EPViewInteroperability.Builder setSPARQL11Features( - java.util.List value) { - validate(fields()[1], value); - this.SPARQL11Features = value; - fieldSetFlags()[1] = true; - return this; + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + long size0 = in.readArrayStart(); + java.util.List a0 = this.SPARQL1Features; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("SPARQL1Features").schema()); + this.SPARQL1Features = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewInteroperabilityData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewInteroperabilityData(); + } + e0.customDecode(in); + a0.add(e0); } + } - /** Checks whether the 'SPARQL11Features' field has been set */ - public boolean hasSPARQL11Features() { - return fieldSetFlags()[1]; + long size1 = in.readArrayStart(); + java.util.List a1 = this.SPARQL11Features; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("SPARQL11Features").schema()); + this.SPARQL11Features = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.EPViewInteroperabilityData e1 = (ga1 != null ? 
ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.EPViewInteroperabilityData(); + } + e1.customDecode(in); + a1.add(e1); } + } - /** Clears the value of the 'SPARQL11Features' field */ - public sparqles.avro.analytics.EPViewInteroperability.Builder clearSPARQL11Features() { - SPARQL11Features = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + long size0 = in.readArrayStart(); + java.util.List a0 = + this.SPARQL1Features; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("SPARQL1Features").schema()); + this.SPARQL1Features = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewInteroperabilityData e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewInteroperabilityData(); + } + e0.customDecode(in); + a0.add(e0); + } + } + break; - @Override - public EPViewInteroperability build() { - try { - EPViewInteroperability record = new EPViewInteroperability(); - record.SPARQL1Features = - fieldSetFlags()[0] - ? this.SPARQL1Features - : (java.util.List< - sparqles.avro.analytics.EPViewInteroperabilityData>) - defaultValue(fields()[0]); - record.SPARQL11Features = - fieldSetFlags()[1] - ? 
this.SPARQL11Features - : (java.util.List< - sparqles.avro.analytics.EPViewInteroperabilityData>) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size1 = in.readArrayStart(); + java.util.List a1 = + this.SPARQL11Features; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("SPARQL11Features").schema()); + this.SPARQL11Features = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.EPViewInteroperabilityData e1 = + (ga1 != null ? ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.EPViewInteroperabilityData(); + } + e1.customDecode(in); + a1.add(e1); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperabilityData.java b/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperabilityData.java index d9b2ba12..34102dba 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperabilityData.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewInteroperabilityData.java @@ -5,280 +5,536 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class EPViewInteroperabilityData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"EPViewInteroperabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}"); - @Deprecated public java.lang.CharSequence label; - @Deprecated public boolean value; - @Deprecated public java.lang.CharSequence exception; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewInteroperabilityData() {} - - /** All-args constructor. */ - public EPViewInteroperabilityData( - java.lang.CharSequence label, - java.lang.Boolean value, - java.lang.CharSequence exception) { - this.label = label; - this.value = value; - this.exception = exception; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -1601108824774484276L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewInteroperabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"boolean\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewInteroperabilityData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewInteroperabilityData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewInteroperabilityData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewInteroperabilityData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private boolean value; + private java.lang.CharSequence exception; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewInteroperabilityData() {} + + /** + * All-args constructor. 
+ * + * @param label The new value for label + * @param value The new value for value + * @param exception The new value for exception + */ + public EPViewInteroperabilityData( + java.lang.CharSequence label, java.lang.Boolean value, java.lang.CharSequence exception) { + this.label = label; + this.value = value; + this.exception = exception; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + case 2: + return exception; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Boolean) value$; + break; + case 2: + exception = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public boolean getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. + */ + public void setValue(boolean value) { + this.value = value; + } + + /** + * Gets the value of the 'exception' field. 
+ * + * @return The value of the 'exception' field. + */ + public java.lang.CharSequence getException() { + return exception; + } + + /** + * Sets the value of the 'exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.exception = value; + } + + /** + * Creates a new EPViewInteroperabilityData RecordBuilder. + * + * @return A new EPViewInteroperabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewInteroperabilityData.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(); + } + + /** + * Creates a new EPViewInteroperabilityData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewInteroperabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewInteroperabilityData.Builder newBuilder( + sparqles.avro.analytics.EPViewInteroperabilityData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(other); } + } + + /** + * Creates a new EPViewInteroperabilityData RecordBuilder by copying an existing + * EPViewInteroperabilityData instance. + * + * @param other The existing instance to copy. + * @return A new EPViewInteroperabilityData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewInteroperabilityData.Builder newBuilder( + sparqles.avro.analytics.EPViewInteroperabilityData other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(other); + } + } + + /** RecordBuilder for EPViewInteroperabilityData instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private boolean value; + private java.lang.CharSequence exception; - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - /** Creates a new EPViewInteroperabilityData RecordBuilder */ - public static sparqles.avro.analytics.EPViewInteroperabilityData.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(); + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.analytics.EPViewInteroperabilityData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.exception)) { + this.exception = data().deepCopy(fields()[2].schema(), other.exception); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - /** Creates a new EPViewInteroperabilityData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewInteroperabilityData.Builder newBuilder( - sparqles.avro.analytics.EPViewInteroperabilityData.Builder other) { - return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(other); + /** + * Creates a Builder by copying an existing EPViewInteroperabilityData instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.EPViewInteroperabilityData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.exception)) { + this.exception = data().deepCopy(fields()[2].schema(), other.exception); + fieldSetFlags()[2] = true; + } } /** - * Creates a new EPViewInteroperabilityData RecordBuilder by copying an existing - * EPViewInteroperabilityData instance + * Gets the value of the 'label' field. + * + * @return The value. */ - public static sparqles.avro.analytics.EPViewInteroperabilityData.Builder newBuilder( - sparqles.avro.analytics.EPViewInteroperabilityData other) { - return new sparqles.avro.analytics.EPViewInteroperabilityData.Builder(other); + public java.lang.CharSequence getLabel() { + return label; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewInteroperabilityData.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return label; - case 1: - return value; - case 2: - return exception; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. + */ + public boolean hasLabel() { + return fieldSetFlags()[0]; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - label = (java.lang.CharSequence) value$; - break; - case 1: - value = (java.lang.Boolean) value$; - break; - case 2: - exception = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Clears the value of the 'label' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewInteroperabilityData.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'label' field. */ - public java.lang.CharSequence getLabel() { - return label; + /** + * Gets the value of the 'value' field. + * + * @return The value. + */ + public boolean getValue() { + return value; } /** - * Sets the value of the 'label' field. + * Sets the value of the 'value' field. * - * @param value the value to set. + * @param value The value of 'value'. + * @return This builder. */ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public sparqles.avro.analytics.EPViewInteroperabilityData.Builder setValue(boolean value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'value' field. */ - public java.lang.Boolean getValue() { - return value; + /** + * Checks whether the 'value' field has been set. + * + * @return True if the 'value' field has been set, false otherwise. + */ + public boolean hasValue() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'value' field. + * Clears the value of the 'value' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setValue(java.lang.Boolean value) { - this.value = value; + public sparqles.avro.analytics.EPViewInteroperabilityData.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'exception' field. */ + /** + * Gets the value of the 'exception' field. + * + * @return The value. + */ public java.lang.CharSequence getException() { - return exception; + return exception; } /** * Sets the value of the 'exception' field. * - * @param value the value to set. + * @param value The value of 'exception'. + * @return This builder. */ - public void setException(java.lang.CharSequence value) { - this.exception = value; + public sparqles.avro.analytics.EPViewInteroperabilityData.Builder setException( + java.lang.CharSequence value) { + validate(fields()[2], value); + this.exception = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for EPViewInteroperabilityData instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence label; - private boolean value; - private java.lang.CharSequence exception; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewInteroperabilityData.SCHEMA$); - } + /** + * Checks whether the 'exception' field has been set. + * + * @return True if the 'exception' field has been set, false otherwise. 
+ */ + public boolean hasException() { + return fieldSetFlags()[2]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewInteroperabilityData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.exception)) { - this.exception = data().deepCopy(fields()[2].schema(), other.exception); - fieldSetFlags()[2] = true; - } - } + /** + * Clears the value of the 'exception' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewInteroperabilityData.Builder clearException() { + exception = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a Builder by copying an existing EPViewInteroperabilityData instance */ - private Builder(sparqles.avro.analytics.EPViewInteroperabilityData other) { - super(sparqles.avro.analytics.EPViewInteroperabilityData.SCHEMA$); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.exception)) { - this.exception = data().deepCopy(fields()[2].schema(), other.exception); - fieldSetFlags()[2] = true; + @Override + @SuppressWarnings("unchecked") + public EPViewInteroperabilityData build() { + try { + EPViewInteroperabilityData record = new EPViewInteroperabilityData(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? 
this.value : (java.lang.Boolean) defaultValue(fields()[1]); + record.exception = + fieldSetFlags()[2] + ? this.exception + : (java.lang.CharSequence) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); + + out.writeBoolean(this.value); + + if (this.exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.exception); + } + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); + + this.value = in.readBoolean(); + + if (in.readIndex() != 0) { + in.readNull(); + this.exception = null; + } else { + this.exception = + in.readString(this.exception instanceof Utf8 ? 
(Utf8) this.exception : null); + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); + break; + + case 1: + this.value = in.readBoolean(); + break; + + case 2: + if (in.readIndex() != 0) { + in.readNull(); + this.exception = null; + } else { + this.exception = + in.readString(this.exception instanceof Utf8 ? (Utf8) this.exception : null); } - } - - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + break; - /** Sets the value of the 'label' field */ - public sparqles.avro.analytics.EPViewInteroperabilityData.Builder setLabel( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.label = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'label' field */ - public sparqles.avro.analytics.EPViewInteroperabilityData.Builder clearLabel() { - label = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'value' field */ - public java.lang.Boolean getValue() { - return value; - } - - /** Sets the value of the 'value' field */ - public sparqles.avro.analytics.EPViewInteroperabilityData.Builder setValue(boolean value) { - validate(fields()[1], value); - this.value = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'value' field has been set */ - public boolean hasValue() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'value' field */ - public sparqles.avro.analytics.EPViewInteroperabilityData.Builder clearValue() { - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'exception' field */ - public java.lang.CharSequence getException() { - return exception; - } - - /** Sets the value of the 'exception' field */ - 
public sparqles.avro.analytics.EPViewInteroperabilityData.Builder setException( - java.lang.CharSequence value) { - validate(fields()[2], value); - this.exception = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'exception' field */ - public sparqles.avro.analytics.EPViewInteroperabilityData.Builder clearException() { - exception = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public EPViewInteroperabilityData build() { - try { - EPViewInteroperabilityData record = new EPViewInteroperabilityData(); - record.label = - fieldSetFlags()[0] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.value = - fieldSetFlags()[1] - ? this.value - : (java.lang.Boolean) defaultValue(fields()[1]); - record.exception = - fieldSetFlags()[2] - ? this.exception - : (java.lang.CharSequence) defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewPerformance.java b/backend/src/main/java/sparqles/avro/analytics/EPViewPerformance.java index 5daf245d..95552eba 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewPerformance.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewPerformance.java @@ -5,282 +5,629 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class EPViewPerformance extends org.apache.avro.specific.SpecificRecordBase - implements 
org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"EPViewPerformance\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"ask\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}}]}}},{\"name\":\"join\",\"type\":{\"type\":\"array\",\"items\":\"EPViewPerformanceData\"}}]}"); - @Deprecated public long threshold; - @Deprecated public java.util.List ask; - @Deprecated public java.util.List join; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewPerformance() {} - - /** All-args constructor. 
*/ - public EPViewPerformance( - java.lang.Long threshold, - java.util.List ask, - java.util.List join) { - this.threshold = threshold; - this.ask = ask; - this.join = join; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 5634356004997893572L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewPerformance\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"ask\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}}]}}},{\"name\":\"join\",\"type\":{\"type\":\"array\",\"items\":\"EPViewPerformanceData\"}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewPerformance to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewPerformance from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewPerformance instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewPerformance fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private long threshold; + private java.util.List ask; + private java.util.List join; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewPerformance() {} + + /** + * All-args constructor. 
+ * + * @param threshold The new value for threshold + * @param ask The new value for ask + * @param join The new value for join + */ + public EPViewPerformance( + java.lang.Long threshold, + java.util.List ask, + java.util.List join) { + this.threshold = threshold; + this.ask = ask; + this.join = join; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return threshold; + case 1: + return ask; + case 2: + return join; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + threshold = (java.lang.Long) value$; + break; + case 1: + ask = (java.util.List) value$; + break; + case 2: + join = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - /** Creates a new EPViewPerformance RecordBuilder */ - public static sparqles.avro.analytics.EPViewPerformance.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewPerformance.Builder(); + } + + /** + * Gets the value of the 'threshold' field. + * + * @return The value of the 'threshold' field. + */ + public long getThreshold() { + return threshold; + } + + /** + * Sets the value of the 'threshold' field. + * + * @param value the value to set. + */ + public void setThreshold(long value) { + this.threshold = value; + } + + /** + * Gets the value of the 'ask' field. + * + * @return The value of the 'ask' field. 
+ */ + public java.util.List getAsk() { + return ask; + } + + /** + * Sets the value of the 'ask' field. + * + * @param value the value to set. + */ + public void setAsk(java.util.List value) { + this.ask = value; + } + + /** + * Gets the value of the 'join' field. + * + * @return The value of the 'join' field. + */ + public java.util.List getJoin() { + return join; + } + + /** + * Sets the value of the 'join' field. + * + * @param value the value to set. + */ + public void setJoin(java.util.List value) { + this.join = value; + } + + /** + * Creates a new EPViewPerformance RecordBuilder. + * + * @return A new EPViewPerformance RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformance.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewPerformance.Builder(); + } + + /** + * Creates a new EPViewPerformance RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewPerformance RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformance.Builder newBuilder( + sparqles.avro.analytics.EPViewPerformance.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewPerformance.Builder(); + } else { + return new sparqles.avro.analytics.EPViewPerformance.Builder(other); } + } + + /** + * Creates a new EPViewPerformance RecordBuilder by copying an existing EPViewPerformance + * instance. + * + * @param other The existing instance to copy. + * @return A new EPViewPerformance RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformance.Builder newBuilder( + sparqles.avro.analytics.EPViewPerformance other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewPerformance.Builder(); + } else { + return new sparqles.avro.analytics.EPViewPerformance.Builder(other); + } + } + + /** RecordBuilder for EPViewPerformance instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private long threshold; + private java.util.List ask; + private java.util.List join; - /** Creates a new EPViewPerformance RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewPerformance.Builder newBuilder( - sparqles.avro.analytics.EPViewPerformance.Builder other) { - return new sparqles.avro.analytics.EPViewPerformance.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new EPViewPerformance RecordBuilder by copying an existing EPViewPerformance - * instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.EPViewPerformance.Builder newBuilder( - sparqles.avro.analytics.EPViewPerformance other) { - return new sparqles.avro.analytics.EPViewPerformance.Builder(other); + private Builder(sparqles.avro.analytics.EPViewPerformance.Builder other) { + super(other); + if (isValidValue(fields()[0], other.threshold)) { + this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.ask)) { + this.ask = data().deepCopy(fields()[1].schema(), other.ask); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.join)) { + this.join = data().deepCopy(fields()[2].schema(), other.join); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing EPViewPerformance instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.EPViewPerformance other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.threshold)) { + this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.ask)) { + this.ask = data().deepCopy(fields()[1].schema(), other.ask); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.join)) { + this.join = data().deepCopy(fields()[2].schema(), other.join); + fieldSetFlags()[2] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return threshold; - case 1: - return ask; - case 2: - return join; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'threshold' field. + * + * @return The value. + */ + public long getThreshold() { + return threshold; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - threshold = (java.lang.Long) value$; - break; - case 1: - ask = (java.util.List) value$; - break; - case 2: - join = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'threshold' field. + * + * @param value The value of 'threshold'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformance.Builder setThreshold(long value) { + validate(fields()[0], value); + this.threshold = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'threshold' field. */ - public java.lang.Long getThreshold() { - return threshold; + /** + * Checks whether the 'threshold' field has been set. + * + * @return True if the 'threshold' field has been set, false otherwise. 
+ */ + public boolean hasThreshold() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'threshold' field. + * Clears the value of the 'threshold' field. * - * @param value the value to set. + * @return This builder. */ - public void setThreshold(java.lang.Long value) { - this.threshold = value; + public sparqles.avro.analytics.EPViewPerformance.Builder clearThreshold() { + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'ask' field. */ + /** + * Gets the value of the 'ask' field. + * + * @return The value. + */ public java.util.List getAsk() { - return ask; + return ask; } /** * Sets the value of the 'ask' field. * - * @param value the value to set. + * @param value The value of 'ask'. + * @return This builder. */ - public void setAsk(java.util.List value) { - this.ask = value; + public sparqles.avro.analytics.EPViewPerformance.Builder setAsk( + java.util.List value) { + validate(fields()[1], value); + this.ask = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'join' field. */ + /** + * Checks whether the 'ask' field has been set. + * + * @return True if the 'ask' field has been set, false otherwise. + */ + public boolean hasAsk() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'ask' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformance.Builder clearAsk() { + ask = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'join' field. + * + * @return The value. + */ public java.util.List getJoin() { - return join; + return join; } /** * Sets the value of the 'join' field. * - * @param value the value to set. + * @param value The value of 'join'. + * @return This builder. 
*/ - public void setJoin(java.util.List value) { - this.join = value; + public sparqles.avro.analytics.EPViewPerformance.Builder setJoin( + java.util.List value) { + validate(fields()[2], value); + this.join = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for EPViewPerformance instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'join' field has been set. + * + * @return True if the 'join' field has been set, false otherwise. + */ + public boolean hasJoin() { + return fieldSetFlags()[2]; + } - private long threshold; - private java.util.List ask; - private java.util.List join; + /** + * Clears the value of the 'join' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformance.Builder clearJoin() { + join = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewPerformance.SCHEMA$); + @Override + @SuppressWarnings("unchecked") + public EPViewPerformance build() { + try { + EPViewPerformance record = new EPViewPerformance(); + record.threshold = + fieldSetFlags()[0] ? this.threshold : (java.lang.Long) defaultValue(fields()[0]); + record.ask = + fieldSetFlags()[1] + ? this.ask + : (java.util.List) + defaultValue(fields()[1]); + record.join = + fieldSetFlags()[2] + ? 
this.join + : (java.util.List) + defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeLong(this.threshold); + + long size0 = this.ask.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.EPViewPerformanceData e0 : this.ask) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + long size1 = this.join.size(); + out.writeArrayStart(); + out.setItemCount(size1); + long actualSize1 = 0; + for (sparqles.avro.analytics.EPViewPerformanceData e1 : this.join) { + actualSize1++; + out.startItem(); + e1.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize1 != size1) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size1 + ", but element count was " + actualSize1 + "."); + } + + @Override + public 
void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.threshold = in.readLong(); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.ask; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("ask").schema()); + this.ask = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewPerformanceData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewPerformanceData(); + } + e0.customDecode(in); + a0.add(e0); } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewPerformance.Builder other) { - super(other); - if (isValidValue(fields()[0], other.threshold)) { - this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.ask)) { - this.ask = data().deepCopy(fields()[1].schema(), other.ask); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.join)) { - this.join = data().deepCopy(fields()[2].schema(), other.join); - fieldSetFlags()[2] = true; - } + } + + long size1 = in.readArrayStart(); + java.util.List a1 = this.join; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("join").schema()); + this.join = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.EPViewPerformanceData e1 = (ga1 != null ? 
ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.EPViewPerformanceData(); + } + e1.customDecode(in); + a1.add(e1); } - - /** Creates a Builder by copying an existing EPViewPerformance instance */ - private Builder(sparqles.avro.analytics.EPViewPerformance other) { - super(sparqles.avro.analytics.EPViewPerformance.SCHEMA$); - if (isValidValue(fields()[0], other.threshold)) { - this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); - fieldSetFlags()[0] = true; + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.threshold = in.readLong(); + break; + + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.ask; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("ask").schema()); + this.ask = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewPerformanceData e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewPerformanceData(); + } + e0.customDecode(in); + a0.add(e0); + } } - if (isValidValue(fields()[1], other.ask)) { - this.ask = data().deepCopy(fields()[1].schema(), other.ask); - fieldSetFlags()[1] = true; + break; + + case 2: + long size1 = in.readArrayStart(); + java.util.List a1 = this.join; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("join").schema()); + this.join = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.EPViewPerformanceData e1 = + (ga1 != null ? 
ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.EPViewPerformanceData(); + } + e1.customDecode(in); + a1.add(e1); + } } - if (isValidValue(fields()[2], other.join)) { - this.join = data().deepCopy(fields()[2].schema(), other.join); - fieldSetFlags()[2] = true; - } - } + break; - /** Gets the value of the 'threshold' field */ - public java.lang.Long getThreshold() { - return threshold; - } - - /** Sets the value of the 'threshold' field */ - public sparqles.avro.analytics.EPViewPerformance.Builder setThreshold(long value) { - validate(fields()[0], value); - this.threshold = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'threshold' field has been set */ - public boolean hasThreshold() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'threshold' field */ - public sparqles.avro.analytics.EPViewPerformance.Builder clearThreshold() { - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'ask' field */ - public java.util.List getAsk() { - return ask; - } - - /** Sets the value of the 'ask' field */ - public sparqles.avro.analytics.EPViewPerformance.Builder setAsk( - java.util.List value) { - validate(fields()[1], value); - this.ask = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'ask' field has been set */ - public boolean hasAsk() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'ask' field */ - public sparqles.avro.analytics.EPViewPerformance.Builder clearAsk() { - ask = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'join' field */ - public java.util.List getJoin() { - return join; - } - - /** Sets the value of the 'join' field */ - public sparqles.avro.analytics.EPViewPerformance.Builder setJoin( - java.util.List value) { - validate(fields()[2], value); - this.join = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'join' field has been set 
*/ - public boolean hasJoin() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'join' field */ - public sparqles.avro.analytics.EPViewPerformance.Builder clearJoin() { - join = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public EPViewPerformance build() { - try { - EPViewPerformance record = new EPViewPerformance(); - record.threshold = - fieldSetFlags()[0] - ? this.threshold - : (java.lang.Long) defaultValue(fields()[0]); - record.ask = - fieldSetFlags()[1] - ? this.ask - : (java.util.List) - defaultValue(fields()[1]); - record.join = - fieldSetFlags()[2] - ? this.join - : (java.util.List) - defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceData.java b/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceData.java index 1e481707..425b15cc 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceData.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceData.java @@ -5,285 +5,576 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class EPViewPerformanceData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"EPViewPerformanceData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}}]}"); - @Deprecated public java.lang.CharSequence key; - @Deprecated public java.lang.CharSequence color; - @Deprecated public java.util.List data; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewPerformanceData() {} - - /** All-args constructor. */ - public EPViewPerformanceData( - java.lang.CharSequence key, - java.lang.CharSequence color, - java.util.List data) { - this.key = key; - this.color = color; - this.data = data; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 8853438138679964690L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewPerformanceData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, 
SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewPerformanceData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewPerformanceData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewPerformanceData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewPerformanceData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.lang.CharSequence color; + private java.util.List data; + + /** + * Default constructor. 
Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewPerformanceData() {} + + /** + * All-args constructor. + * + * @param key The new value for key + * @param color The new value for color + * @param data The new value for data + */ + public EPViewPerformanceData( + java.lang.CharSequence key, + java.lang.CharSequence color, + java.util.List data) { + this.key = key; + this.color = color; + this.data = data; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return color; + case 2: + return data; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + color = (java.lang.CharSequence) value$; + break; + case 2: + data = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - /** Creates a new EPViewPerformanceData RecordBuilder */ - public static sparqles.avro.analytics.EPViewPerformanceData.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewPerformanceData.Builder(); + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } + + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. 
+ */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'color' field. + * + * @return The value of the 'color' field. + */ + public java.lang.CharSequence getColor() { + return color; + } + + /** + * Sets the value of the 'color' field. + * + * @param value the value to set. + */ + public void setColor(java.lang.CharSequence value) { + this.color = value; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value of the 'data' field. + */ + public java.util.List getData() { + return data; + } + + /** + * Sets the value of the 'data' field. + * + * @param value the value to set. + */ + public void setData(java.util.List value) { + this.data = value; + } + + /** + * Creates a new EPViewPerformanceData RecordBuilder. + * + * @return A new EPViewPerformanceData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformanceData.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewPerformanceData.Builder(); + } + + /** + * Creates a new EPViewPerformanceData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewPerformanceData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformanceData.Builder newBuilder( + sparqles.avro.analytics.EPViewPerformanceData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewPerformanceData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewPerformanceData.Builder(other); } + } + + /** + * Creates a new EPViewPerformanceData RecordBuilder by copying an existing EPViewPerformanceData + * instance. + * + * @param other The existing instance to copy. 
+ * @return A new EPViewPerformanceData RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformanceData.Builder newBuilder( + sparqles.avro.analytics.EPViewPerformanceData other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewPerformanceData.Builder(); + } else { + return new sparqles.avro.analytics.EPViewPerformanceData.Builder(other); + } + } + + /** RecordBuilder for EPViewPerformanceData instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.lang.CharSequence color; + private java.util.List data; - /** Creates a new EPViewPerformanceData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewPerformanceData.Builder newBuilder( - sparqles.avro.analytics.EPViewPerformanceData.Builder other) { - return new sparqles.avro.analytics.EPViewPerformanceData.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new EPViewPerformanceData RecordBuilder by copying an existing - * EPViewPerformanceData instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.EPViewPerformanceData.Builder newBuilder( - sparqles.avro.analytics.EPViewPerformanceData other) { - return new sparqles.avro.analytics.EPViewPerformanceData.Builder(other); + private Builder(sparqles.avro.analytics.EPViewPerformanceData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.color)) { + this.color = data().deepCopy(fields()[1].schema(), other.color); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing EPViewPerformanceData instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.EPViewPerformanceData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.color)) { + this.color = data().deepCopy(fields()[1].schema(), other.color); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return key; - case 1: - return color; - case 2: - return data; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'key' field. + * + * @return The value. + */ + public java.lang.CharSequence getKey() { + return key; } - // Used by DatumReader. 
Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - key = (java.lang.CharSequence) value$; - break; - case 1: - color = (java.lang.CharSequence) value$; - break; - case 2: - data = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformanceData.Builder setKey( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'key' field. */ - public java.lang.CharSequence getKey() { - return key; + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. + */ + public boolean hasKey() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'key' field. + * Clears the value of the 'key' field. * - * @param value the value to set. + * @return This builder. */ - public void setKey(java.lang.CharSequence value) { - this.key = value; + public sparqles.avro.analytics.EPViewPerformanceData.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'color' field. */ + /** + * Gets the value of the 'color' field. + * + * @return The value. + */ public java.lang.CharSequence getColor() { - return color; + return color; } /** * Sets the value of the 'color' field. * - * @param value the value to set. + * @param value The value of 'color'. + * @return This builder. 
*/ - public void setColor(java.lang.CharSequence value) { - this.color = value; + public sparqles.avro.analytics.EPViewPerformanceData.Builder setColor( + java.lang.CharSequence value) { + validate(fields()[1], value); + this.color = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'data' field. */ + /** + * Checks whether the 'color' field has been set. + * + * @return True if the 'color' field has been set, false otherwise. + */ + public boolean hasColor() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'color' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformanceData.Builder clearColor() { + color = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value. + */ public java.util.List getData() { - return data; + return data; } /** * Sets the value of the 'data' field. * - * @param value the value to set. + * @param value The value of 'data'. + * @return This builder. */ - public void setData(java.util.List value) { - this.data = value; + public sparqles.avro.analytics.EPViewPerformanceData.Builder setData( + java.util.List value) { + validate(fields()[2], value); + this.data = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for EPViewPerformanceData instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence key; - private java.lang.CharSequence color; - private java.util.List data; + /** + * Checks whether the 'data' field has been set. + * + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[2]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewPerformanceData.SCHEMA$); - } + /** + * Clears the value of the 'data' field. 
+ * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformanceData.Builder clearData() { + data = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewPerformanceData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.color)) { - this.color = data().deepCopy(fields()[1].schema(), other.color); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.data)) { - this.data = data().deepCopy(fields()[2].schema(), other.data); - fieldSetFlags()[2] = true; - } + @Override + @SuppressWarnings("unchecked") + public EPViewPerformanceData build() { + try { + EPViewPerformanceData record = new EPViewPerformanceData(); + record.key = + fieldSetFlags()[0] ? this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.color = + fieldSetFlags()[1] ? this.color : (java.lang.CharSequence) defaultValue(fields()[1]); + record.data = + fieldSetFlags()[2] + ? 
this.data + : (java.util.List) + defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); + + out.writeString(this.color); + + long size0 = this.data.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.EPViewPerformanceDataValues e0 : this.data) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + + this.color = in.readString(this.color instanceof Utf8 ? 
(Utf8) this.color : null); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewPerformanceDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewPerformanceDataValues(); + } + e0.customDecode(in); + a0.add(e0); } - - /** Creates a Builder by copying an existing EPViewPerformanceData instance */ - private Builder(sparqles.avro.analytics.EPViewPerformanceData other) { - super(sparqles.avro.analytics.EPViewPerformanceData.SCHEMA$); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.color)) { - this.color = data().deepCopy(fields()[1].schema(), other.color); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.data)) { - this.data = data().deepCopy(fields()[2].schema(), other.data); - fieldSetFlags()[2] = true; + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; + + case 1: + this.color = in.readString(this.color instanceof Utf8 ? (Utf8) this.color : null); + break; + + case 2: + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.EPViewPerformanceDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.EPViewPerformanceDataValues(); + } + e0.customDecode(in); + a0.add(e0); + } } - } - - /** Gets the value of the 'key' field */ - public java.lang.CharSequence getKey() { - return key; - } + break; - /** Sets the value of the 'key' field */ - public sparqles.avro.analytics.EPViewPerformanceData.Builder setKey( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.key = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'key' field has been set */ - public boolean hasKey() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'key' field */ - public sparqles.avro.analytics.EPViewPerformanceData.Builder clearKey() { - key = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'color' field */ - public java.lang.CharSequence getColor() { - return color; - } - - /** Sets the value of the 'color' field */ - public sparqles.avro.analytics.EPViewPerformanceData.Builder setColor( - java.lang.CharSequence value) { - validate(fields()[1], value); - this.color = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'color' field has been set */ - public boolean hasColor() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'color' field */ - public sparqles.avro.analytics.EPViewPerformanceData.Builder clearColor() { - color = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'data' field */ - public java.util.List getData() { - return data; - } - - /** Sets the value of the 'data' field */ - public sparqles.avro.analytics.EPViewPerformanceData.Builder setData( - java.util.List value) { - validate(fields()[2], value); - this.data = value; - 
fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'data' field has been set */ - public boolean hasData() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'data' field */ - public sparqles.avro.analytics.EPViewPerformanceData.Builder clearData() { - data = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public EPViewPerformanceData build() { - try { - EPViewPerformanceData record = new EPViewPerformanceData(); - record.key = - fieldSetFlags()[0] - ? this.key - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.color = - fieldSetFlags()[1] - ? this.color - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.data = - fieldSetFlags()[2] - ? this.data - : (java.util.List< - sparqles.avro.analytics - .EPViewPerformanceDataValues>) - defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceDataValues.java b/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceDataValues.java index d53b2fe6..6aa5efc9 100644 --- a/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceDataValues.java +++ b/backend/src/main/java/sparqles/avro/analytics/EPViewPerformanceDataValues.java @@ -5,280 +5,536 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class EPViewPerformanceDataValues extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new 
org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}"); - @Deprecated public java.lang.CharSequence label; - @Deprecated public double value; - @Deprecated public java.lang.CharSequence exception; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public EPViewPerformanceDataValues() {} - - /** All-args constructor. */ - public EPViewPerformanceDataValues( - java.lang.CharSequence label, - java.lang.Double value, - java.lang.CharSequence exception) { - this.label = label; - this.value = value; - this.exception = exception; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -129584720939747212L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"EPViewPerformanceDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this EPViewPerformanceDataValues to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a EPViewPerformanceDataValues from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a EPViewPerformanceDataValues instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static EPViewPerformanceDataValues fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private double value; + private java.lang.CharSequence exception; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public EPViewPerformanceDataValues() {} + + /** + * All-args constructor. 
+ * + * @param label The new value for label + * @param value The new value for value + * @param exception The new value for exception + */ + public EPViewPerformanceDataValues( + java.lang.CharSequence label, java.lang.Double value, java.lang.CharSequence exception) { + this.label = label; + this.value = value; + this.exception = exception; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + case 2: + return exception; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Double) value$; + break; + case 2: + exception = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public double getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. + */ + public void setValue(double value) { + this.value = value; + } + + /** + * Gets the value of the 'exception' field. 
+ * + * @return The value of the 'exception' field. + */ + public java.lang.CharSequence getException() { + return exception; + } + + /** + * Sets the value of the 'exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.exception = value; + } + + /** + * Creates a new EPViewPerformanceDataValues RecordBuilder. + * + * @return A new EPViewPerformanceDataValues RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformanceDataValues.Builder newBuilder() { + return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(); + } + + /** + * Creates a new EPViewPerformanceDataValues RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new EPViewPerformanceDataValues RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformanceDataValues.Builder newBuilder( + sparqles.avro.analytics.EPViewPerformanceDataValues.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(); + } else { + return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(other); } + } + + /** + * Creates a new EPViewPerformanceDataValues RecordBuilder by copying an existing + * EPViewPerformanceDataValues instance. + * + * @param other The existing instance to copy. + * @return A new EPViewPerformanceDataValues RecordBuilder + */ + public static sparqles.avro.analytics.EPViewPerformanceDataValues.Builder newBuilder( + sparqles.avro.analytics.EPViewPerformanceDataValues other) { + if (other == null) { + return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(); + } else { + return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(other); + } + } + + /** RecordBuilder for EPViewPerformanceDataValues instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private double value; + private java.lang.CharSequence exception; - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - /** Creates a new EPViewPerformanceDataValues RecordBuilder */ - public static sparqles.avro.analytics.EPViewPerformanceDataValues.Builder newBuilder() { - return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(); + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.analytics.EPViewPerformanceDataValues.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.exception)) { + this.exception = data().deepCopy(fields()[2].schema(), other.exception); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - /** Creates a new EPViewPerformanceDataValues RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.EPViewPerformanceDataValues.Builder newBuilder( - sparqles.avro.analytics.EPViewPerformanceDataValues.Builder other) { - return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(other); + /** + * Creates a Builder by copying an existing EPViewPerformanceDataValues instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.EPViewPerformanceDataValues other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.exception)) { + this.exception = data().deepCopy(fields()[2].schema(), other.exception); + fieldSetFlags()[2] = true; + } } /** - * Creates a new EPViewPerformanceDataValues RecordBuilder by copying an existing - * EPViewPerformanceDataValues instance + * Gets the value of the 'label' field. + * + * @return The value. */ - public static sparqles.avro.analytics.EPViewPerformanceDataValues.Builder newBuilder( - sparqles.avro.analytics.EPViewPerformanceDataValues other) { - return new sparqles.avro.analytics.EPViewPerformanceDataValues.Builder(other); + public java.lang.CharSequence getLabel() { + return label; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return label; - case 1: - return value; - case 2: - return exception; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. + */ + public boolean hasLabel() { + return fieldSetFlags()[0]; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - label = (java.lang.CharSequence) value$; - break; - case 1: - value = (java.lang.Double) value$; - break; - case 2: - exception = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Clears the value of the 'label' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'label' field. */ - public java.lang.CharSequence getLabel() { - return label; + /** + * Gets the value of the 'value' field. + * + * @return The value. + */ + public double getValue() { + return value; } /** - * Sets the value of the 'label' field. + * Sets the value of the 'value' field. * - * @param value the value to set. + * @param value The value of 'value'. + * @return This builder. */ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder setValue(double value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'value' field. */ - public java.lang.Double getValue() { - return value; + /** + * Checks whether the 'value' field has been set. + * + * @return True if the 'value' field has been set, false otherwise. + */ + public boolean hasValue() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'value' field. + * Clears the value of the 'value' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setValue(java.lang.Double value) { - this.value = value; + public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'exception' field. */ + /** + * Gets the value of the 'exception' field. + * + * @return The value. + */ public java.lang.CharSequence getException() { - return exception; + return exception; } /** * Sets the value of the 'exception' field. * - * @param value the value to set. + * @param value The value of 'exception'. + * @return This builder. */ - public void setException(java.lang.CharSequence value) { - this.exception = value; + public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder setException( + java.lang.CharSequence value) { + validate(fields()[2], value); + this.exception = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for EPViewPerformanceDataValues instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence label; - private double value; - private java.lang.CharSequence exception; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.EPViewPerformanceDataValues.SCHEMA$); - } + /** + * Checks whether the 'exception' field has been set. + * + * @return True if the 'exception' field has been set, false otherwise. 
+ */ + public boolean hasException() { + return fieldSetFlags()[2]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.EPViewPerformanceDataValues.Builder other) { - super(other); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.exception)) { - this.exception = data().deepCopy(fields()[2].schema(), other.exception); - fieldSetFlags()[2] = true; - } - } + /** + * Clears the value of the 'exception' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder clearException() { + exception = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a Builder by copying an existing EPViewPerformanceDataValues instance */ - private Builder(sparqles.avro.analytics.EPViewPerformanceDataValues other) { - super(sparqles.avro.analytics.EPViewPerformanceDataValues.SCHEMA$); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.exception)) { - this.exception = data().deepCopy(fields()[2].schema(), other.exception); - fieldSetFlags()[2] = true; + @Override + @SuppressWarnings("unchecked") + public EPViewPerformanceDataValues build() { + try { + EPViewPerformanceDataValues record = new EPViewPerformanceDataValues(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? 
this.value : (java.lang.Double) defaultValue(fields()[1]); + record.exception = + fieldSetFlags()[2] + ? this.exception + : (java.lang.CharSequence) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); + + out.writeDouble(this.value); + + if (this.exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.exception); + } + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); + + this.value = in.readDouble(); + + if (in.readIndex() != 0) { + in.readNull(); + this.exception = null; + } else { + this.exception = + in.readString(this.exception instanceof Utf8 ? 
(Utf8) this.exception : null); + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); + break; + + case 1: + this.value = in.readDouble(); + break; + + case 2: + if (in.readIndex() != 0) { + in.readNull(); + this.exception = null; + } else { + this.exception = + in.readString(this.exception instanceof Utf8 ? (Utf8) this.exception : null); } - } - - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + break; - /** Sets the value of the 'label' field */ - public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder setLabel( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.label = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'label' field */ - public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder clearLabel() { - label = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'value' field */ - public java.lang.Double getValue() { - return value; - } - - /** Sets the value of the 'value' field */ - public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder setValue(double value) { - validate(fields()[1], value); - this.value = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'value' field has been set */ - public boolean hasValue() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'value' field */ - public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder clearValue() { - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'exception' field */ - public java.lang.CharSequence getException() { - return exception; - } - - /** Sets the value of the 'exception' field */ - 
public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder setException( - java.lang.CharSequence value) { - validate(fields()[2], value); - this.exception = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'exception' field */ - public sparqles.avro.analytics.EPViewPerformanceDataValues.Builder clearException() { - exception = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public EPViewPerformanceDataValues build() { - try { - EPViewPerformanceDataValues record = new EPViewPerformanceDataValues(); - record.label = - fieldSetFlags()[0] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.value = - fieldSetFlags()[1] - ? this.value - : (java.lang.Double) defaultValue(fields()[1]); - record.exception = - fieldSetFlags()[2] - ? this.exception - : (java.lang.CharSequence) defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/Index.java b/backend/src/main/java/sparqles/avro/analytics/Index.java index e25a70b7..e26ff7de 100644 --- a/backend/src/main/java/sparqles/avro/analytics/Index.java +++ b/backend/src/main/java/sparqles/avro/analytics/Index.java @@ -5,464 +5,1248 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class Index extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final 
org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"Index\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"lastUpdate\",\"type\":\"long\"},{\"name\":\"availability\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AvailabilityIndex\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"string\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}}},{\"name\":\"performance\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewPerformance\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}},{\"name\":\"interoperability\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewInteroperability\",\"fields\":[{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"fields\":[{\"name
\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}},{\"name\":\"discoverability\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverability\",\"fields\":[{\"name\":\"serverName\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}},{\"name\":\"VoIDDescription\",\"type\":\"double\"},{\"name\":\"SDDescription\",\"type\":\"double\"},{\"name\":\"NoDescription\",\"type\":\"double\"}]}}]}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public long lastUpdate; - @Deprecated public java.util.List availability; - @Deprecated public sparqles.avro.analytics.IndexViewPerformance performance; - @Deprecated public sparqles.avro.analytics.IndexViewInteroperability interoperability; - @Deprecated public sparqles.avro.analytics.IndexViewDiscoverability discoverability; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public Index() {} - - /** All-args constructor. 
*/ - public Index( - sparqles.avro.Endpoint endpoint, - java.lang.Long lastUpdate, - java.util.List availability, - sparqles.avro.analytics.IndexViewPerformance performance, - sparqles.avro.analytics.IndexViewInteroperability interoperability, - sparqles.avro.analytics.IndexViewDiscoverability discoverability) { - this.endpoint = endpoint; - this.lastUpdate = lastUpdate; - this.availability = availability; - this.performance = performance; - this.interoperability = interoperability; - this.discoverability = discoverability; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new Index RecordBuilder */ - public static sparqles.avro.analytics.Index.Builder newBuilder() { - return new sparqles.avro.analytics.Index.Builder(); - } - - /** Creates a new Index RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.Index.Builder newBuilder( - sparqles.avro.analytics.Index.Builder other) { - return new sparqles.avro.analytics.Index.Builder(other); - } - - /** Creates a new Index RecordBuilder by copying an existing Index instance */ - public static sparqles.avro.analytics.Index.Builder newBuilder( - sparqles.avro.analytics.Index other) { - return new sparqles.avro.analytics.Index.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return lastUpdate; - case 2: - return availability; - case 3: - return performance; - case 4: - return interoperability; - case 5: - return discoverability; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 8399375307976379980L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"Index\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"lastUpdate\",\"type\":\"long\"},{\"name\":\"availability\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AvailabilityIndex\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexAvailabilityDataPoint\",\"fields\":[{\"name\":\"x\",\"type\":\"string\"},{\"name\":\"y\",\"type\":\"double\"}]}}}]}}},{\"name\":\"performance\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewPerformance\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"val
ue\",\"type\":\"double\"}]}}}]}}}]}},{\"name\":\"interoperability\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewInteroperability\",\"fields\":[{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}},{\"name\":\"discoverability\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverability\",\"fields\":[{\"name\":\"serverName\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}},{\"name\":\"VoIDDescription\",\"type\":\"double\"},{\"name\":\"SDDescription\",\"type\":\"double\"},{\"name\":\"NoDescription\",\"type\":\"double\"}]}},{\"name\":\"calculation\",\"type\":{\"type\":\"record\",\"name\":\"IndexViewCalculation\",\"fields\":[{\"name\":\"coherences\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewCalculationData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewCalculationDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}},{\"name\":\"rss\",\"type\":{\"type\":\"array\",\"items\":\"IndexViewCalculationData\"}},{\"name\":\"VoID\",\"type\":\"double\"},{\"name\":\"VoIDPart\",\"type\":\"double\"},{\"name\":\"SD\",\"type\":\"double\"},{\"name\":\"SDPart\",\"type\":\"double\"},
{\"name\":\"Coherence\",\"type\":\"double\"},{\"name\":\"RS\",\"type\":\"double\"}]}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Index to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Index from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Index instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static Index fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private long lastUpdate; + private java.util.List availability; + private sparqles.avro.analytics.IndexViewPerformance performance; + private sparqles.avro.analytics.IndexViewInteroperability interoperability; + private sparqles.avro.analytics.IndexViewDiscoverability discoverability; + private sparqles.avro.analytics.IndexViewCalculation calculation; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public Index() {} + + /** + * All-args constructor. 
+ * + * @param endpoint The new value for endpoint + * @param lastUpdate The new value for lastUpdate + * @param availability The new value for availability + * @param performance The new value for performance + * @param interoperability The new value for interoperability + * @param discoverability The new value for discoverability + * @param calculation The new value for calculation + */ + public Index( + sparqles.avro.Endpoint endpoint, + java.lang.Long lastUpdate, + java.util.List availability, + sparqles.avro.analytics.IndexViewPerformance performance, + sparqles.avro.analytics.IndexViewInteroperability interoperability, + sparqles.avro.analytics.IndexViewDiscoverability discoverability, + sparqles.avro.analytics.IndexViewCalculation calculation) { + this.endpoint = endpoint; + this.lastUpdate = lastUpdate; + this.availability = availability; + this.performance = performance; + this.interoperability = interoperability; + this.discoverability = discoverability; + this.calculation = calculation; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return lastUpdate; + case 2: + return availability; + case 3: + return performance; + case 4: + return interoperability; + case 5: + return discoverability; + case 6: + return calculation; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - lastUpdate = (java.lang.Long) value$; - break; - case 2: - availability = (java.util.List) value$; - break; - case 3: - performance = (sparqles.avro.analytics.IndexViewPerformance) value$; - break; - case 4: - interoperability = (sparqles.avro.analytics.IndexViewInteroperability) value$; - break; - case 5: - discoverability = (sparqles.avro.analytics.IndexViewDiscoverability) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + lastUpdate = (java.lang.Long) value$; + break; + case 2: + availability = (java.util.List) value$; + break; + case 3: + performance = (sparqles.avro.analytics.IndexViewPerformance) value$; + break; + case 4: + interoperability = (sparqles.avro.analytics.IndexViewInteroperability) value$; + break; + case 5: + discoverability = (sparqles.avro.analytics.IndexViewDiscoverability) value$; + break; + case 6: + calculation = (sparqles.avro.analytics.IndexViewCalculation) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value of the 'lastUpdate' field. 
+ */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value the value to set. + */ + public void setLastUpdate(long value) { + this.lastUpdate = value; + } + + /** + * Gets the value of the 'availability' field. + * + * @return The value of the 'availability' field. + */ + public java.util.List getAvailability() { + return availability; + } + + /** + * Sets the value of the 'availability' field. + * + * @param value the value to set. + */ + public void setAvailability(java.util.List value) { + this.availability = value; + } + + /** + * Gets the value of the 'performance' field. + * + * @return The value of the 'performance' field. + */ + public sparqles.avro.analytics.IndexViewPerformance getPerformance() { + return performance; + } + + /** + * Sets the value of the 'performance' field. + * + * @param value the value to set. + */ + public void setPerformance(sparqles.avro.analytics.IndexViewPerformance value) { + this.performance = value; + } + + /** + * Gets the value of the 'interoperability' field. + * + * @return The value of the 'interoperability' field. + */ + public sparqles.avro.analytics.IndexViewInteroperability getInteroperability() { + return interoperability; + } + + /** + * Sets the value of the 'interoperability' field. + * + * @param value the value to set. + */ + public void setInteroperability(sparqles.avro.analytics.IndexViewInteroperability value) { + this.interoperability = value; + } + + /** + * Gets the value of the 'discoverability' field. + * + * @return The value of the 'discoverability' field. + */ + public sparqles.avro.analytics.IndexViewDiscoverability getDiscoverability() { + return discoverability; + } + + /** + * Sets the value of the 'discoverability' field. + * + * @param value the value to set. 
+ */ + public void setDiscoverability(sparqles.avro.analytics.IndexViewDiscoverability value) { + this.discoverability = value; + } + + /** + * Gets the value of the 'calculation' field. + * + * @return The value of the 'calculation' field. + */ + public sparqles.avro.analytics.IndexViewCalculation getCalculation() { + return calculation; + } + + /** + * Sets the value of the 'calculation' field. + * + * @param value the value to set. + */ + public void setCalculation(sparqles.avro.analytics.IndexViewCalculation value) { + this.calculation = value; + } + + /** + * Creates a new Index RecordBuilder. + * + * @return A new Index RecordBuilder + */ + public static sparqles.avro.analytics.Index.Builder newBuilder() { + return new sparqles.avro.analytics.Index.Builder(); + } + + /** + * Creates a new Index RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new Index RecordBuilder + */ + public static sparqles.avro.analytics.Index.Builder newBuilder( + sparqles.avro.analytics.Index.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.Index.Builder(); + } else { + return new sparqles.avro.analytics.Index.Builder(other); + } + } + + /** + * Creates a new Index RecordBuilder by copying an existing Index instance. + * + * @param other The existing instance to copy. + * @return A new Index RecordBuilder + */ + public static sparqles.avro.analytics.Index.Builder newBuilder( + sparqles.avro.analytics.Index other) { + if (other == null) { + return new sparqles.avro.analytics.Index.Builder(); + } else { + return new sparqles.avro.analytics.Index.Builder(other); + } + } + + /** RecordBuilder for Index instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private long lastUpdate; + private java.util.List availability; + private sparqles.avro.analytics.IndexViewPerformance performance; + private sparqles.avro.analytics.IndexViewPerformance.Builder performanceBuilder; + private sparqles.avro.analytics.IndexViewInteroperability interoperability; + private sparqles.avro.analytics.IndexViewInteroperability.Builder interoperabilityBuilder; + private sparqles.avro.analytics.IndexViewDiscoverability discoverability; + private sparqles.avro.analytics.IndexViewDiscoverability.Builder discoverabilityBuilder; + private sparqles.avro.analytics.IndexViewCalculation calculation; + private sparqles.avro.analytics.IndexViewCalculation.Builder calculationBuilder; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.Index.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[1].schema(), other.lastUpdate); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.availability)) { + this.availability = data().deepCopy(fields()[2].schema(), other.availability); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.performance)) { + this.performance = data().deepCopy(fields()[3].schema(), other.performance); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (other.hasPerformanceBuilder()) { + this.performanceBuilder = + sparqles.avro.analytics.IndexViewPerformance.newBuilder(other.getPerformanceBuilder()); + } + if (isValidValue(fields()[4], other.interoperability)) { + this.interoperability = data().deepCopy(fields()[4].schema(), other.interoperability); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (other.hasInteroperabilityBuilder()) { + this.interoperabilityBuilder = + sparqles.avro.analytics.IndexViewInteroperability.newBuilder( + other.getInteroperabilityBuilder()); + } + if (isValidValue(fields()[5], other.discoverability)) { + this.discoverability = data().deepCopy(fields()[5].schema(), other.discoverability); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (other.hasDiscoverabilityBuilder()) { + this.discoverabilityBuilder = + sparqles.avro.analytics.IndexViewDiscoverability.newBuilder( + other.getDiscoverabilityBuilder()); + } + if (isValidValue(fields()[6], other.calculation)) { + this.calculation = data().deepCopy(fields()[6].schema(), 
other.calculation); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (other.hasCalculationBuilder()) { + this.calculationBuilder = + sparqles.avro.analytics.IndexViewCalculation.newBuilder(other.getCalculationBuilder()); + } + } + + /** + * Creates a Builder by copying an existing Index instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.Index other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[1].schema(), other.lastUpdate); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.availability)) { + this.availability = data().deepCopy(fields()[2].schema(), other.availability); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.performance)) { + this.performance = data().deepCopy(fields()[3].schema(), other.performance); + fieldSetFlags()[3] = true; + } + this.performanceBuilder = null; + if (isValidValue(fields()[4], other.interoperability)) { + this.interoperability = data().deepCopy(fields()[4].schema(), other.interoperability); + fieldSetFlags()[4] = true; + } + this.interoperabilityBuilder = null; + if (isValidValue(fields()[5], other.discoverability)) { + this.discoverability = data().deepCopy(fields()[5].schema(), other.discoverability); + fieldSetFlags()[5] = true; + } + this.discoverabilityBuilder = null; + if (isValidValue(fields()[6], other.calculation)) { + this.calculation = data().deepCopy(fields()[6].schema(), other.calculation); + fieldSetFlags()[6] = true; + } + this.calculationBuilder = null; } - /** Gets the value of the 'endpoint' field. */ + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. 
+ */ public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + return endpoint; } /** * Sets the value of the 'endpoint' field. * - * @param value the value to set. + * @param value The value of 'endpoint'. + * @return This builder. */ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.analytics.Index.Builder setEndpoint(sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'lastUpdate' field. */ - public java.lang.Long getLastUpdate() { - return lastUpdate; + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); + } + } + return endpointBuilder; + } + + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; + } + + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; + } + + /** + * Clears the value of the 'endpoint' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.Index.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value. + */ + public long getLastUpdate() { + return lastUpdate; } /** * Sets the value of the 'lastUpdate' field. * - * @param value the value to set. + * @param value The value of 'lastUpdate'. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setLastUpdate(long value) { + validate(fields()[1], value); + this.lastUpdate = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'lastUpdate' field has been set. + * + * @return True if the 'lastUpdate' field has been set, false otherwise. + */ + public boolean hasLastUpdate() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'lastUpdate' field. + * + * @return This builder. */ - public void setLastUpdate(java.lang.Long value) { - this.lastUpdate = value; + public sparqles.avro.analytics.Index.Builder clearLastUpdate() { + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'availability' field. */ + /** + * Gets the value of the 'availability' field. + * + * @return The value. + */ public java.util.List getAvailability() { - return availability; + return availability; } /** * Sets the value of the 'availability' field. * - * @param value the value to set. + * @param value The value of 'availability'. + * @return This builder. */ - public void setAvailability(java.util.List value) { - this.availability = value; + public sparqles.avro.analytics.Index.Builder setAvailability( + java.util.List value) { + validate(fields()[2], value); + this.availability = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'performance' field. */ + /** + * Checks whether the 'availability' field has been set. 
+ * + * @return True if the 'availability' field has been set, false otherwise. + */ + public boolean hasAvailability() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'availability' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder clearAvailability() { + availability = null; + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'performance' field. + * + * @return The value. + */ public sparqles.avro.analytics.IndexViewPerformance getPerformance() { - return performance; + return performance; } /** * Sets the value of the 'performance' field. * - * @param value the value to set. + * @param value The value of 'performance'. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setPerformance( + sparqles.avro.analytics.IndexViewPerformance value) { + validate(fields()[3], value); + this.performanceBuilder = null; + this.performance = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'performance' field has been set. + * + * @return True if the 'performance' field has been set, false otherwise. + */ + public boolean hasPerformance() { + return fieldSetFlags()[3]; + } + + /** + * Gets the Builder instance for the 'performance' field and creates one if it doesn't exist + * yet. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformance.Builder getPerformanceBuilder() { + if (performanceBuilder == null) { + if (hasPerformance()) { + setPerformanceBuilder( + sparqles.avro.analytics.IndexViewPerformance.newBuilder(performance)); + } else { + setPerformanceBuilder(sparqles.avro.analytics.IndexViewPerformance.newBuilder()); + } + } + return performanceBuilder; + } + + /** + * Sets the Builder instance for the 'performance' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.analytics.Index.Builder setPerformanceBuilder( + sparqles.avro.analytics.IndexViewPerformance.Builder value) { + clearPerformance(); + performanceBuilder = value; + return this; + } + + /** + * Checks whether the 'performance' field has an active Builder instance + * + * @return True if the 'performance' field has an active Builder instance + */ + public boolean hasPerformanceBuilder() { + return performanceBuilder != null; + } + + /** + * Clears the value of the 'performance' field. + * + * @return This builder. */ - public void setPerformance(sparqles.avro.analytics.IndexViewPerformance value) { - this.performance = value; + public sparqles.avro.analytics.Index.Builder clearPerformance() { + performance = null; + performanceBuilder = null; + fieldSetFlags()[3] = false; + return this; } - /** Gets the value of the 'interoperability' field. */ + /** + * Gets the value of the 'interoperability' field. + * + * @return The value. + */ public sparqles.avro.analytics.IndexViewInteroperability getInteroperability() { - return interoperability; + return interoperability; } /** * Sets the value of the 'interoperability' field. * - * @param value the value to set. + * @param value The value of 'interoperability'. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setInteroperability( + sparqles.avro.analytics.IndexViewInteroperability value) { + validate(fields()[4], value); + this.interoperabilityBuilder = null; + this.interoperability = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'interoperability' field has been set. + * + * @return True if the 'interoperability' field has been set, false otherwise. + */ + public boolean hasInteroperability() { + return fieldSetFlags()[4]; + } + + /** + * Gets the Builder instance for the 'interoperability' field and creates one if it doesn't + * exist yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewInteroperability.Builder getInteroperabilityBuilder() { + if (interoperabilityBuilder == null) { + if (hasInteroperability()) { + setInteroperabilityBuilder( + sparqles.avro.analytics.IndexViewInteroperability.newBuilder(interoperability)); + } else { + setInteroperabilityBuilder( + sparqles.avro.analytics.IndexViewInteroperability.newBuilder()); + } + } + return interoperabilityBuilder; + } + + /** + * Sets the Builder instance for the 'interoperability' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setInteroperabilityBuilder( + sparqles.avro.analytics.IndexViewInteroperability.Builder value) { + clearInteroperability(); + interoperabilityBuilder = value; + return this; + } + + /** + * Checks whether the 'interoperability' field has an active Builder instance + * + * @return True if the 'interoperability' field has an active Builder instance */ - public void setInteroperability(sparqles.avro.analytics.IndexViewInteroperability value) { - this.interoperability = value; + public boolean hasInteroperabilityBuilder() { + return interoperabilityBuilder != null; } - /** Gets the value of the 'discoverability' field. */ + /** + * Clears the value of the 'interoperability' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder clearInteroperability() { + interoperability = null; + interoperabilityBuilder = null; + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'discoverability' field. + * + * @return The value. + */ public sparqles.avro.analytics.IndexViewDiscoverability getDiscoverability() { - return discoverability; + return discoverability; } /** * Sets the value of the 'discoverability' field. * - * @param value the value to set. + * @param value The value of 'discoverability'. + * @return This builder. 
*/ - public void setDiscoverability(sparqles.avro.analytics.IndexViewDiscoverability value) { - this.discoverability = value; + public sparqles.avro.analytics.Index.Builder setDiscoverability( + sparqles.avro.analytics.IndexViewDiscoverability value) { + validate(fields()[5], value); + this.discoverabilityBuilder = null; + this.discoverability = value; + fieldSetFlags()[5] = true; + return this; } - /** RecordBuilder for Index instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private sparqles.avro.Endpoint endpoint; - private long lastUpdate; - private java.util.List availability; - private sparqles.avro.analytics.IndexViewPerformance performance; - private sparqles.avro.analytics.IndexViewInteroperability interoperability; - private sparqles.avro.analytics.IndexViewDiscoverability discoverability; + /** + * Checks whether the 'discoverability' field has been set. + * + * @return True if the 'discoverability' field has been set, false otherwise. + */ + public boolean hasDiscoverability() { + return fieldSetFlags()[5]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.Index.SCHEMA$); + /** + * Gets the Builder instance for the 'discoverability' field and creates one if it doesn't exist + * yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewDiscoverability.Builder getDiscoverabilityBuilder() { + if (discoverabilityBuilder == null) { + if (hasDiscoverability()) { + setDiscoverabilityBuilder( + sparqles.avro.analytics.IndexViewDiscoverability.newBuilder(discoverability)); + } else { + setDiscoverabilityBuilder(sparqles.avro.analytics.IndexViewDiscoverability.newBuilder()); } + } + return discoverabilityBuilder; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.Index.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[1].schema(), other.lastUpdate); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.availability)) { - this.availability = data().deepCopy(fields()[2].schema(), other.availability); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.performance)) { - this.performance = data().deepCopy(fields()[3].schema(), other.performance); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.interoperability)) { - this.interoperability = - data().deepCopy(fields()[4].schema(), other.interoperability); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.discoverability)) { - this.discoverability = data().deepCopy(fields()[5].schema(), other.discoverability); - fieldSetFlags()[5] = true; - } - } + /** + * Sets the Builder instance for the 'discoverability' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.analytics.Index.Builder setDiscoverabilityBuilder( + sparqles.avro.analytics.IndexViewDiscoverability.Builder value) { + clearDiscoverability(); + discoverabilityBuilder = value; + return this; + } - /** Creates a Builder by copying an existing Index instance */ - private Builder(sparqles.avro.analytics.Index other) { - super(sparqles.avro.analytics.Index.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[1].schema(), other.lastUpdate); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.availability)) { - this.availability = data().deepCopy(fields()[2].schema(), other.availability); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.performance)) { - this.performance = data().deepCopy(fields()[3].schema(), other.performance); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.interoperability)) { - this.interoperability = - data().deepCopy(fields()[4].schema(), other.interoperability); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.discoverability)) { - this.discoverability = data().deepCopy(fields()[5].schema(), other.discoverability); - fieldSetFlags()[5] = true; - } - } + /** + * Checks whether the 'discoverability' field has an active Builder instance + * + * @return True if the 'discoverability' field has an active Builder instance + */ + public boolean hasDiscoverabilityBuilder() { + return discoverabilityBuilder != null; + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + /** + * Clears the value of the 'discoverability' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.Index.Builder clearDiscoverability() { + discoverability = null; + discoverabilityBuilder = null; + fieldSetFlags()[5] = false; + return this; + } - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.analytics.Index.Builder setEndpoint(sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + /** + * Gets the value of the 'calculation' field. + * + * @return The value. + */ + public sparqles.avro.analytics.IndexViewCalculation getCalculation() { + return calculation; + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + /** + * Sets the value of the 'calculation' field. + * + * @param value The value of 'calculation'. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setCalculation( + sparqles.avro.analytics.IndexViewCalculation value) { + validate(fields()[6], value); + this.calculationBuilder = null; + this.calculation = value; + fieldSetFlags()[6] = true; + return this; + } - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.analytics.Index.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + /** + * Checks whether the 'calculation' field has been set. + * + * @return True if the 'calculation' field has been set, false otherwise. + */ + public boolean hasCalculation() { + return fieldSetFlags()[6]; + } - /** Gets the value of the 'lastUpdate' field */ - public java.lang.Long getLastUpdate() { - return lastUpdate; + /** + * Gets the Builder instance for the 'calculation' field and creates one if it doesn't exist + * yet. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewCalculation.Builder getCalculationBuilder() { + if (calculationBuilder == null) { + if (hasCalculation()) { + setCalculationBuilder( + sparqles.avro.analytics.IndexViewCalculation.newBuilder(calculation)); + } else { + setCalculationBuilder(sparqles.avro.analytics.IndexViewCalculation.newBuilder()); } + } + return calculationBuilder; + } - /** Sets the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.Index.Builder setLastUpdate(long value) { - validate(fields()[1], value); - this.lastUpdate = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Sets the Builder instance for the 'calculation' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.Index.Builder setCalculationBuilder( + sparqles.avro.analytics.IndexViewCalculation.Builder value) { + clearCalculation(); + calculationBuilder = value; + return this; + } - /** Checks whether the 'lastUpdate' field has been set */ - public boolean hasLastUpdate() { - return fieldSetFlags()[1]; - } + /** + * Checks whether the 'calculation' field has an active Builder instance + * + * @return True if the 'calculation' field has an active Builder instance + */ + public boolean hasCalculationBuilder() { + return calculationBuilder != null; + } - /** Clears the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.Index.Builder clearLastUpdate() { - fieldSetFlags()[1] = false; - return this; - } + /** + * Clears the value of the 'calculation' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.Index.Builder clearCalculation() { + calculation = null; + calculationBuilder = null; + fieldSetFlags()[6] = false; + return this; + } - /** Gets the value of the 'availability' field */ - public java.util.List getAvailability() { - return availability; + @Override + @SuppressWarnings("unchecked") + public Index build() { + try { + Index record = new Index(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } - - /** Sets the value of the 'availability' field */ - public sparqles.avro.analytics.Index.Builder setAvailability( - java.util.List value) { - validate(fields()[2], value); - this.availability = value; - fieldSetFlags()[2] = true; - return this; + record.lastUpdate = + fieldSetFlags()[1] ? this.lastUpdate : (java.lang.Long) defaultValue(fields()[1]); + record.availability = + fieldSetFlags()[2] + ? this.availability + : (java.util.List) + defaultValue(fields()[2]); + if (performanceBuilder != null) { + try { + record.performance = this.performanceBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("performance")); + throw e; + } + } else { + record.performance = + fieldSetFlags()[3] + ? 
this.performance + : (sparqles.avro.analytics.IndexViewPerformance) defaultValue(fields()[3]); } - - /** Checks whether the 'availability' field has been set */ - public boolean hasAvailability() { - return fieldSetFlags()[2]; + if (interoperabilityBuilder != null) { + try { + record.interoperability = this.interoperabilityBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("interoperability")); + throw e; + } + } else { + record.interoperability = + fieldSetFlags()[4] + ? this.interoperability + : (sparqles.avro.analytics.IndexViewInteroperability) defaultValue(fields()[4]); } - - /** Clears the value of the 'availability' field */ - public sparqles.avro.analytics.Index.Builder clearAvailability() { - availability = null; - fieldSetFlags()[2] = false; - return this; + if (discoverabilityBuilder != null) { + try { + record.discoverability = this.discoverabilityBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("discoverability")); + throw e; + } + } else { + record.discoverability = + fieldSetFlags()[5] + ? this.discoverability + : (sparqles.avro.analytics.IndexViewDiscoverability) defaultValue(fields()[5]); } - - /** Gets the value of the 'performance' field */ - public sparqles.avro.analytics.IndexViewPerformance getPerformance() { - return performance; + if (calculationBuilder != null) { + try { + record.calculation = this.calculationBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("calculation")); + throw e; + } + } else { + record.calculation = + fieldSetFlags()[6] + ? 
this.calculation + : (sparqles.avro.analytics.IndexViewCalculation) defaultValue(fields()[6]); } + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Sets the value of the 'performance' field */ - public sparqles.avro.analytics.Index.Builder setPerformance( - sparqles.avro.analytics.IndexViewPerformance value) { - validate(fields()[3], value); - this.performance = value; - fieldSetFlags()[3] = true; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Checks whether the 'performance' field has been set */ - public boolean hasPerformance() { - return fieldSetFlags()[3]; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Clears the value of the 'performance' field */ - public sparqles.avro.analytics.Index.Builder clearPerformance() { - performance = null; - fieldSetFlags()[3] = false; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'interoperability' field */ - public sparqles.avro.analytics.IndexViewInteroperability getInteroperability() { - return interoperability; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'interoperability' field */ - public sparqles.avro.analytics.Index.Builder setInteroperability( - sparqles.avro.analytics.IndexViewInteroperability value) { - validate(fields()[4], value); - this.interoperability = value; - fieldSetFlags()[4] = true; - return this; - } + @Override + 
protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'interoperability' field has been set */ - public boolean hasInteroperability() { - return fieldSetFlags()[4]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); - /** Clears the value of the 'interoperability' field */ - public sparqles.avro.analytics.Index.Builder clearInteroperability() { - interoperability = null; - fieldSetFlags()[4] = false; - return this; - } + out.writeLong(this.lastUpdate); - /** Gets the value of the 'discoverability' field */ - public sparqles.avro.analytics.IndexViewDiscoverability getDiscoverability() { - return discoverability; - } + long size0 = this.availability.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.AvailabilityIndex e0 : this.availability) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); - /** Sets the value of the 'discoverability' field */ - public sparqles.avro.analytics.Index.Builder setDiscoverability( - sparqles.avro.analytics.IndexViewDiscoverability value) { - validate(fields()[5], value); - this.discoverability = value; - fieldSetFlags()[5] = true; - return this; - } + this.performance.customEncode(out); - /** Checks whether the 'discoverability' field has been set */ - public boolean hasDiscoverability() { - return fieldSetFlags()[5]; - } + this.interoperability.customEncode(out); + + this.discoverability.customEncode(out); + + this.calculation.customEncode(out); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { 
+ if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); - /** Clears the value of the 'discoverability' field */ - public sparqles.avro.analytics.Index.Builder clearDiscoverability() { - discoverability = null; - fieldSetFlags()[5] = false; - return this; + this.lastUpdate = in.readLong(); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.availability; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("availability").schema()); + this.availability = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.AvailabilityIndex e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.AvailabilityIndex(); + } + e0.customDecode(in); + a0.add(e0); } + } + + if (this.performance == null) { + this.performance = new sparqles.avro.analytics.IndexViewPerformance(); + } + this.performance.customDecode(in); + + if (this.interoperability == null) { + this.interoperability = new sparqles.avro.analytics.IndexViewInteroperability(); + } + this.interoperability.customDecode(in); + + if (this.discoverability == null) { + this.discoverability = new sparqles.avro.analytics.IndexViewDiscoverability(); + } + this.discoverability.customDecode(in); + + if (this.calculation == null) { + this.calculation = new sparqles.avro.analytics.IndexViewCalculation(); + } + this.calculation.customDecode(in); - @Override - public Index build() { - try { - Index record = new Index(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.lastUpdate = - fieldSetFlags()[1] - ? this.lastUpdate - : (java.lang.Long) defaultValue(fields()[1]); - record.availability = - fieldSetFlags()[2] - ? 
this.availability - : (java.util.List) - defaultValue(fields()[2]); - record.performance = - fieldSetFlags()[3] - ? this.performance - : (sparqles.avro.analytics.IndexViewPerformance) - defaultValue(fields()[3]); - record.interoperability = - fieldSetFlags()[4] - ? this.interoperability - : (sparqles.avro.analytics.IndexViewInteroperability) - defaultValue(fields()[4]); - record.discoverability = - fieldSetFlags()[5] - ? this.discoverability - : (sparqles.avro.analytics.IndexViewDiscoverability) - defaultValue(fields()[5]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + } else { + for (int i = 0; i < 7; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); } + this.endpoint.customDecode(in); + break; + + case 1: + this.lastUpdate = in.readLong(); + break; + + case 2: + long size0 = in.readArrayStart(); + java.util.List a0 = this.availability; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("availability").schema()); + this.availability = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.AvailabilityIndex e0 = (ga0 != null ? 
ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.AvailabilityIndex(); + } + e0.customDecode(in); + a0.add(e0); + } + } + break; + + case 3: + if (this.performance == null) { + this.performance = new sparqles.avro.analytics.IndexViewPerformance(); + } + this.performance.customDecode(in); + break; + + case 4: + if (this.interoperability == null) { + this.interoperability = new sparqles.avro.analytics.IndexViewInteroperability(); + } + this.interoperability.customDecode(in); + break; + + case 5: + if (this.discoverability == null) { + this.discoverability = new sparqles.avro.analytics.IndexViewDiscoverability(); + } + this.discoverability.customDecode(in); + break; + + case 6: + if (this.calculation == null) { + this.calculation = new sparqles.avro.analytics.IndexViewCalculation(); + } + this.calculation.customDecode(in); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexAvailabilityDataPoint.java b/backend/src/main/java/sparqles/avro/analytics/IndexAvailabilityDataPoint.java index 22b364d8..a7ee03b4 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexAvailabilityDataPoint.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexAvailabilityDataPoint.java @@ -5,215 +5,425 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexAvailabilityDataPoint extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"IndexAvailabilityDataPoint\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"x\",\"type\":\"string\"},{\"name\":\"y\",\"type\":\"double\"}]}"); - @Deprecated public java.lang.CharSequence x; - @Deprecated public double y; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -3339399301666649668L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexAvailabilityDataPoint() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexAvailabilityDataPoint\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"x\",\"type\":\"string\"},{\"name\":\"y\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. */ - public IndexAvailabilityDataPoint(java.lang.CharSequence x, java.lang.Double y) { - this.x = x; - this.y = y; + /** + * Serializes this IndexAvailabilityDataPoint to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexAvailabilityDataPoint from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexAvailabilityDataPoint instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexAvailabilityDataPoint fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence x; + private double y; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexAvailabilityDataPoint() {} + + /** + * All-args constructor. 
+ * + * @param x The new value for x + * @param y The new value for y + */ + public IndexAvailabilityDataPoint(java.lang.CharSequence x, java.lang.Double y) { + this.x = x; + this.y = y; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return x; + case 1: + return y; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + x = (java.lang.CharSequence) value$; + break; + case 1: + y = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new IndexAvailabilityDataPoint RecordBuilder */ - public static sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder newBuilder() { - return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(); + /** + * Gets the value of the 'x' field. + * + * @return The value of the 'x' field. + */ + public java.lang.CharSequence getX() { + return x; + } + + /** + * Sets the value of the 'x' field. + * + * @param value the value to set. + */ + public void setX(java.lang.CharSequence value) { + this.x = value; + } + + /** + * Gets the value of the 'y' field. + * + * @return The value of the 'y' field. + */ + public double getY() { + return y; + } + + /** + * Sets the value of the 'y' field. + * + * @param value the value to set. + */ + public void setY(double value) { + this.y = value; + } + + /** + * Creates a new IndexAvailabilityDataPoint RecordBuilder. 
+ * + * @return A new IndexAvailabilityDataPoint RecordBuilder + */ + public static sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder newBuilder() { + return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(); + } + + /** + * Creates a new IndexAvailabilityDataPoint RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexAvailabilityDataPoint RecordBuilder + */ + public static sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder newBuilder( + sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(); + } else { + return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(other); } + } + + /** + * Creates a new IndexAvailabilityDataPoint RecordBuilder by copying an existing + * IndexAvailabilityDataPoint instance. + * + * @param other The existing instance to copy. + * @return A new IndexAvailabilityDataPoint RecordBuilder + */ + public static sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder newBuilder( + sparqles.avro.analytics.IndexAvailabilityDataPoint other) { + if (other == null) { + return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(); + } else { + return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(other); + } + } + + /** RecordBuilder for IndexAvailabilityDataPoint instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence x; + private double y; - /** Creates a new IndexAvailabilityDataPoint RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder newBuilder( - sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder other) { - return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexAvailabilityDataPoint RecordBuilder by copying an existing - * IndexAvailabilityDataPoint instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder newBuilder( - sparqles.avro.analytics.IndexAvailabilityDataPoint other) { - return new sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder(other); + private Builder(sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder other) { + super(other); + if (isValidValue(fields()[0], other.x)) { + this.x = data().deepCopy(fields()[0].schema(), other.x); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.y)) { + this.y = data().deepCopy(fields()[1].schema(), other.y); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexAvailabilityDataPoint instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexAvailabilityDataPoint other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.x)) { + this.x = data().deepCopy(fields()[0].schema(), other.x); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.y)) { + this.y = data().deepCopy(fields()[1].schema(), other.y); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return x; - case 1: - return y; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'x' field. + * + * @return The value. + */ + public java.lang.CharSequence getX() { + return x; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - x = (java.lang.CharSequence) value$; - break; - case 1: - y = (java.lang.Double) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'x' field. + * + * @param value The value of 'x'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder setX( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.x = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'x' field. */ - public java.lang.CharSequence getX() { - return x; + /** + * Checks whether the 'x' field has been set. + * + * @return True if the 'x' field has been set, false otherwise. + */ + public boolean hasX() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'x' field. + * Clears the value of the 'x' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setX(java.lang.CharSequence value) { - this.x = value; + public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder clearX() { + x = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'y' field. */ - public java.lang.Double getY() { - return y; + /** + * Gets the value of the 'y' field. + * + * @return The value. + */ + public double getY() { + return y; } /** * Sets the value of the 'y' field. * - * @param value the value to set. + * @param value The value of 'y'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder setY(double value) { + validate(fields()[1], value); + this.y = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'y' field has been set. + * + * @return True if the 'y' field has been set, false otherwise. */ - public void setY(java.lang.Double value) { - this.y = value; + public boolean hasY() { + return fieldSetFlags()[1]; } - /** RecordBuilder for IndexAvailabilityDataPoint instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Clears the value of the 'y' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder clearY() { + fieldSetFlags()[1] = false; + return this; + } - private java.lang.CharSequence x; - private double y; + @Override + @SuppressWarnings("unchecked") + public IndexAvailabilityDataPoint build() { + try { + IndexAvailabilityDataPoint record = new IndexAvailabilityDataPoint(); + record.x = fieldSetFlags()[0] ? this.x : (java.lang.CharSequence) defaultValue(fields()[0]); + record.y = fieldSetFlags()[1] ? 
this.y : (java.lang.Double) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexAvailabilityDataPoint.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder other) { - super(other); - if (isValidValue(fields()[0], other.x)) { - this.x = data().deepCopy(fields()[0].schema(), other.x); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.y)) { - this.y = data().deepCopy(fields()[1].schema(), other.y); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing IndexAvailabilityDataPoint instance */ - private Builder(sparqles.avro.analytics.IndexAvailabilityDataPoint other) { - super(sparqles.avro.analytics.IndexAvailabilityDataPoint.SCHEMA$); - if (isValidValue(fields()[0], other.x)) { - this.x = data().deepCopy(fields()[0].schema(), other.x); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.y)) { - this.y = data().deepCopy(fields()[1].schema(), other.y); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'x' field */ - public java.lang.CharSequence getX() { - return x; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + 
READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'x' field */ - public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder setX( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.x = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'x' field has been set */ - public boolean hasX() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.x); - /** Clears the value of the 'x' field */ - public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder clearX() { - x = null; - fieldSetFlags()[0] = false; - return this; - } + out.writeDouble(this.y); + } - /** Gets the value of the 'y' field */ - public java.lang.Double getY() { - return y; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.x = in.readString(this.x instanceof Utf8 ? (Utf8) this.x : null); - /** Sets the value of the 'y' field */ - public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder setY(double value) { - validate(fields()[1], value); - this.y = value; - fieldSetFlags()[1] = true; - return this; - } + this.y = in.readDouble(); - /** Checks whether the 'y' field has been set */ - public boolean hasY() { - return fieldSetFlags()[1]; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.x = in.readString(this.x instanceof Utf8 ? 
(Utf8) this.x : null); + break; - /** Clears the value of the 'y' field */ - public sparqles.avro.analytics.IndexAvailabilityDataPoint.Builder clearY() { - fieldSetFlags()[1] = false; - return this; - } + case 1: + this.y = in.readDouble(); + break; - @Override - public IndexAvailabilityDataPoint build() { - try { - IndexAvailabilityDataPoint record = new IndexAvailabilityDataPoint(); - record.x = - fieldSetFlags()[0] - ? this.x - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.y = - fieldSetFlags()[1] ? this.y : (java.lang.Double) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculation.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculation.java new file mode 100644 index 00000000..4a7a1aa2 --- /dev/null +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculation.java @@ -0,0 +1,1067 @@ +/** + * Autogenerated by Avro + * + *

DO NOT EDIT DIRECTLY + */ +package sparqles.avro.analytics; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + +@org.apache.avro.specific.AvroGenerated +public class IndexViewCalculation extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 2125895546380566315L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewCalculation\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"coherences\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewCalculationData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewCalculationDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}},{\"name\":\"rss\",\"type\":{\"type\":\"array\",\"items\":\"IndexViewCalculationData\"}},{\"name\":\"VoID\",\"type\":\"double\"},{\"name\":\"VoIDPart\",\"type\":\"double\"},{\"name\":\"SD\",\"type\":\"double\"},{\"name\":\"SDPart\",\"type\":\"double\"},{\"name\":\"Coherence\",\"type\":\"double\"},{\"name\":\"RS\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewCalculation to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewCalculation from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewCalculation instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewCalculation fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.util.List coherences; + private java.util.List rss; + private double VoID; + private double VoIDPart; + private double SD; + private double SDPart; + private double Coherence; + private double RS; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). 
+ */ + public IndexViewCalculation() {} + + /** + * All-args constructor. + * + * @param coherences The new value for coherences + * @param rss The new value for rss + * @param VoID The new value for VoID + * @param VoIDPart The new value for VoIDPart + * @param SD The new value for SD + * @param SDPart The new value for SDPart + * @param Coherence The new value for Coherence + * @param RS The new value for RS + */ + public IndexViewCalculation( + java.util.List coherences, + java.util.List rss, + java.lang.Double VoID, + java.lang.Double VoIDPart, + java.lang.Double SD, + java.lang.Double SDPart, + java.lang.Double Coherence, + java.lang.Double RS) { + this.coherences = coherences; + this.rss = rss; + this.VoID = VoID; + this.VoIDPart = VoIDPart; + this.SD = SD; + this.SDPart = SDPart; + this.Coherence = Coherence; + this.RS = RS; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return coherences; + case 1: + return rss; + case 2: + return VoID; + case 3: + return VoIDPart; + case 4: + return SD; + case 5: + return SDPart; + case 6: + return Coherence; + case 7: + return RS; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + coherences = (java.util.List) value$; + break; + case 1: + rss = (java.util.List) value$; + break; + case 2: + VoID = (java.lang.Double) value$; + break; + case 3: + VoIDPart = (java.lang.Double) value$; + break; + case 4: + SD = (java.lang.Double) value$; + break; + case 5: + SDPart = (java.lang.Double) value$; + break; + case 6: + Coherence = (java.lang.Double) value$; + break; + case 7: + RS = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'coherences' field. + * + * @return The value of the 'coherences' field. + */ + public java.util.List getCoherences() { + return coherences; + } + + /** + * Sets the value of the 'coherences' field. + * + * @param value the value to set. + */ + public void setCoherences( + java.util.List value) { + this.coherences = value; + } + + /** + * Gets the value of the 'rss' field. + * + * @return The value of the 'rss' field. + */ + public java.util.List getRss() { + return rss; + } + + /** + * Sets the value of the 'rss' field. + * + * @param value the value to set. + */ + public void setRss(java.util.List value) { + this.rss = value; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value of the 'VoID' field. + */ + public double getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value the value to set. + */ + public void setVoID(double value) { + this.VoID = value; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value of the 'VoIDPart' field. + */ + public double getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value the value to set. 
+ */ + public void setVoIDPart(double value) { + this.VoIDPart = value; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value of the 'SD' field. + */ + public double getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value the value to set. + */ + public void setSD(double value) { + this.SD = value; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value of the 'SDPart' field. + */ + public double getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value the value to set. + */ + public void setSDPart(double value) { + this.SDPart = value; + } + + /** + * Gets the value of the 'Coherence' field. + * + * @return The value of the 'Coherence' field. + */ + public double getCoherence() { + return Coherence; + } + + /** + * Sets the value of the 'Coherence' field. + * + * @param value the value to set. + */ + public void setCoherence(double value) { + this.Coherence = value; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value of the 'RS' field. + */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value the value to set. + */ + public void setRS(double value) { + this.RS = value; + } + + /** + * Creates a new IndexViewCalculation RecordBuilder. + * + * @return A new IndexViewCalculation RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculation.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewCalculation.Builder(); + } + + /** + * Creates a new IndexViewCalculation RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new IndexViewCalculation RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculation.Builder newBuilder( + sparqles.avro.analytics.IndexViewCalculation.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewCalculation.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewCalculation.Builder(other); + } + } + + /** + * Creates a new IndexViewCalculation RecordBuilder by copying an existing IndexViewCalculation + * instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewCalculation RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculation.Builder newBuilder( + sparqles.avro.analytics.IndexViewCalculation other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewCalculation.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewCalculation.Builder(other); + } + } + + /** RecordBuilder for IndexViewCalculation instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.util.List coherences; + private java.util.List rss; + private double VoID; + private double VoIDPart; + private double SD; + private double SDPart; + private double Coherence; + private double RS; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewCalculation.Builder other) { + super(other); + if (isValidValue(fields()[0], other.coherences)) { + this.coherences = data().deepCopy(fields()[0].schema(), other.coherences); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.rss)) { + this.rss = data().deepCopy(fields()[1].schema(), other.rss); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.VoID)) { + this.VoID = data().deepCopy(fields()[2].schema(), other.VoID); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[3].schema(), other.VoIDPart); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.SD)) { + this.SD = data().deepCopy(fields()[4].schema(), other.SD); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[5].schema(), other.SDPart); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.Coherence)) { + this.Coherence = data().deepCopy(fields()[6].schema(), other.Coherence); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (isValidValue(fields()[7], other.RS)) { + this.RS = data().deepCopy(fields()[7].schema(), other.RS); + fieldSetFlags()[7] = other.fieldSetFlags()[7]; + } + } + + /** + * Creates a Builder by copying an existing IndexViewCalculation instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewCalculation other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.coherences)) { + this.coherences = data().deepCopy(fields()[0].schema(), other.coherences); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.rss)) { + this.rss = data().deepCopy(fields()[1].schema(), other.rss); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.VoID)) { + this.VoID = data().deepCopy(fields()[2].schema(), other.VoID); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[3].schema(), other.VoIDPart); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.SD)) { + this.SD = data().deepCopy(fields()[4].schema(), other.SD); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[5].schema(), other.SDPart); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.Coherence)) { + this.Coherence = data().deepCopy(fields()[6].schema(), other.Coherence); + fieldSetFlags()[6] = true; + } + if (isValidValue(fields()[7], other.RS)) { + this.RS = data().deepCopy(fields()[7].schema(), other.RS); + fieldSetFlags()[7] = true; + } + } + + /** + * Gets the value of the 'coherences' field. + * + * @return The value. + */ + public java.util.List getCoherences() { + return coherences; + } + + /** + * Sets the value of the 'coherences' field. + * + * @param value The value of 'coherences'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setCoherences( + java.util.List value) { + validate(fields()[0], value); + this.coherences = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'coherences' field has been set. + * + * @return True if the 'coherences' field has been set, false otherwise. 
+ */ + public boolean hasCoherences() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'coherences' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearCoherences() { + coherences = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'rss' field. + * + * @return The value. + */ + public java.util.List getRss() { + return rss; + } + + /** + * Sets the value of the 'rss' field. + * + * @param value The value of 'rss'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setRss( + java.util.List value) { + validate(fields()[1], value); + this.rss = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'rss' field has been set. + * + * @return True if the 'rss' field has been set, false otherwise. + */ + public boolean hasRss() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'rss' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearRss() { + rss = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value. + */ + public double getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value The value of 'VoID'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setVoID(double value) { + validate(fields()[2], value); + this.VoID = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'VoID' field has been set. + * + * @return True if the 'VoID' field has been set, false otherwise. + */ + public boolean hasVoID() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'VoID' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearVoID() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value. + */ + public double getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value The value of 'VoIDPart'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setVoIDPart(double value) { + validate(fields()[3], value); + this.VoIDPart = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'VoIDPart' field has been set. + * + * @return True if the 'VoIDPart' field has been set, false otherwise. + */ + public boolean hasVoIDPart() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'VoIDPart' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearVoIDPart() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value. + */ + public double getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value The value of 'SD'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setSD(double value) { + validate(fields()[4], value); + this.SD = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'SD' field has been set. + * + * @return True if the 'SD' field has been set, false otherwise. + */ + public boolean hasSD() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'SD' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearSD() { + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value. 
+ */ + public double getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value The value of 'SDPart'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setSDPart(double value) { + validate(fields()[5], value); + this.SDPart = value; + fieldSetFlags()[5] = true; + return this; + } + + /** + * Checks whether the 'SDPart' field has been set. + * + * @return True if the 'SDPart' field has been set, false otherwise. + */ + public boolean hasSDPart() { + return fieldSetFlags()[5]; + } + + /** + * Clears the value of the 'SDPart' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearSDPart() { + fieldSetFlags()[5] = false; + return this; + } + + /** + * Gets the value of the 'Coherence' field. + * + * @return The value. + */ + public double getCoherence() { + return Coherence; + } + + /** + * Sets the value of the 'Coherence' field. + * + * @param value The value of 'Coherence'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setCoherence(double value) { + validate(fields()[6], value); + this.Coherence = value; + fieldSetFlags()[6] = true; + return this; + } + + /** + * Checks whether the 'Coherence' field has been set. + * + * @return True if the 'Coherence' field has been set, false otherwise. + */ + public boolean hasCoherence() { + return fieldSetFlags()[6]; + } + + /** + * Clears the value of the 'Coherence' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearCoherence() { + fieldSetFlags()[6] = false; + return this; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value. + */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value The value of 'RS'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewCalculation.Builder setRS(double value) { + validate(fields()[7], value); + this.RS = value; + fieldSetFlags()[7] = true; + return this; + } + + /** + * Checks whether the 'RS' field has been set. + * + * @return True if the 'RS' field has been set, false otherwise. + */ + public boolean hasRS() { + return fieldSetFlags()[7]; + } + + /** + * Clears the value of the 'RS' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculation.Builder clearRS() { + fieldSetFlags()[7] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public IndexViewCalculation build() { + try { + IndexViewCalculation record = new IndexViewCalculation(); + record.coherences = + fieldSetFlags()[0] + ? this.coherences + : (java.util.List) + defaultValue(fields()[0]); + record.rss = + fieldSetFlags()[1] + ? this.rss + : (java.util.List) + defaultValue(fields()[1]); + record.VoID = fieldSetFlags()[2] ? this.VoID : (java.lang.Double) defaultValue(fields()[2]); + record.VoIDPart = + fieldSetFlags()[3] ? this.VoIDPart : (java.lang.Double) defaultValue(fields()[3]); + record.SD = fieldSetFlags()[4] ? this.SD : (java.lang.Double) defaultValue(fields()[4]); + record.SDPart = + fieldSetFlags()[5] ? this.SDPart : (java.lang.Double) defaultValue(fields()[5]); + record.Coherence = + fieldSetFlags()[6] ? this.Coherence : (java.lang.Double) defaultValue(fields()[6]); + record.RS = fieldSetFlags()[7] ? 
this.RS : (java.lang.Double) defaultValue(fields()[7]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + long size0 = this.coherences.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewCalculationData e0 : this.coherences) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + long size1 = this.rss.size(); + out.writeArrayStart(); + out.setItemCount(size1); + long actualSize1 = 0; + for (sparqles.avro.analytics.IndexViewCalculationData e1 : this.rss) { + actualSize1++; + out.startItem(); + e1.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize1 != size1) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size1 + ", but element count was " + actualSize1 + "."); + + out.writeDouble(this.VoID); + + 
out.writeDouble(this.VoIDPart); + + out.writeDouble(this.SD); + + out.writeDouble(this.SDPart); + + out.writeDouble(this.Coherence); + + out.writeDouble(this.RS); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + long size0 = in.readArrayStart(); + java.util.List a0 = this.coherences; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("coherences").schema()); + this.coherences = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewCalculationData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewCalculationData(); + } + e0.customDecode(in); + a0.add(e0); + } + } + + long size1 = in.readArrayStart(); + java.util.List a1 = this.rss; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("rss").schema()); + this.rss = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.IndexViewCalculationData e1 = (ga1 != null ? 
ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.IndexViewCalculationData(); + } + e1.customDecode(in); + a1.add(e1); + } + } + + this.VoID = in.readDouble(); + + this.VoIDPart = in.readDouble(); + + this.SD = in.readDouble(); + + this.SDPart = in.readDouble(); + + this.Coherence = in.readDouble(); + + this.RS = in.readDouble(); + + } else { + for (int i = 0; i < 8; i++) { + switch (fieldOrder[i].pos()) { + case 0: + long size0 = in.readArrayStart(); + java.util.List a0 = this.coherences; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("coherences").schema()); + this.coherences = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewCalculationData e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewCalculationData(); + } + e0.customDecode(in); + a0.add(e0); + } + } + break; + + case 1: + long size1 = in.readArrayStart(); + java.util.List a1 = this.rss; + if (a1 == null) { + a1 = + new SpecificData.Array( + (int) size1, SCHEMA$.getField("rss").schema()); + this.rss = a1; + } else a1.clear(); + SpecificData.Array ga1 = + (a1 instanceof SpecificData.Array + ? (SpecificData.Array) a1 + : null); + for (; 0 < size1; size1 = in.arrayNext()) { + for (; size1 != 0; size1--) { + sparqles.avro.analytics.IndexViewCalculationData e1 = + (ga1 != null ? 
ga1.peek() : null); + if (e1 == null) { + e1 = new sparqles.avro.analytics.IndexViewCalculationData(); + } + e1.customDecode(in); + a1.add(e1); + } + } + break; + + case 2: + this.VoID = in.readDouble(); + break; + + case 3: + this.VoIDPart = in.readDouble(); + break; + + case 4: + this.SD = in.readDouble(); + break; + + case 5: + this.SDPart = in.readDouble(); + break; + + case 6: + this.Coherence = in.readDouble(); + break; + + case 7: + this.RS = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculationData.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculationData.java new file mode 100644 index 00000000..b8a1634f --- /dev/null +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculationData.java @@ -0,0 +1,493 @@ +/** + * Autogenerated by Avro + * + *

DO NOT EDIT DIRECTLY + */ +package sparqles.avro.analytics; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + +@org.apache.avro.specific.AvroGenerated +public class IndexViewCalculationData extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 6255000774111843550L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewCalculationData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewCalculationDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewCalculationData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewCalculationData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewCalculationData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewCalculationData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.util.List values; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewCalculationData() {} + + /** + * All-args constructor. + * + * @param key The new value for key + * @param values The new value for values + */ + public IndexViewCalculationData( + java.lang.CharSequence key, + java.util.List values) { + this.key = key; + this.values = values; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return values; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + values = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } + + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. + */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'values' field. + * + * @return The value of the 'values' field. + */ + public java.util.List getValues() { + return values; + } + + /** + * Sets the value of the 'values' field. + * + * @param value the value to set. + */ + public void setValues( + java.util.List value) { + this.values = value; + } + + /** + * Creates a new IndexViewCalculationData RecordBuilder. + * + * @return A new IndexViewCalculationData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculationData.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewCalculationData.Builder(); + } + + /** + * Creates a new IndexViewCalculationData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new IndexViewCalculationData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculationData.Builder newBuilder( + sparqles.avro.analytics.IndexViewCalculationData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewCalculationData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewCalculationData.Builder(other); + } + } + + /** + * Creates a new IndexViewCalculationData RecordBuilder by copying an existing + * IndexViewCalculationData instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewCalculationData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculationData.Builder newBuilder( + sparqles.avro.analytics.IndexViewCalculationData other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewCalculationData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewCalculationData.Builder(other); + } + } + + /** RecordBuilder for IndexViewCalculationData instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.util.List values; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewCalculationData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + } + + /** + * Creates a Builder by copying an existing IndexViewCalculationData instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.IndexViewCalculationData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'key' field. + * + * @return The value. + */ + public java.lang.CharSequence getKey() { + return key; + } + + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationData.Builder setKey( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. + */ + public boolean hasKey() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'key' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationData.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'values' field. + * + * @return The value. 
+ */ + public java.util.List getValues() { + return values; + } + + /** + * Sets the value of the 'values' field. + * + * @param value The value of 'values'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationData.Builder setValues( + java.util.List value) { + validate(fields()[1], value); + this.values = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'values' field has been set. + * + * @return True if the 'values' field has been set, false otherwise. + */ + public boolean hasValues() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'values' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationData.Builder clearValues() { + values = null; + fieldSetFlags()[1] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public IndexViewCalculationData build() { + try { + IndexViewCalculationData record = new IndexViewCalculationData(); + record.key = + fieldSetFlags()[0] ? this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.values = + fieldSetFlags()[1] + ? 
this.values + : (java.util.List) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); + + long size0 = this.values.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewCalculationDataValues e0 : this.values) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? 
(Utf8) this.key : null); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewCalculationDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewCalculationDataValues(); + } + e0.customDecode(in); + a0.add(e0); + } + } + + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; + + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) + a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewCalculationDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewCalculationDataValues(); + } + e0.customDecode(in); + a0.add(e0); + } + } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculationDataValues.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculationDataValues.java new file mode 100644 index 00000000..de7eae0a --- /dev/null +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewCalculationDataValues.java @@ -0,0 +1,431 @@ +/** + * Autogenerated by Avro + * + *

DO NOT EDIT DIRECTLY + */ +package sparqles.avro.analytics; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + +@org.apache.avro.specific.AvroGenerated +public class IndexViewCalculationDataValues extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 7990489637718673347L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewCalculationDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewCalculationDataValues to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewCalculationDataValues from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewCalculationDataValues instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewCalculationDataValues fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private double value; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewCalculationDataValues() {} + + /** + * All-args constructor. + * + * @param label The new value for label + * @param value The new value for value + */ + public IndexViewCalculationDataValues(java.lang.CharSequence label, java.lang.Double value) { + this.label = label; + this.value = value; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public double getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. + */ + public void setValue(double value) { + this.value = value; + } + + /** + * Creates a new IndexViewCalculationDataValues RecordBuilder. + * + * @return A new IndexViewCalculationDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculationDataValues.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewCalculationDataValues.Builder(); + } + + /** + * Creates a new IndexViewCalculationDataValues RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new IndexViewCalculationDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculationDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewCalculationDataValues.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewCalculationDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewCalculationDataValues.Builder(other); + } + } + + /** + * Creates a new IndexViewCalculationDataValues RecordBuilder by copying an existing + * IndexViewCalculationDataValues instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewCalculationDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewCalculationDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewCalculationDataValues other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewCalculationDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewCalculationDataValues.Builder(other); + } + } + + /** RecordBuilder for IndexViewCalculationDataValues instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private double value; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewCalculationDataValues.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + } + + /** + * Creates a Builder by copying an existing IndexViewCalculationDataValues instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.IndexViewCalculationDataValues other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'label' field. + * + * @return The value. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationDataValues.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. + */ + public boolean hasLabel() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'label' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationDataValues.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'value' field. 
+ * + * @return The value. + */ + public double getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value The value of 'value'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationDataValues.Builder setValue(double value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'value' field has been set. + * + * @return True if the 'value' field has been set, false otherwise. + */ + public boolean hasValue() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'value' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewCalculationDataValues.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public IndexViewCalculationDataValues build() { + try { + IndexViewCalculationDataValues record = new IndexViewCalculationDataValues(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? 
this.value : (java.lang.Double) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); + + out.writeDouble(this.value); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); + + this.value = in.readDouble(); + + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? 
(Utf8) this.label : null); + break; + + case 1: + this.value = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverability.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverability.java index 8fa0973e..5dac3107 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverability.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverability.java @@ -5,350 +5,663 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class IndexViewDiscoverability extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"IndexViewDiscoverability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"serverName\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}},{\"name\":\"VoIDDescription\",\"type\":\"double\"},{\"name\":\"SDDescription\",\"type\":\"double\"},{\"name\":\"NoDescription\",\"type\":\"double\"}]}"); - - @Deprecated - public java.util.List serverName; - - @Deprecated public double VoIDDescription; - @Deprecated public double SDDescription; - @Deprecated public double NoDescription; + implements 
org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -7717181572167394328L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewDiscoverability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"serverName\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}},{\"name\":\"VoIDDescription\",\"type\":\"double\"},{\"name\":\"SDDescription\",\"type\":\"double\"},{\"name\":\"NoDescription\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewDiscoverability to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewDiscoverability from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewDiscoverability instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewDiscoverability fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.util.List serverName; + private double VoIDDescription; + private double SDDescription; + private double NoDescription; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewDiscoverability() {} + + /** + * All-args constructor. 
+ * + * @param serverName The new value for serverName + * @param VoIDDescription The new value for VoIDDescription + * @param SDDescription The new value for SDDescription + * @param NoDescription The new value for NoDescription + */ + public IndexViewDiscoverability( + java.util.List serverName, + java.lang.Double VoIDDescription, + java.lang.Double SDDescription, + java.lang.Double NoDescription) { + this.serverName = serverName; + this.VoIDDescription = VoIDDescription; + this.SDDescription = SDDescription; + this.NoDescription = NoDescription; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return serverName; + case 1: + return VoIDDescription; + case 2: + return SDDescription; + case 3: + return NoDescription; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + serverName = (java.util.List) value$; + break; + case 1: + VoIDDescription = (java.lang.Double) value$; + break; + case 2: + SDDescription = (java.lang.Double) value$; + break; + case 3: + NoDescription = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'serverName' field. + * + * @return The value of the 'serverName' field. + */ + public java.util.List getServerName() { + return serverName; + } + + /** + * Sets the value of the 'serverName' field. + * + * @param value the value to set. 
+ */ + public void setServerName( + java.util.List value) { + this.serverName = value; + } + + /** + * Gets the value of the 'VoIDDescription' field. + * + * @return The value of the 'VoIDDescription' field. + */ + public double getVoIDDescription() { + return VoIDDescription; + } + + /** + * Sets the value of the 'VoIDDescription' field. + * + * @param value the value to set. + */ + public void setVoIDDescription(double value) { + this.VoIDDescription = value; + } + + /** + * Gets the value of the 'SDDescription' field. + * + * @return The value of the 'SDDescription' field. + */ + public double getSDDescription() { + return SDDescription; + } + + /** + * Sets the value of the 'SDDescription' field. + * + * @param value the value to set. + */ + public void setSDDescription(double value) { + this.SDDescription = value; + } + + /** + * Gets the value of the 'NoDescription' field. + * + * @return The value of the 'NoDescription' field. + */ + public double getNoDescription() { + return NoDescription; + } + + /** + * Sets the value of the 'NoDescription' field. + * + * @param value the value to set. + */ + public void setNoDescription(double value) { + this.NoDescription = value; + } + + /** + * Creates a new IndexViewDiscoverability RecordBuilder. + * + * @return A new IndexViewDiscoverability RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverability.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(); + } + + /** + * Creates a new IndexViewDiscoverability RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new IndexViewDiscoverability RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverability.Builder newBuilder( + sparqles.avro.analytics.IndexViewDiscoverability.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(other); + } + } + + /** + * Creates a new IndexViewDiscoverability RecordBuilder by copying an existing + * IndexViewDiscoverability instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewDiscoverability RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverability.Builder newBuilder( + sparqles.avro.analytics.IndexViewDiscoverability other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(other); + } + } + + /** RecordBuilder for IndexViewDiscoverability instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.util.List serverName; + private double VoIDDescription; + private double SDDescription; + private double NoDescription; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public IndexViewDiscoverability() {} - - /** All-args constructor. 
*/ - public IndexViewDiscoverability( - java.util.List serverName, - java.lang.Double VoIDDescription, - java.lang.Double SDDescription, - java.lang.Double NoDescription) { - this.serverName = serverName; - this.VoIDDescription = VoIDDescription; - this.SDDescription = SDDescription; - this.NoDescription = NoDescription; + private Builder(sparqles.avro.analytics.IndexViewDiscoverability.Builder other) { + super(other); + if (isValidValue(fields()[0], other.serverName)) { + this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.VoIDDescription)) { + this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.SDDescription)) { + this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.NoDescription)) { + this.NoDescription = data().deepCopy(fields()[3].schema(), other.NoDescription); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewDiscoverability instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewDiscoverability other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.serverName)) { + this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.VoIDDescription)) { + this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.SDDescription)) { + this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.NoDescription)) { + this.NoDescription = data().deepCopy(fields()[3].schema(), other.NoDescription); + fieldSetFlags()[3] = true; + } } - /** Creates a new IndexViewDiscoverability RecordBuilder */ - public static sparqles.avro.analytics.IndexViewDiscoverability.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(); + /** + * Gets the value of the 'serverName' field. + * + * @return The value. + */ + public java.util.List getServerName() { + return serverName; } - /** Creates a new IndexViewDiscoverability RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewDiscoverability.Builder newBuilder( - sparqles.avro.analytics.IndexViewDiscoverability.Builder other) { - return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(other); + /** + * Sets the value of the 'serverName' field. + * + * @param value The value of 'serverName'. + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewDiscoverability.Builder setServerName( + java.util.List value) { + validate(fields()[0], value); + this.serverName = value; + fieldSetFlags()[0] = true; + return this; } /** - * Creates a new IndexViewDiscoverability RecordBuilder by copying an existing - * IndexViewDiscoverability instance + * Checks whether the 'serverName' field has been set. + * + * @return True if the 'serverName' field has been set, false otherwise. */ - public static sparqles.avro.analytics.IndexViewDiscoverability.Builder newBuilder( - sparqles.avro.analytics.IndexViewDiscoverability other) { - return new sparqles.avro.analytics.IndexViewDiscoverability.Builder(other); + public boolean hasServerName() { + return fieldSetFlags()[0]; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Clears the value of the 'serverName' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearServerName() { + serverName = null; + fieldSetFlags()[0] = false; + return this; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return serverName; - case 1: - return VoIDDescription; - case 2: - return SDDescription; - case 3: - return NoDescription; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'VoIDDescription' field. + * + * @return The value. + */ + public double getVoIDDescription() { + return VoIDDescription; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - serverName = - (java.util.List) - value$; - break; - case 1: - VoIDDescription = (java.lang.Double) value$; - break; - case 2: - SDDescription = (java.lang.Double) value$; - break; - case 3: - NoDescription = (java.lang.Double) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'VoIDDescription' field. + * + * @param value The value of 'VoIDDescription'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverability.Builder setVoIDDescription( + double value) { + validate(fields()[1], value); + this.VoIDDescription = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'serverName' field. */ - public java.util.List getServerName() { - return serverName; + /** + * Checks whether the 'VoIDDescription' field has been set. + * + * @return True if the 'VoIDDescription' field has been set, false otherwise. + */ + public boolean hasVoIDDescription() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'serverName' field. + * Clears the value of the 'VoIDDescription' field. * - * @param value the value to set. + * @return This builder. */ - public void setServerName( - java.util.List value) { - this.serverName = value; + public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearVoIDDescription() { + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'VoIDDescription' field. */ - public java.lang.Double getVoIDDescription() { - return VoIDDescription; + /** + * Gets the value of the 'SDDescription' field. + * + * @return The value. + */ + public double getSDDescription() { + return SDDescription; } /** - * Sets the value of the 'VoIDDescription' field. + * Sets the value of the 'SDDescription' field. * - * @param value the value to set. 
+ * @param value The value of 'SDDescription'. + * @return This builder. */ - public void setVoIDDescription(java.lang.Double value) { - this.VoIDDescription = value; + public sparqles.avro.analytics.IndexViewDiscoverability.Builder setSDDescription(double value) { + validate(fields()[2], value); + this.SDDescription = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'SDDescription' field. */ - public java.lang.Double getSDDescription() { - return SDDescription; + /** + * Checks whether the 'SDDescription' field has been set. + * + * @return True if the 'SDDescription' field has been set, false otherwise. + */ + public boolean hasSDDescription() { + return fieldSetFlags()[2]; } /** - * Sets the value of the 'SDDescription' field. + * Clears the value of the 'SDDescription' field. * - * @param value the value to set. + * @return This builder. */ - public void setSDDescription(java.lang.Double value) { - this.SDDescription = value; + public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearSDDescription() { + fieldSetFlags()[2] = false; + return this; } - /** Gets the value of the 'NoDescription' field. */ - public java.lang.Double getNoDescription() { - return NoDescription; + /** + * Gets the value of the 'NoDescription' field. + * + * @return The value. + */ + public double getNoDescription() { + return NoDescription; } /** * Sets the value of the 'NoDescription' field. * - * @param value the value to set. + * @param value The value of 'NoDescription'. + * @return This builder. */ - public void setNoDescription(java.lang.Double value) { - this.NoDescription = value; + public sparqles.avro.analytics.IndexViewDiscoverability.Builder setNoDescription(double value) { + validate(fields()[3], value); + this.NoDescription = value; + fieldSetFlags()[3] = true; + return this; } - /** RecordBuilder for IndexViewDiscoverability instances. 
*/ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.util.List serverName; - private double VoIDDescription; - private double SDDescription; - private double NoDescription; + /** + * Checks whether the 'NoDescription' field has been set. + * + * @return True if the 'NoDescription' field has been set, false otherwise. + */ + public boolean hasNoDescription() { + return fieldSetFlags()[3]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewDiscoverability.SCHEMA$); - } + /** + * Clears the value of the 'NoDescription' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearNoDescription() { + fieldSetFlags()[3] = false; + return this; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewDiscoverability.Builder other) { - super(other); - if (isValidValue(fields()[0], other.serverName)) { - this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.VoIDDescription)) { - this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.SDDescription)) { - this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.NoDescription)) { - this.NoDescription = data().deepCopy(fields()[3].schema(), other.NoDescription); - fieldSetFlags()[3] = true; - } + @Override + @SuppressWarnings("unchecked") + public IndexViewDiscoverability build() { + try { + IndexViewDiscoverability record = new IndexViewDiscoverability(); + record.serverName = + fieldSetFlags()[0] + ? 
this.serverName + : (java.util.List) + defaultValue(fields()[0]); + record.VoIDDescription = + fieldSetFlags()[1] + ? this.VoIDDescription + : (java.lang.Double) defaultValue(fields()[1]); + record.SDDescription = + fieldSetFlags()[2] ? this.SDDescription : (java.lang.Double) defaultValue(fields()[2]); + record.NoDescription = + fieldSetFlags()[3] ? this.NoDescription : (java.lang.Double) defaultValue(fields()[3]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + long size0 = this.serverName.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewDiscoverabilityData e0 : this.serverName) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + out.writeDouble(this.VoIDDescription); + + out.writeDouble(this.SDDescription); + + 
out.writeDouble(this.NoDescription); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + long size0 = in.readArrayStart(); + java.util.List a0 = this.serverName; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("serverName").schema()); + this.serverName = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewDiscoverabilityData e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewDiscoverabilityData(); + } + e0.customDecode(in); + a0.add(e0); } - - /** Creates a Builder by copying an existing IndexViewDiscoverability instance */ - private Builder(sparqles.avro.analytics.IndexViewDiscoverability other) { - super(sparqles.avro.analytics.IndexViewDiscoverability.SCHEMA$); - if (isValidValue(fields()[0], other.serverName)) { - this.serverName = data().deepCopy(fields()[0].schema(), other.serverName); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.VoIDDescription)) { - this.VoIDDescription = data().deepCopy(fields()[1].schema(), other.VoIDDescription); - fieldSetFlags()[1] = true; + } + + this.VoIDDescription = in.readDouble(); + + this.SDDescription = in.readDouble(); + + this.NoDescription = in.readDouble(); + + } else { + for (int i = 0; i < 4; i++) { + switch (fieldOrder[i].pos()) { + case 0: + long size0 = in.readArrayStart(); + java.util.List a0 = + this.serverName; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("serverName").schema()); + this.serverName = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewDiscoverabilityData e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewDiscoverabilityData(); + } + e0.customDecode(in); + a0.add(e0); + } } - if (isValidValue(fields()[2], other.SDDescription)) { - this.SDDescription = data().deepCopy(fields()[2].schema(), other.SDDescription); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.NoDescription)) { - this.NoDescription = data().deepCopy(fields()[3].schema(), other.NoDescription); - fieldSetFlags()[3] = true; - } - } - - /** Gets the value of the 'serverName' field */ - public java.util.List - getServerName() { - return serverName; - } - - /** Sets the value of the 'serverName' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder setServerName( - java.util.List value) { - validate(fields()[0], value); - this.serverName = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'serverName' field has been set */ - public boolean hasServerName() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'serverName' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearServerName() { - serverName = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'VoIDDescription' field */ - public java.lang.Double getVoIDDescription() { - return VoIDDescription; - } + break; - /** Sets the value of the 'VoIDDescription' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder setVoIDDescription( - double value) { - validate(fields()[1], value); - this.VoIDDescription = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'VoIDDescription' field has been set */ - public boolean hasVoIDDescription() { - return fieldSetFlags()[1]; - } + case 1: + 
this.VoIDDescription = in.readDouble(); + break; - /** Clears the value of the 'VoIDDescription' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearVoIDDescription() { - fieldSetFlags()[1] = false; - return this; - } + case 2: + this.SDDescription = in.readDouble(); + break; - /** Gets the value of the 'SDDescription' field */ - public java.lang.Double getSDDescription() { - return SDDescription; - } - - /** Sets the value of the 'SDDescription' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder setSDDescription( - double value) { - validate(fields()[2], value); - this.SDDescription = value; - fieldSetFlags()[2] = true; - return this; - } + case 3: + this.NoDescription = in.readDouble(); + break; - /** Checks whether the 'SDDescription' field has been set */ - public boolean hasSDDescription() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'SDDescription' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearSDDescription() { - fieldSetFlags()[2] = false; - return this; - } - - /** Gets the value of the 'NoDescription' field */ - public java.lang.Double getNoDescription() { - return NoDescription; - } - - /** Sets the value of the 'NoDescription' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder setNoDescription( - double value) { - validate(fields()[3], value); - this.NoDescription = value; - fieldSetFlags()[3] = true; - return this; - } - - /** Checks whether the 'NoDescription' field has been set */ - public boolean hasNoDescription() { - return fieldSetFlags()[3]; - } - - /** Clears the value of the 'NoDescription' field */ - public sparqles.avro.analytics.IndexViewDiscoverability.Builder clearNoDescription() { - fieldSetFlags()[3] = false; - return this; - } - - @Override - public IndexViewDiscoverability build() { - try { - IndexViewDiscoverability record = new IndexViewDiscoverability(); - record.serverName = - fieldSetFlags()[0] - ? 
this.serverName - : (java.util.List< - sparqles.avro.analytics - .IndexViewDiscoverabilityData>) - defaultValue(fields()[0]); - record.VoIDDescription = - fieldSetFlags()[1] - ? this.VoIDDescription - : (java.lang.Double) defaultValue(fields()[1]); - record.SDDescription = - fieldSetFlags()[2] - ? this.SDDescription - : (java.lang.Double) defaultValue(fields()[2]); - record.NoDescription = - fieldSetFlags()[3] - ? this.NoDescription - : (java.lang.Double) defaultValue(fields()[3]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityData.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityData.java index ec5c700c..56f681f3 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityData.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityData.java @@ -5,230 +5,496 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexViewDiscoverabilityData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); - @Deprecated public java.lang.CharSequence key; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -1983791455052018937L; - @Deprecated - public java.util.List values; + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"values\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewDiscoverabilityData() {} + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewDiscoverabilityData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewDiscoverabilityData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewDiscoverabilityData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewDiscoverabilityData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.util.List values; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewDiscoverabilityData() {} + + /** + * All-args constructor. 
+ * + * @param key The new value for key + * @param values The new value for values + */ + public IndexViewDiscoverabilityData( + java.lang.CharSequence key, + java.util.List values) { + this.key = key; + this.values = values; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return values; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } - /** All-args constructor. */ - public IndexViewDiscoverabilityData( - java.lang.CharSequence key, - java.util.List values) { - this.key = key; - this.values = values; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + values = + (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } + + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. + */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'values' field. + * + * @return The value of the 'values' field. + */ + public java.util.List getValues() { + return values; + } + + /** + * Sets the value of the 'values' field. + * + * @param value the value to set. 
+ */ + public void setValues( + java.util.List value) { + this.values = value; + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Creates a new IndexViewDiscoverabilityData RecordBuilder. + * + * @return A new IndexViewDiscoverabilityData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(); + } + + /** + * Creates a new IndexViewDiscoverabilityData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewDiscoverabilityData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder newBuilder( + sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(other); } + } - /** Creates a new IndexViewDiscoverabilityData RecordBuilder */ - public static sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(); + /** + * Creates a new IndexViewDiscoverabilityData RecordBuilder by copying an existing + * IndexViewDiscoverabilityData instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewDiscoverabilityData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder newBuilder( + sparqles.avro.analytics.IndexViewDiscoverabilityData other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(other); } + } + + /** RecordBuilder for IndexViewDiscoverabilityData instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.util.List values; - /** Creates a new IndexViewDiscoverabilityData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder newBuilder( - sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder other) { - return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewDiscoverabilityData RecordBuilder by copying an existing - * IndexViewDiscoverabilityData instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder newBuilder( - sparqles.avro.analytics.IndexViewDiscoverabilityData other) { - return new sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewDiscoverabilityData instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.values)) { + this.values = data().deepCopy(fields()[1].schema(), other.values); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return key; - case 1: - return values; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'key' field. + * + * @return The value. + */ + public java.lang.CharSequence getKey() { + return key; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - key = (java.lang.CharSequence) value$; - break; - case 1: - values = - (java.util.List) - value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder setKey( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'key' field. */ - public java.lang.CharSequence getKey() { - return key; + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. + */ + public boolean hasKey() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'key' field. + * Clears the value of the 'key' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setKey(java.lang.CharSequence value) { - this.key = value; + public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'values' field. */ + /** + * Gets the value of the 'values' field. + * + * @return The value. + */ public java.util.List getValues() { - return values; + return values; } /** * Sets the value of the 'values' field. * - * @param value the value to set. + * @param value The value of 'values'. + * @return This builder. */ - public void setValues( - java.util.List value) { - this.values = value; + public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder setValues( + java.util.List value) { + validate(fields()[1], value); + this.values = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for IndexViewDiscoverabilityData instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'values' field has been set. + * + * @return True if the 'values' field has been set, false otherwise. + */ + public boolean hasValues() { + return fieldSetFlags()[1]; + } - private java.lang.CharSequence key; - private java.util.List values; + /** + * Clears the value of the 'values' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder clearValues() { + values = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewDiscoverabilityData.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public IndexViewDiscoverabilityData build() { + try { + IndexViewDiscoverabilityData record = new IndexViewDiscoverabilityData(); + record.key = + fieldSetFlags()[0] ? 
this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.values = + fieldSetFlags()[1] + ? this.values + : (java.util.List) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.values)) { - this.values = data().deepCopy(fields()[1].schema(), other.values); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing IndexViewDiscoverabilityData instance */ - private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityData other) { - super(sparqles.avro.analytics.IndexViewDiscoverabilityData.SCHEMA$); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.values)) { - this.values = data().deepCopy(fields()[1].schema(), other.values); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'key' field */ - public java.lang.CharSequence getKey() { - return key; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'key' field */ - public 
sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder setKey( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.key = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'key' field has been set */ - public boolean hasKey() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'key' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder clearKey() { - key = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); - /** Gets the value of the 'values' field */ - public java.util.List - getValues() { - return values; - } + long size0 = this.values.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewDiscoverabilityDataValues e0 : this.values) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'values' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder setValues( - java.util.List value) { - validate(fields()[1], value); - this.values = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? 
(Utf8) this.key : null); - /** Checks whether the 'values' field has been set */ - public boolean hasValues() { - return fieldSetFlags()[1]; + long size0 = in.readArrayStart(); + java.util.List a0 = this.values; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewDiscoverabilityDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues(); + } + e0.customDecode(in); + a0.add(e0); } + } - /** Clears the value of the 'values' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityData.Builder clearValues() { - values = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; - @Override - public IndexViewDiscoverabilityData build() { - try { - IndexViewDiscoverabilityData record = new IndexViewDiscoverabilityData(); - record.key = - fieldSetFlags()[0] - ? this.key - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.values = - fieldSetFlags()[1] - ? 
this.values - : (java.util.List< - sparqles.avro.analytics - .IndexViewDiscoverabilityDataValues>) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = + this.values; + if (a0 == null) { + a0 = + new SpecificData.Array< + sparqles.avro.analytics.IndexViewDiscoverabilityDataValues>( + (int) size0, SCHEMA$.getField("values").schema()); + this.values = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array< + sparqles.avro.analytics.IndexViewDiscoverabilityDataValues>) + a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewDiscoverabilityDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues(); + } + e0.customDecode(in); + a0.add(e0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityDataValues.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityDataValues.java index faa0a8c7..84051548 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityDataValues.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewDiscoverabilityDataValues.java @@ -5,223 +5,428 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexViewDiscoverabilityDataValues extends org.apache.avro.specific.SpecificRecordBase - implements 
org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); - @Deprecated public java.lang.CharSequence label; - @Deprecated public double value; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -5179837929451377377L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewDiscoverabilityDataValues() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewDiscoverabilityDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. */ - public IndexViewDiscoverabilityDataValues( - java.lang.CharSequence label, java.lang.Double value) { - this.label = label; - this.value = value; + /** + * Serializes this IndexViewDiscoverabilityDataValues to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewDiscoverabilityDataValues from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewDiscoverabilityDataValues instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewDiscoverabilityDataValues fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private double value; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewDiscoverabilityDataValues() {} + + /** + * All-args constructor. 
+ * + * @param label The new value for label + * @param value The new value for value + */ + public IndexViewDiscoverabilityDataValues(java.lang.CharSequence label, java.lang.Double value) { + this.label = label; + this.value = value; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new IndexViewDiscoverabilityDataValues RecordBuilder */ - public static sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(); + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public double getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. 
+ */ + public void setValue(double value) { + this.value = value; + } + + /** + * Creates a new IndexViewDiscoverabilityDataValues RecordBuilder. + * + * @return A new IndexViewDiscoverabilityDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(); + } + + /** + * Creates a new IndexViewDiscoverabilityDataValues RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewDiscoverabilityDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(other); + } + } + + /** + * Creates a new IndexViewDiscoverabilityDataValues RecordBuilder by copying an existing + * IndexViewDiscoverabilityDataValues instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewDiscoverabilityDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewDiscoverabilityDataValues other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(other); + } + } + + /** RecordBuilder for IndexViewDiscoverabilityDataValues instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private double value; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewDiscoverabilityDataValues RecordBuilder by copying an existing Builder + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder newBuilder( - sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder other) { - return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } /** - * Creates a new IndexViewDiscoverabilityDataValues RecordBuilder by copying an existing - * IndexViewDiscoverabilityDataValues instance + * Creates a Builder by copying an existing IndexViewDiscoverabilityDataValues instance + * + * @param other The existing instance to copy. 
*/ - public static sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder newBuilder( - sparqles.avro.analytics.IndexViewDiscoverabilityDataValues other) { - return new sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityDataValues other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Gets the value of the 'label' field. + * + * @return The value. + */ + public java.lang.CharSequence getLabel() { + return label; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return label; - case 1: - return value; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - label = (java.lang.CharSequence) value$; - break; - case 1: - value = (java.lang.Double) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. 
+ */ + public boolean hasLabel() { + return fieldSetFlags()[0]; } - /** Gets the value of the 'label' field. */ - public java.lang.CharSequence getLabel() { - return label; + /** + * Clears the value of the 'label' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; } /** - * Sets the value of the 'label' field. + * Gets the value of the 'value' field. * - * @param value the value to set. + * @return The value. */ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public double getValue() { + return value; } - /** Gets the value of the 'value' field. */ - public java.lang.Double getValue() { - return value; + /** + * Sets the value of the 'value' field. + * + * @param value The value of 'value'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder setValue( + double value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; } /** - * Sets the value of the 'value' field. + * Checks whether the 'value' field has been set. * - * @param value the value to set. + * @return True if the 'value' field has been set, false otherwise. */ - public void setValue(java.lang.Double value) { - this.value = value; + public boolean hasValue() { + return fieldSetFlags()[1]; } - /** RecordBuilder for IndexViewDiscoverabilityDataValues instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase< - IndexViewDiscoverabilityDataValues> - implements org.apache.avro.data.RecordBuilder { + /** + * Clears the value of the 'value' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; + } - private java.lang.CharSequence label; - private double value; + @Override + @SuppressWarnings("unchecked") + public IndexViewDiscoverabilityDataValues build() { + try { + IndexViewDiscoverabilityDataValues record = new IndexViewDiscoverabilityDataValues(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? this.value : (java.lang.Double) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder other) { - super(other); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing IndexViewDiscoverabilityDataValues instance */ - private Builder(sparqles.avro.analytics.IndexViewDiscoverabilityDataValues other) { - super(sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.SCHEMA$); - if 
(isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'label' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder setLabel( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.label = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); - /** Clears the value of the 'label' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder clearLabel() { - label = null; - fieldSetFlags()[0] = false; - return this; - } + out.writeDouble(this.value); + } - /** Gets the value of the 'value' field */ - public java.lang.Double getValue() { - return value; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? 
(Utf8) this.label : null); - /** Sets the value of the 'value' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder setValue( - double value) { - validate(fields()[1], value); - this.value = value; - fieldSetFlags()[1] = true; - return this; - } + this.value = in.readDouble(); - /** Checks whether the 'value' field has been set */ - public boolean hasValue() { - return fieldSetFlags()[1]; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); + break; - /** Clears the value of the 'value' field */ - public sparqles.avro.analytics.IndexViewDiscoverabilityDataValues.Builder clearValue() { - fieldSetFlags()[1] = false; - return this; - } + case 1: + this.value = in.readDouble(); + break; - @Override - public IndexViewDiscoverabilityDataValues build() { - try { - IndexViewDiscoverabilityDataValues record = - new IndexViewDiscoverabilityDataValues(); - record.label = - fieldSetFlags()[0] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.value = - fieldSetFlags()[1] - ? 
this.value - : (java.lang.Double) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewInterData.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewInterData.java index 47ed1cac..ac245877 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewInterData.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewInterData.java @@ -5,283 +5,574 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexViewInterData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"IndexViewInterData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); - @Deprecated public java.lang.CharSequence key; - @Deprecated public java.lang.CharSequence color; - @Deprecated public java.util.List data; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewInterData() {} - - /** All-args constructor. 
*/ - public IndexViewInterData( - java.lang.CharSequence key, - java.lang.CharSequence color, - java.util.List data) { - this.key = key; - this.color = color; - this.data = data; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -9129232541384086334L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewInterData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewInterData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewInterData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewInterData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewInterData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.lang.CharSequence color; + private java.util.List data; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewInterData() {} + + /** + * All-args constructor. + * + * @param key The new value for key + * @param color The new value for color + * @param data The new value for data + */ + public IndexViewInterData( + java.lang.CharSequence key, + java.lang.CharSequence color, + java.util.List data) { + this.key = key; + this.color = color; + this.data = data; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. 
Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return color; + case 2: + return data; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + color = (java.lang.CharSequence) value$; + break; + case 2: + data = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - /** Creates a new IndexViewInterData RecordBuilder */ - public static sparqles.avro.analytics.IndexViewInterData.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewInterData.Builder(); + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } + + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. + */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'color' field. + * + * @return The value of the 'color' field. + */ + public java.lang.CharSequence getColor() { + return color; + } + + /** + * Sets the value of the 'color' field. + * + * @param value the value to set. + */ + public void setColor(java.lang.CharSequence value) { + this.color = value; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value of the 'data' field. + */ + public java.util.List getData() { + return data; + } + + /** + * Sets the value of the 'data' field. + * + * @param value the value to set. 
+ */ + public void setData(java.util.List value) { + this.data = value; + } + + /** + * Creates a new IndexViewInterData RecordBuilder. + * + * @return A new IndexViewInterData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInterData.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewInterData.Builder(); + } + + /** + * Creates a new IndexViewInterData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewInterData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInterData.Builder newBuilder( + sparqles.avro.analytics.IndexViewInterData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewInterData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewInterData.Builder(other); } + } + + /** + * Creates a new IndexViewInterData RecordBuilder by copying an existing IndexViewInterData + * instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewInterData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInterData.Builder newBuilder( + sparqles.avro.analytics.IndexViewInterData other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewInterData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewInterData.Builder(other); + } + } + + /** RecordBuilder for IndexViewInterData instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.lang.CharSequence color; + private java.util.List data; - /** Creates a new IndexViewInterData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewInterData.Builder newBuilder( - sparqles.avro.analytics.IndexViewInterData.Builder other) { - return new sparqles.avro.analytics.IndexViewInterData.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewInterData RecordBuilder by copying an existing IndexViewInterData - * instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexViewInterData.Builder newBuilder( - sparqles.avro.analytics.IndexViewInterData other) { - return new sparqles.avro.analytics.IndexViewInterData.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewInterData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.color)) { + this.color = data().deepCopy(fields()[1].schema(), other.color); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewInterData instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewInterData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.color)) { + this.color = data().deepCopy(fields()[1].schema(), other.color); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return key; - case 1: - return color; - case 2: - return data; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'key' field. + * + * @return The value. + */ + public java.lang.CharSequence getKey() { + return key; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - key = (java.lang.CharSequence) value$; - break; - case 1: - color = (java.lang.CharSequence) value$; - break; - case 2: - data = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInterData.Builder setKey(java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'key' field. */ - public java.lang.CharSequence getKey() { - return key; + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. 
+ */ + public boolean hasKey() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'key' field. + * Clears the value of the 'key' field. * - * @param value the value to set. + * @return This builder. */ - public void setKey(java.lang.CharSequence value) { - this.key = value; + public sparqles.avro.analytics.IndexViewInterData.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'color' field. */ + /** + * Gets the value of the 'color' field. + * + * @return The value. + */ public java.lang.CharSequence getColor() { - return color; + return color; } /** * Sets the value of the 'color' field. * - * @param value the value to set. + * @param value The value of 'color'. + * @return This builder. */ - public void setColor(java.lang.CharSequence value) { - this.color = value; + public sparqles.avro.analytics.IndexViewInterData.Builder setColor( + java.lang.CharSequence value) { + validate(fields()[1], value); + this.color = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'data' field. */ + /** + * Checks whether the 'color' field has been set. + * + * @return True if the 'color' field has been set, false otherwise. + */ + public boolean hasColor() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'color' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInterData.Builder clearColor() { + color = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value. + */ public java.util.List getData() { - return data; + return data; } /** * Sets the value of the 'data' field. * - * @param value the value to set. + * @param value The value of 'data'. + * @return This builder. 
*/ - public void setData(java.util.List value) { - this.data = value; + public sparqles.avro.analytics.IndexViewInterData.Builder setData( + java.util.List value) { + validate(fields()[2], value); + this.data = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for IndexViewInterData instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence key; - private java.lang.CharSequence color; - private java.util.List data; + /** + * Checks whether the 'data' field has been set. + * + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[2]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewInterData.SCHEMA$); - } + /** + * Clears the value of the 'data' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInterData.Builder clearData() { + data = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewInterData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.color)) { - this.color = data().deepCopy(fields()[1].schema(), other.color); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.data)) { - this.data = data().deepCopy(fields()[2].schema(), other.data); - fieldSetFlags()[2] = true; - } + @Override + @SuppressWarnings("unchecked") + public IndexViewInterData build() { + try { + IndexViewInterData record = new IndexViewInterData(); + record.key = + fieldSetFlags()[0] ? this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.color = + fieldSetFlags()[1] ? 
this.color : (java.lang.CharSequence) defaultValue(fields()[1]); + record.data = + fieldSetFlags()[2] + ? this.data + : (java.util.List) + defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); + + out.writeString(this.color); + + long size0 = this.data.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewInterDataValues e0 : this.data) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? 
(Utf8) this.key : null); + + this.color = in.readString(this.color instanceof Utf8 ? (Utf8) this.color : null); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewInterDataValues e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewInterDataValues(); + } + e0.customDecode(in); + a0.add(e0); } - - /** Creates a Builder by copying an existing IndexViewInterData instance */ - private Builder(sparqles.avro.analytics.IndexViewInterData other) { - super(sparqles.avro.analytics.IndexViewInterData.SCHEMA$); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.color)) { - this.color = data().deepCopy(fields()[1].schema(), other.color); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.data)) { - this.data = data().deepCopy(fields()[2].schema(), other.data); - fieldSetFlags()[2] = true; + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; + + case 1: + this.color = in.readString(this.color instanceof Utf8 ? (Utf8) this.color : null); + break; + + case 2: + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewInterDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewInterDataValues(); + } + e0.customDecode(in); + a0.add(e0); + } } - } - - /** Gets the value of the 'key' field */ - public java.lang.CharSequence getKey() { - return key; - } + break; - /** Sets the value of the 'key' field */ - public sparqles.avro.analytics.IndexViewInterData.Builder setKey( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.key = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'key' field has been set */ - public boolean hasKey() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'key' field */ - public sparqles.avro.analytics.IndexViewInterData.Builder clearKey() { - key = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'color' field */ - public java.lang.CharSequence getColor() { - return color; - } - - /** Sets the value of the 'color' field */ - public sparqles.avro.analytics.IndexViewInterData.Builder setColor( - java.lang.CharSequence value) { - validate(fields()[1], value); - this.color = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'color' field has been set */ - public boolean hasColor() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'color' field */ - public sparqles.avro.analytics.IndexViewInterData.Builder clearColor() { - color = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'data' field */ - public java.util.List getData() { - return data; - } - - /** Sets the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewInterData.Builder setData( - java.util.List value) { - validate(fields()[2], value); - this.data = value; - fieldSetFlags()[2] = true; - 
return this; - } - - /** Checks whether the 'data' field has been set */ - public boolean hasData() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewInterData.Builder clearData() { - data = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public IndexViewInterData build() { - try { - IndexViewInterData record = new IndexViewInterData(); - record.key = - fieldSetFlags()[0] - ? this.key - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.color = - fieldSetFlags()[1] - ? this.color - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.data = - fieldSetFlags()[2] - ? this.data - : (java.util.List) - defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewInterDataValues.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewInterDataValues.java index 3d54787f..684efd66 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewInterDataValues.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewInterDataValues.java @@ -5,217 +5,424 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexViewInterDataValues extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); - @Deprecated public java.lang.CharSequence label; - @Deprecated public double value; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2362170334397943618L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewInterDataValues() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. */ - public IndexViewInterDataValues(java.lang.CharSequence label, java.lang.Double value) { - this.label = label; - this.value = value; + /** + * Serializes this IndexViewInterDataValues to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewInterDataValues from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewInterDataValues instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewInterDataValues fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private double value; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewInterDataValues() {} + + /** + * All-args constructor. 
+ * + * @param label The new value for label + * @param value The new value for value + */ + public IndexViewInterDataValues(java.lang.CharSequence label, java.lang.Double value) { + this.label = label; + this.value = value; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new IndexViewInterDataValues RecordBuilder */ - public static sparqles.avro.analytics.IndexViewInterDataValues.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(); + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public double getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. 
+ */ + public void setValue(double value) { + this.value = value; + } + + /** + * Creates a new IndexViewInterDataValues RecordBuilder. + * + * @return A new IndexViewInterDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInterDataValues.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(); + } + + /** + * Creates a new IndexViewInterDataValues RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewInterDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInterDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewInterDataValues.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(other); } + } + + /** + * Creates a new IndexViewInterDataValues RecordBuilder by copying an existing + * IndexViewInterDataValues instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewInterDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInterDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewInterDataValues other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(other); + } + } + + /** RecordBuilder for IndexViewInterDataValues instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private double value; - /** Creates a new IndexViewInterDataValues RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewInterDataValues.Builder newBuilder( - sparqles.avro.analytics.IndexViewInterDataValues.Builder other) { - return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewInterDataValues RecordBuilder by copying an existing - * IndexViewInterDataValues instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexViewInterDataValues.Builder newBuilder( - sparqles.avro.analytics.IndexViewInterDataValues other) { - return new sparqles.avro.analytics.IndexViewInterDataValues.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewInterDataValues.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewInterDataValues instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewInterDataValues other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return label; - case 1: - return value; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'label' field. + * + * @return The value. + */ + public java.lang.CharSequence getLabel() { + return label; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - label = (java.lang.CharSequence) value$; - break; - case 1: - value = (java.lang.Double) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInterDataValues.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'label' field. */ - public java.lang.CharSequence getLabel() { - return label; + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. + */ + public boolean hasLabel() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'label' field. + * Clears the value of the 'label' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public sparqles.avro.analytics.IndexViewInterDataValues.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'value' field. */ - public java.lang.Double getValue() { - return value; + /** + * Gets the value of the 'value' field. + * + * @return The value. + */ + public double getValue() { + return value; } /** * Sets the value of the 'value' field. * - * @param value the value to set. + * @param value The value of 'value'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInterDataValues.Builder setValue(double value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'value' field has been set. + * + * @return True if the 'value' field has been set, false otherwise. */ - public void setValue(java.lang.Double value) { - this.value = value; + public boolean hasValue() { + return fieldSetFlags()[1]; } - /** RecordBuilder for IndexViewInterDataValues instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Clears the value of the 'value' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInterDataValues.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; + } - private java.lang.CharSequence label; - private double value; + @Override + @SuppressWarnings("unchecked") + public IndexViewInterDataValues build() { + try { + IndexViewInterDataValues record = new IndexViewInterDataValues(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? 
this.value : (java.lang.Double) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewInterDataValues.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewInterDataValues.Builder other) { - super(other); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing IndexViewInterDataValues instance */ - private Builder(sparqles.avro.analytics.IndexViewInterDataValues other) { - super(sparqles.avro.analytics.IndexViewInterDataValues.SCHEMA$); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + @Override + public void 
readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'label' field */ - public sparqles.avro.analytics.IndexViewInterDataValues.Builder setLabel( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.label = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); - /** Clears the value of the 'label' field */ - public sparqles.avro.analytics.IndexViewInterDataValues.Builder clearLabel() { - label = null; - fieldSetFlags()[0] = false; - return this; - } + out.writeDouble(this.value); + } - /** Gets the value of the 'value' field */ - public java.lang.Double getValue() { - return value; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); - /** Sets the value of the 'value' field */ - public sparqles.avro.analytics.IndexViewInterDataValues.Builder setValue(double value) { - validate(fields()[1], value); - this.value = value; - fieldSetFlags()[1] = true; - return this; - } + this.value = in.readDouble(); - /** Checks whether the 'value' field has been set */ - public boolean hasValue() { - return fieldSetFlags()[1]; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? 
(Utf8) this.label : null); + break; - /** Clears the value of the 'value' field */ - public sparqles.avro.analytics.IndexViewInterDataValues.Builder clearValue() { - fieldSetFlags()[1] = false; - return this; - } + case 1: + this.value = in.readDouble(); + break; - @Override - public IndexViewInterDataValues build() { - try { - IndexViewInterDataValues record = new IndexViewInterDataValues(); - record.label = - fieldSetFlags()[0] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.value = - fieldSetFlags()[1] - ? this.value - : (java.lang.Double) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewInteroperability.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewInteroperability.java index c09e8c11..fdee980f 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewInteroperability.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewInteroperability.java @@ -5,161 +5,396 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class IndexViewInteroperability extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"IndexViewInteroperability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}"); - @Deprecated public java.util.List data; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 9215712904603231389L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewInteroperability() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewInteroperability\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewInterDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}"); - /** All-args constructor. 
*/ - public IndexViewInteroperability( - java.util.List data) { - this.data = data; - } + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewInteroperability to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewInteroperability from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewInteroperability instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewInteroperability fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.util.List data; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewInteroperability() {} + + /** + * All-args constructor. + * + * @param data The new value for data + */ + public IndexViewInteroperability( + java.util.List data) { + this.data = data; + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return data; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new IndexViewInteroperability RecordBuilder */ - public static sparqles.avro.analytics.IndexViewInteroperability.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewInteroperability.Builder(); + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + data = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'data' field. + * + * @return The value of the 'data' field. 
+ */ + public java.util.List getData() { + return data; + } - /** Creates a new IndexViewInteroperability RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewInteroperability.Builder newBuilder( - sparqles.avro.analytics.IndexViewInteroperability.Builder other) { - return new sparqles.avro.analytics.IndexViewInteroperability.Builder(other); + /** + * Sets the value of the 'data' field. + * + * @param value the value to set. + */ + public void setData(java.util.List value) { + this.data = value; + } + + /** + * Creates a new IndexViewInteroperability RecordBuilder. + * + * @return A new IndexViewInteroperability RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInteroperability.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewInteroperability.Builder(); + } + + /** + * Creates a new IndexViewInteroperability RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewInteroperability RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInteroperability.Builder newBuilder( + sparqles.avro.analytics.IndexViewInteroperability.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewInteroperability.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewInteroperability.Builder(other); } + } - /** - * Creates a new IndexViewInteroperability RecordBuilder by copying an existing - * IndexViewInteroperability instance - */ - public static sparqles.avro.analytics.IndexViewInteroperability.Builder newBuilder( - sparqles.avro.analytics.IndexViewInteroperability other) { - return new sparqles.avro.analytics.IndexViewInteroperability.Builder(other); + /** + * Creates a new IndexViewInteroperability RecordBuilder by copying an existing + * IndexViewInteroperability instance. + * + * @param other The existing instance to copy. 
+ * @return A new IndexViewInteroperability RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewInteroperability.Builder newBuilder( + sparqles.avro.analytics.IndexViewInteroperability other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewInteroperability.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewInteroperability.Builder(other); } + } + + /** RecordBuilder for IndexViewInteroperability instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + private java.util.List data; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return data; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.analytics.IndexViewInteroperability.Builder other) { + super(other); + if (isValidValue(fields()[0], other.data)) { + this.data = data().deepCopy(fields()[0].schema(), other.data); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - data = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing IndexViewInteroperability instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewInteroperability other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.data)) { + this.data = data().deepCopy(fields()[0].schema(), other.data); + fieldSetFlags()[0] = true; + } } - /** Gets the value of the 'data' field. */ + /** + * Gets the value of the 'data' field. + * + * @return The value. + */ public java.util.List getData() { - return data; + return data; } /** * Sets the value of the 'data' field. * - * @param value the value to set. + * @param value The value of 'data'. + * @return This builder. */ - public void setData(java.util.List value) { - this.data = value; + public sparqles.avro.analytics.IndexViewInteroperability.Builder setData( + java.util.List value) { + validate(fields()[0], value); + this.data = value; + fieldSetFlags()[0] = true; + return this; } - /** RecordBuilder for IndexViewInteroperability instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'data' field has been set. + * + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'data' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewInteroperability.Builder clearData() { + data = null; + fieldSetFlags()[0] = false; + return this; + } - private java.util.List data; + @Override + @SuppressWarnings("unchecked") + public IndexViewInteroperability build() { + try { + IndexViewInteroperability record = new IndexViewInteroperability(); + record.data = + fieldSetFlags()[0] + ? 
this.data + : (java.util.List) + defaultValue(fields()[0]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewInteroperability.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewInteroperability.Builder other) { - super(other); - if (isValidValue(fields()[0], other.data)) { - this.data = data().deepCopy(fields()[0].schema(), other.data); - fieldSetFlags()[0] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing IndexViewInteroperability instance */ - private Builder(sparqles.avro.analytics.IndexViewInteroperability other) { - super(sparqles.avro.analytics.IndexViewInteroperability.SCHEMA$); - if (isValidValue(fields()[0], other.data)) { - this.data = data().deepCopy(fields()[0].schema(), other.data); - fieldSetFlags()[0] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'data' field */ - public java.util.List getData() { - return data; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewInteroperability.Builder setData( - java.util.List value) { - validate(fields()[0], value); - this.data = value; - 
fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'data' field has been set */ - public boolean hasData() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + long size0 = this.data.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewInterData e0 : this.data) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Clears the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewInteroperability.Builder clearData() { - data = null; - fieldSetFlags()[0] = false; - return this; + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewInterData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewInterData(); + } + e0.customDecode(in); + a0.add(e0); } + } - @Override - public IndexViewInteroperability build() { - try { - IndexViewInteroperability record = new IndexViewInteroperability(); - record.data = - fieldSetFlags()[0] - ? 
this.data - : (java.util.List) - defaultValue(fields()[0]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + } else { + for (int i = 0; i < 1; i++) { + switch (fieldOrder[i].pos()) { + case 0: + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewInterData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewInterData(); + } + e0.customDecode(in); + a0.add(e0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformance.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformance.java index d9f15433..1d4f0ff0 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformance.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformance.java @@ -5,220 +5,483 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class IndexViewPerformance extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"IndexViewPerformance\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}"); - @Deprecated public long threshold; - @Deprecated public java.util.List data; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 8447908175316675294L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewPerformance() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewPerformance\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"threshold\",\"type\":\"long\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceData\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final 
BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewPerformance to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewPerformance from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewPerformance instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewPerformance fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private long threshold; + private java.util.List data; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). 
+ */ + public IndexViewPerformance() {} + + /** + * All-args constructor. + * + * @param threshold The new value for threshold + * @param data The new value for data + */ + public IndexViewPerformance( + java.lang.Long threshold, + java.util.List data) { + this.threshold = threshold; + this.data = data; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return threshold; + case 1: + return data; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } - /** All-args constructor. */ - public IndexViewPerformance( - java.lang.Long threshold, - java.util.List data) { - this.threshold = threshold; - this.data = data; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + threshold = (java.lang.Long) value$; + break; + case 1: + data = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'threshold' field. + * + * @return The value of the 'threshold' field. + */ + public long getThreshold() { + return threshold; + } + + /** + * Sets the value of the 'threshold' field. + * + * @param value the value to set. + */ + public void setThreshold(long value) { + this.threshold = value; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value of the 'data' field. + */ + public java.util.List getData() { + return data; + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Sets the value of the 'data' field. + * + * @param value the value to set. 
+ */ + public void setData(java.util.List value) { + this.data = value; + } + + /** + * Creates a new IndexViewPerformance RecordBuilder. + * + * @return A new IndexViewPerformance RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformance.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewPerformance.Builder(); + } + + /** + * Creates a new IndexViewPerformance RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewPerformance RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformance.Builder newBuilder( + sparqles.avro.analytics.IndexViewPerformance.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewPerformance.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewPerformance.Builder(other); } + } - /** Creates a new IndexViewPerformance RecordBuilder */ - public static sparqles.avro.analytics.IndexViewPerformance.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewPerformance.Builder(); + /** + * Creates a new IndexViewPerformance RecordBuilder by copying an existing IndexViewPerformance + * instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewPerformance RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformance.Builder newBuilder( + sparqles.avro.analytics.IndexViewPerformance other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewPerformance.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewPerformance.Builder(other); } + } + + /** RecordBuilder for IndexViewPerformance instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { - /** Creates a new IndexViewPerformance RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewPerformance.Builder newBuilder( - sparqles.avro.analytics.IndexViewPerformance.Builder other) { - return new sparqles.avro.analytics.IndexViewPerformance.Builder(other); + private long threshold; + private java.util.List data; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewPerformance RecordBuilder by copying an existing IndexViewPerformance - * instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexViewPerformance.Builder newBuilder( - sparqles.avro.analytics.IndexViewPerformance other) { - return new sparqles.avro.analytics.IndexViewPerformance.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewPerformance.Builder other) { + super(other); + if (isValidValue(fields()[0], other.threshold)) { + this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.data)) { + this.data = data().deepCopy(fields()[1].schema(), other.data); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewPerformance instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewPerformance other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.threshold)) { + this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.data)) { + this.data = data().deepCopy(fields()[1].schema(), other.data); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return threshold; - case 1: - return data; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'threshold' field. + * + * @return The value. + */ + public long getThreshold() { + return threshold; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - threshold = (java.lang.Long) value$; - break; - case 1: - data = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'threshold' field. + * + * @param value The value of 'threshold'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformance.Builder setThreshold(long value) { + validate(fields()[0], value); + this.threshold = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'threshold' field. */ - public java.lang.Long getThreshold() { - return threshold; + /** + * Checks whether the 'threshold' field has been set. + * + * @return True if the 'threshold' field has been set, false otherwise. + */ + public boolean hasThreshold() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'threshold' field. + * Clears the value of the 'threshold' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setThreshold(java.lang.Long value) { - this.threshold = value; + public sparqles.avro.analytics.IndexViewPerformance.Builder clearThreshold() { + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'data' field. */ + /** + * Gets the value of the 'data' field. + * + * @return The value. + */ public java.util.List getData() { - return data; + return data; } /** * Sets the value of the 'data' field. * - * @param value the value to set. + * @param value The value of 'data'. + * @return This builder. */ - public void setData(java.util.List value) { - this.data = value; + public sparqles.avro.analytics.IndexViewPerformance.Builder setData( + java.util.List value) { + validate(fields()[1], value); + this.data = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for IndexViewPerformance instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'data' field has been set. + * + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[1]; + } - private long threshold; - private java.util.List data; + /** + * Clears the value of the 'data' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformance.Builder clearData() { + data = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewPerformance.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public IndexViewPerformance build() { + try { + IndexViewPerformance record = new IndexViewPerformance(); + record.threshold = + fieldSetFlags()[0] ? this.threshold : (java.lang.Long) defaultValue(fields()[0]); + record.data = + fieldSetFlags()[1] + ? 
this.data + : (java.util.List) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewPerformance.Builder other) { - super(other); - if (isValidValue(fields()[0], other.threshold)) { - this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.data)) { - this.data = data().deepCopy(fields()[1].schema(), other.data); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing IndexViewPerformance instance */ - private Builder(sparqles.avro.analytics.IndexViewPerformance other) { - super(sparqles.avro.analytics.IndexViewPerformance.SCHEMA$); - if (isValidValue(fields()[0], other.threshold)) { - this.threshold = data().deepCopy(fields()[0].schema(), other.threshold); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.data)) { - this.data = data().deepCopy(fields()[1].schema(), other.data); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'threshold' field */ - public java.lang.Long getThreshold() { - return threshold; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'threshold' field */ - public sparqles.avro.analytics.IndexViewPerformance.Builder setThreshold(long value) { - validate(fields()[0], 
value); - this.threshold = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'threshold' field has been set */ - public boolean hasThreshold() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'threshold' field */ - public sparqles.avro.analytics.IndexViewPerformance.Builder clearThreshold() { - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeLong(this.threshold); - /** Gets the value of the 'data' field */ - public java.util.List getData() { - return data; - } + long size0 = this.data.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewPerformanceData e0 : this.data) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewPerformance.Builder setData( - java.util.List value) { - validate(fields()[1], value); - this.data = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.threshold = in.readLong(); - /** Checks whether the 'data' field has been set */ - public boolean hasData() { - return fieldSetFlags()[1]; + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new 
SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewPerformanceData e0 = (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewPerformanceData(); + } + e0.customDecode(in); + a0.add(e0); } + } - /** Clears the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewPerformance.Builder clearData() { - data = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.threshold = in.readLong(); + break; - @Override - public IndexViewPerformance build() { - try { - IndexViewPerformance record = new IndexViewPerformance(); - record.threshold = - fieldSetFlags()[0] - ? this.threshold - : (java.lang.Long) defaultValue(fields()[0]); - record.data = - fieldSetFlags()[1] - ? this.data - : (java.util.List) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewPerformanceData e0 = + (ga0 != null ? 
ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewPerformanceData(); + } + e0.customDecode(in); + a0.add(e0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceData.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceData.java index 4c7333cf..28fb566c 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceData.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceData.java @@ -5,288 +5,578 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexViewPerformanceData extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"IndexViewPerformanceData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); - @Deprecated public java.lang.CharSequence key; - @Deprecated public java.lang.CharSequence color; - @Deprecated public java.util.List data; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. 
- */ - public IndexViewPerformanceData() {} - - /** All-args constructor. */ - public IndexViewPerformanceData( - java.lang.CharSequence key, - java.lang.CharSequence color, - java.util.List data) { - this.key = key; - this.color = color; - this.data = data; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 1417101332083056095L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewPerformanceData\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"color\",\"type\":\"string\"},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this IndexViewPerformanceData to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewPerformanceData from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewPerformanceData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewPerformanceData fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence key; + private java.lang.CharSequence color; + private java.util.List data; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewPerformanceData() {} + + /** + * All-args constructor. 
+ * + * @param key The new value for key + * @param color The new value for color + * @param data The new value for data + */ + public IndexViewPerformanceData( + java.lang.CharSequence key, + java.lang.CharSequence color, + java.util.List data) { + this.key = key; + this.color = color; + this.data = data; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return key; + case 1: + return color; + case 2: + return data; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + key = (java.lang.CharSequence) value$; + break; + case 1: + color = (java.lang.CharSequence) value$; + break; + case 2: + data = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } - - /** Creates a new IndexViewPerformanceData RecordBuilder */ - public static sparqles.avro.analytics.IndexViewPerformanceData.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(); + } + + /** + * Gets the value of the 'key' field. + * + * @return The value of the 'key' field. + */ + public java.lang.CharSequence getKey() { + return key; + } + + /** + * Sets the value of the 'key' field. + * + * @param value the value to set. + */ + public void setKey(java.lang.CharSequence value) { + this.key = value; + } + + /** + * Gets the value of the 'color' field. + * + * @return The value of the 'color' field. 
+ */ + public java.lang.CharSequence getColor() { + return color; + } + + /** + * Sets the value of the 'color' field. + * + * @param value the value to set. + */ + public void setColor(java.lang.CharSequence value) { + this.color = value; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value of the 'data' field. + */ + public java.util.List getData() { + return data; + } + + /** + * Sets the value of the 'data' field. + * + * @param value the value to set. + */ + public void setData( + java.util.List value) { + this.data = value; + } + + /** + * Creates a new IndexViewPerformanceData RecordBuilder. + * + * @return A new IndexViewPerformanceData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformanceData.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(); + } + + /** + * Creates a new IndexViewPerformanceData RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewPerformanceData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformanceData.Builder newBuilder( + sparqles.avro.analytics.IndexViewPerformanceData.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(other); } + } + + /** + * Creates a new IndexViewPerformanceData RecordBuilder by copying an existing + * IndexViewPerformanceData instance. + * + * @param other The existing instance to copy. 
+ * @return A new IndexViewPerformanceData RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformanceData.Builder newBuilder( + sparqles.avro.analytics.IndexViewPerformanceData other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(other); + } + } + + /** RecordBuilder for IndexViewPerformanceData instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence key; + private java.lang.CharSequence color; + private java.util.List data; - /** Creates a new IndexViewPerformanceData RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewPerformanceData.Builder newBuilder( - sparqles.avro.analytics.IndexViewPerformanceData.Builder other) { - return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewPerformanceData RecordBuilder by copying an existing - * IndexViewPerformanceData instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
*/ - public static sparqles.avro.analytics.IndexViewPerformanceData.Builder newBuilder( - sparqles.avro.analytics.IndexViewPerformanceData other) { - return new sparqles.avro.analytics.IndexViewPerformanceData.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewPerformanceData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.color)) { + this.color = data().deepCopy(fields()[1].schema(), other.color); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewPerformanceData instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.IndexViewPerformanceData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.key)) { + this.key = data().deepCopy(fields()[0].schema(), other.key); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.color)) { + this.color = data().deepCopy(fields()[1].schema(), other.color); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return key; - case 1: - return color; - case 2: - return data; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'key' field. + * + * @return The value. 
+ */ + public java.lang.CharSequence getKey() { + return key; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - key = (java.lang.CharSequence) value$; - break; - case 1: - color = (java.lang.CharSequence) value$; - break; - case 2: - data = - (java.util.List) - value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'key' field. + * + * @param value The value of 'key'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformanceData.Builder setKey( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.key = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'key' field. */ - public java.lang.CharSequence getKey() { - return key; + /** + * Checks whether the 'key' field has been set. + * + * @return True if the 'key' field has been set, false otherwise. + */ + public boolean hasKey() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'key' field. + * Clears the value of the 'key' field. * - * @param value the value to set. + * @return This builder. */ - public void setKey(java.lang.CharSequence value) { - this.key = value; + public sparqles.avro.analytics.IndexViewPerformanceData.Builder clearKey() { + key = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'color' field. */ + /** + * Gets the value of the 'color' field. + * + * @return The value. + */ public java.lang.CharSequence getColor() { - return color; + return color; } /** * Sets the value of the 'color' field. * - * @param value the value to set. + * @param value The value of 'color'. + * @return This builder. 
*/ - public void setColor(java.lang.CharSequence value) { - this.color = value; + public sparqles.avro.analytics.IndexViewPerformanceData.Builder setColor( + java.lang.CharSequence value) { + validate(fields()[1], value); + this.color = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'data' field. */ + /** + * Checks whether the 'color' field has been set. + * + * @return True if the 'color' field has been set, false otherwise. + */ + public boolean hasColor() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'color' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformanceData.Builder clearColor() { + color = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'data' field. + * + * @return The value. + */ public java.util.List getData() { - return data; + return data; } /** * Sets the value of the 'data' field. * - * @param value the value to set. + * @param value The value of 'data'. + * @return This builder. */ - public void setData( - java.util.List value) { - this.data = value; + public sparqles.avro.analytics.IndexViewPerformanceData.Builder setData( + java.util.List value) { + validate(fields()[2], value); + this.data = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for IndexViewPerformanceData instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence key; - private java.lang.CharSequence color; - private java.util.List data; + /** + * Checks whether the 'data' field has been set. + * + * @return True if the 'data' field has been set, false otherwise. 
+ */ + public boolean hasData() { + return fieldSetFlags()[2]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewPerformanceData.SCHEMA$); - } + /** + * Clears the value of the 'data' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformanceData.Builder clearData() { + data = null; + fieldSetFlags()[2] = false; + return this; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewPerformanceData.Builder other) { - super(other); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.color)) { - this.color = data().deepCopy(fields()[1].schema(), other.color); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.data)) { - this.data = data().deepCopy(fields()[2].schema(), other.data); - fieldSetFlags()[2] = true; - } + @Override + @SuppressWarnings("unchecked") + public IndexViewPerformanceData build() { + try { + IndexViewPerformanceData record = new IndexViewPerformanceData(); + record.key = + fieldSetFlags()[0] ? this.key : (java.lang.CharSequence) defaultValue(fields()[0]); + record.color = + fieldSetFlags()[1] ? this.color : (java.lang.CharSequence) defaultValue(fields()[1]); + record.data = + fieldSetFlags()[2] + ? 
this.data + : (java.util.List) + defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.key); + + out.writeString(this.color); + + long size0 = this.data.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (sparqles.avro.analytics.IndexViewPerformanceDataValues e0 : this.data) { + actualSize0++; + out.startItem(); + e0.customEncode(out); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + + this.color = in.readString(this.color instanceof Utf8 ? 
(Utf8) this.color : null); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewPerformanceDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewPerformanceDataValues(); + } + e0.customDecode(in); + a0.add(e0); } - - /** Creates a Builder by copying an existing IndexViewPerformanceData instance */ - private Builder(sparqles.avro.analytics.IndexViewPerformanceData other) { - super(sparqles.avro.analytics.IndexViewPerformanceData.SCHEMA$); - if (isValidValue(fields()[0], other.key)) { - this.key = data().deepCopy(fields()[0].schema(), other.key); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.color)) { - this.color = data().deepCopy(fields()[1].schema(), other.color); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.data)) { - this.data = data().deepCopy(fields()[2].schema(), other.data); - fieldSetFlags()[2] = true; + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.key = in.readString(this.key instanceof Utf8 ? (Utf8) this.key : null); + break; + + case 1: + this.color = in.readString(this.color instanceof Utf8 ? (Utf8) this.color : null); + break; + + case 2: + long size0 = in.readArrayStart(); + java.util.List a0 = this.data; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("data").schema()); + this.data = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? 
(SpecificData.Array) + a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + sparqles.avro.analytics.IndexViewPerformanceDataValues e0 = + (ga0 != null ? ga0.peek() : null); + if (e0 == null) { + e0 = new sparqles.avro.analytics.IndexViewPerformanceDataValues(); + } + e0.customDecode(in); + a0.add(e0); + } } - } - - /** Gets the value of the 'key' field */ - public java.lang.CharSequence getKey() { - return key; - } + break; - /** Sets the value of the 'key' field */ - public sparqles.avro.analytics.IndexViewPerformanceData.Builder setKey( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.key = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'key' field has been set */ - public boolean hasKey() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'key' field */ - public sparqles.avro.analytics.IndexViewPerformanceData.Builder clearKey() { - key = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'color' field */ - public java.lang.CharSequence getColor() { - return color; - } - - /** Sets the value of the 'color' field */ - public sparqles.avro.analytics.IndexViewPerformanceData.Builder setColor( - java.lang.CharSequence value) { - validate(fields()[1], value); - this.color = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'color' field has been set */ - public boolean hasColor() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'color' field */ - public sparqles.avro.analytics.IndexViewPerformanceData.Builder clearColor() { - color = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'data' field */ - public java.util.List getData() { - return data; - } - - /** Sets the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewPerformanceData.Builder setData( - java.util.List value) { - validate(fields()[2], value); - 
this.data = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'data' field has been set */ - public boolean hasData() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'data' field */ - public sparqles.avro.analytics.IndexViewPerformanceData.Builder clearData() { - data = null; - fieldSetFlags()[2] = false; - return this; - } - - @Override - public IndexViewPerformanceData build() { - try { - IndexViewPerformanceData record = new IndexViewPerformanceData(); - record.key = - fieldSetFlags()[0] - ? this.key - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.color = - fieldSetFlags()[1] - ? this.color - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.data = - fieldSetFlags()[2] - ? this.data - : (java.util.List< - sparqles.avro.analytics - .IndexViewPerformanceDataValues>) - defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceDataValues.java b/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceDataValues.java index e450adbc..cf6f8b72 100644 --- a/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceDataValues.java +++ b/backend/src/main/java/sparqles/avro/analytics/IndexViewPerformanceDataValues.java @@ -5,219 +5,427 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class IndexViewPerformanceDataValues extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final 
org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); - @Deprecated public java.lang.CharSequence label; - @Deprecated public double value; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -61428646288184520L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public IndexViewPerformanceDataValues() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"IndexViewPerformanceDataValues\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"double\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder( + SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } - /** All-args constructor. */ - public IndexViewPerformanceDataValues(java.lang.CharSequence label, java.lang.Double value) { - this.label = label; - this.value = value; + /** + * Serializes this IndexViewPerformanceDataValues to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a IndexViewPerformanceDataValues from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a IndexViewPerformanceDataValues instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static IndexViewPerformanceDataValues fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence label; + private double value; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public IndexViewPerformanceDataValues() {} + + /** + * All-args constructor. 
+ * + * @param label The new value for label + * @param value The new value for value + */ + public IndexViewPerformanceDataValues(java.lang.CharSequence label, java.lang.Double value) { + this.label = label; + this.value = value; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return label; + case 1: + return value; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + label = (java.lang.CharSequence) value$; + break; + case 1: + value = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new IndexViewPerformanceDataValues RecordBuilder */ - public static sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder newBuilder() { - return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(); + /** + * Gets the value of the 'label' field. + * + * @return The value of the 'label' field. + */ + public java.lang.CharSequence getLabel() { + return label; + } + + /** + * Sets the value of the 'label' field. + * + * @param value the value to set. + */ + public void setLabel(java.lang.CharSequence value) { + this.label = value; + } + + /** + * Gets the value of the 'value' field. + * + * @return The value of the 'value' field. + */ + public double getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * + * @param value the value to set. 
+ */ + public void setValue(double value) { + this.value = value; + } + + /** + * Creates a new IndexViewPerformanceDataValues RecordBuilder. + * + * @return A new IndexViewPerformanceDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder newBuilder() { + return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(); + } + + /** + * Creates a new IndexViewPerformanceDataValues RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new IndexViewPerformanceDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(other); } + } + + /** + * Creates a new IndexViewPerformanceDataValues RecordBuilder by copying an existing + * IndexViewPerformanceDataValues instance. + * + * @param other The existing instance to copy. + * @return A new IndexViewPerformanceDataValues RecordBuilder + */ + public static sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder newBuilder( + sparqles.avro.analytics.IndexViewPerformanceDataValues other) { + if (other == null) { + return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(); + } else { + return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(other); + } + } + + /** RecordBuilder for IndexViewPerformanceDataValues instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence label; + private double value; - /** Creates a new IndexViewPerformanceDataValues RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder newBuilder( - sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder other) { - return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(other); + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } /** - * Creates a new IndexViewPerformanceDataValues RecordBuilder by copying an existing - * IndexViewPerformanceDataValues instance + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public static sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder newBuilder( - sparqles.avro.analytics.IndexViewPerformanceDataValues other) { - return new sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder(other); + private Builder(sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder other) { + super(other); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing IndexViewPerformanceDataValues instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.IndexViewPerformanceDataValues other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.label)) { + this.label = data().deepCopy(fields()[0].schema(), other.label); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return label; - case 1: - return value; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'label' field. + * + * @return The value. + */ + public java.lang.CharSequence getLabel() { + return label; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - label = (java.lang.CharSequence) value$; - break; - case 1: - value = (java.lang.Double) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'label' field. + * + * @param value The value of 'label'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder setLabel( + java.lang.CharSequence value) { + validate(fields()[0], value); + this.label = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'label' field. */ - public java.lang.CharSequence getLabel() { - return label; + /** + * Checks whether the 'label' field has been set. + * + * @return True if the 'label' field has been set, false otherwise. + */ + public boolean hasLabel() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'label' field. + * Clears the value of the 'label' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setLabel(java.lang.CharSequence value) { - this.label = value; + public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder clearLabel() { + label = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'value' field. */ - public java.lang.Double getValue() { - return value; + /** + * Gets the value of the 'value' field. + * + * @return The value. + */ + public double getValue() { + return value; } /** * Sets the value of the 'value' field. * - * @param value the value to set. + * @param value The value of 'value'. + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder setValue(double value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'value' field has been set. + * + * @return True if the 'value' field has been set, false otherwise. */ - public void setValue(java.lang.Double value) { - this.value = value; + public boolean hasValue() { + return fieldSetFlags()[1]; } - /** RecordBuilder for IndexViewPerformanceDataValues instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase< - IndexViewPerformanceDataValues> - implements org.apache.avro.data.RecordBuilder { + /** + * Clears the value of the 'value' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder clearValue() { + fieldSetFlags()[1] = false; + return this; + } - private java.lang.CharSequence label; - private double value; + @Override + @SuppressWarnings("unchecked") + public IndexViewPerformanceDataValues build() { + try { + IndexViewPerformanceDataValues record = new IndexViewPerformanceDataValues(); + record.label = + fieldSetFlags()[0] ? this.label : (java.lang.CharSequence) defaultValue(fields()[0]); + record.value = + fieldSetFlags()[1] ? 
this.value : (java.lang.Double) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.IndexViewPerformanceDataValues.SCHEMA$); - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) + MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder other) { - super(other); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Creates a Builder by copying an existing IndexViewPerformanceDataValues instance */ - private Builder(sparqles.avro.analytics.IndexViewPerformanceDataValues other) { - super(sparqles.avro.analytics.IndexViewPerformanceDataValues.SCHEMA$); - if (isValidValue(fields()[0], other.label)) { - this.label = data().deepCopy(fields()[0].schema(), other.label); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.value)) { - this.value = data().deepCopy(fields()[1].schema(), other.value); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) + MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'label' field */ - public java.lang.CharSequence getLabel() { - return label; - } + @Override + 
public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'label' field */ - public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder setLabel( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.label = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 'label' field has been set */ - public boolean hasLabel() { - return fieldSetFlags()[0]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.label); - /** Clears the value of the 'label' field */ - public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder clearLabel() { - label = null; - fieldSetFlags()[0] = false; - return this; - } + out.writeDouble(this.value); + } - /** Gets the value of the 'value' field */ - public java.lang.Double getValue() { - return value; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.label = in.readString(this.label instanceof Utf8 ? (Utf8) this.label : null); - /** Sets the value of the 'value' field */ - public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder setValue( - double value) { - validate(fields()[1], value); - this.value = value; - fieldSetFlags()[1] = true; - return this; - } + this.value = in.readDouble(); - /** Checks whether the 'value' field has been set */ - public boolean hasValue() { - return fieldSetFlags()[1]; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.label = in.readString(this.label instanceof Utf8 ? 
(Utf8) this.label : null); + break; - /** Clears the value of the 'value' field */ - public sparqles.avro.analytics.IndexViewPerformanceDataValues.Builder clearValue() { - fieldSetFlags()[1] = false; - return this; - } + case 1: + this.value = in.readDouble(); + break; - @Override - public IndexViewPerformanceDataValues build() { - try { - IndexViewPerformanceDataValues record = new IndexViewPerformanceDataValues(); - record.label = - fieldSetFlags()[0] - ? this.label - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.value = - fieldSetFlags()[1] - ? this.value - : (java.lang.Double) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/InteroperabilityView.java b/backend/src/main/java/sparqles/avro/analytics/InteroperabilityView.java index 192f066e..8b9c0595 100644 --- a/backend/src/main/java/sparqles/avro/analytics/InteroperabilityView.java +++ b/backend/src/main/java/sparqles/avro/analytics/InteroperabilityView.java @@ -5,345 +5,671 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class InteroperabilityView extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"InteroperabilityView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"nbCompliantSPARQL1Features\",\"type\":\"int\"},{\"name\":\"nbCompliantSPARQL11Features\",\"type\":\"int\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public int nbCompliantSPARQL1Features; - @Deprecated public int nbCompliantSPARQL11Features; - @Deprecated public long lastUpdate; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -1989266332641995296L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"InteroperabilityView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"nbCompliantSPARQL1Features\",\"type\":\"int\"},{\"name\":\"nbCompliantSPARQL11Features\",\"type\":\"int\"},{\"name\":\"lastUpdate\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final 
BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this InteroperabilityView to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a InteroperabilityView from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a InteroperabilityView instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static InteroperabilityView fromByteBuffer(java.nio.ByteBuffer b) + throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private int nbCompliantSPARQL1Features; + private int nbCompliantSPARQL11Features; + private long lastUpdate; + + /** + * Default constructor. 
Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public InteroperabilityView() {} + + /** + * All-args constructor. + * + * @param endpoint The new value for endpoint + * @param nbCompliantSPARQL1Features The new value for nbCompliantSPARQL1Features + * @param nbCompliantSPARQL11Features The new value for nbCompliantSPARQL11Features + * @param lastUpdate The new value for lastUpdate + */ + public InteroperabilityView( + sparqles.avro.Endpoint endpoint, + java.lang.Integer nbCompliantSPARQL1Features, + java.lang.Integer nbCompliantSPARQL11Features, + java.lang.Long lastUpdate) { + this.endpoint = endpoint; + this.nbCompliantSPARQL1Features = nbCompliantSPARQL1Features; + this.nbCompliantSPARQL11Features = nbCompliantSPARQL11Features; + this.lastUpdate = lastUpdate; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return nbCompliantSPARQL1Features; + case 2: + return nbCompliantSPARQL11Features; + case 3: + return lastUpdate; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + nbCompliantSPARQL1Features = (java.lang.Integer) value$; + break; + case 2: + nbCompliantSPARQL11Features = (java.lang.Integer) value$; + break; + case 3: + lastUpdate = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'nbCompliantSPARQL1Features' field. + * + * @return The value of the 'nbCompliantSPARQL1Features' field. + */ + public int getNbCompliantSPARQL1Features() { + return nbCompliantSPARQL1Features; + } + + /** + * Sets the value of the 'nbCompliantSPARQL1Features' field. + * + * @param value the value to set. + */ + public void setNbCompliantSPARQL1Features(int value) { + this.nbCompliantSPARQL1Features = value; + } + + /** + * Gets the value of the 'nbCompliantSPARQL11Features' field. + * + * @return The value of the 'nbCompliantSPARQL11Features' field. + */ + public int getNbCompliantSPARQL11Features() { + return nbCompliantSPARQL11Features; + } + + /** + * Sets the value of the 'nbCompliantSPARQL11Features' field. + * + * @param value the value to set. + */ + public void setNbCompliantSPARQL11Features(int value) { + this.nbCompliantSPARQL11Features = value; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value of the 'lastUpdate' field. + */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. 
+ * + * @param value the value to set. + */ + public void setLastUpdate(long value) { + this.lastUpdate = value; + } + + /** + * Creates a new InteroperabilityView RecordBuilder. + * + * @return A new InteroperabilityView RecordBuilder + */ + public static sparqles.avro.analytics.InteroperabilityView.Builder newBuilder() { + return new sparqles.avro.analytics.InteroperabilityView.Builder(); + } + + /** + * Creates a new InteroperabilityView RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new InteroperabilityView RecordBuilder + */ + public static sparqles.avro.analytics.InteroperabilityView.Builder newBuilder( + sparqles.avro.analytics.InteroperabilityView.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.InteroperabilityView.Builder(); + } else { + return new sparqles.avro.analytics.InteroperabilityView.Builder(other); + } + } + + /** + * Creates a new InteroperabilityView RecordBuilder by copying an existing InteroperabilityView + * instance. + * + * @param other The existing instance to copy. + * @return A new InteroperabilityView RecordBuilder + */ + public static sparqles.avro.analytics.InteroperabilityView.Builder newBuilder( + sparqles.avro.analytics.InteroperabilityView other) { + if (other == null) { + return new sparqles.avro.analytics.InteroperabilityView.Builder(); + } else { + return new sparqles.avro.analytics.InteroperabilityView.Builder(other); + } + } + + /** RecordBuilder for InteroperabilityView instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private int nbCompliantSPARQL1Features; + private int nbCompliantSPARQL11Features; + private long lastUpdate; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public InteroperabilityView() {} - - /** All-args constructor. */ - public InteroperabilityView( - sparqles.avro.Endpoint endpoint, - java.lang.Integer nbCompliantSPARQL1Features, - java.lang.Integer nbCompliantSPARQL11Features, - java.lang.Long lastUpdate) { - this.endpoint = endpoint; - this.nbCompliantSPARQL1Features = nbCompliantSPARQL1Features; - this.nbCompliantSPARQL11Features = nbCompliantSPARQL11Features; - this.lastUpdate = lastUpdate; + private Builder(sparqles.avro.analytics.InteroperabilityView.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.nbCompliantSPARQL1Features)) { + this.nbCompliantSPARQL1Features = + data().deepCopy(fields()[1].schema(), other.nbCompliantSPARQL1Features); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.nbCompliantSPARQL11Features)) { + this.nbCompliantSPARQL11Features = + 
data().deepCopy(fields()[2].schema(), other.nbCompliantSPARQL11Features); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[3].schema(), other.lastUpdate); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new InteroperabilityView RecordBuilder */ - public static sparqles.avro.analytics.InteroperabilityView.Builder newBuilder() { - return new sparqles.avro.analytics.InteroperabilityView.Builder(); + /** + * Creates a Builder by copying an existing InteroperabilityView instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.analytics.InteroperabilityView other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.nbCompliantSPARQL1Features)) { + this.nbCompliantSPARQL1Features = + data().deepCopy(fields()[1].schema(), other.nbCompliantSPARQL1Features); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.nbCompliantSPARQL11Features)) { + this.nbCompliantSPARQL11Features = + data().deepCopy(fields()[2].schema(), other.nbCompliantSPARQL11Features); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[3].schema(), other.lastUpdate); + fieldSetFlags()[3] = true; + } } - /** Creates a new InteroperabilityView RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.InteroperabilityView.Builder newBuilder( - sparqles.avro.analytics.InteroperabilityView.Builder other) { - return new sparqles.avro.analytics.InteroperabilityView.Builder(other); + /** + * Gets the value of the 'endpoint' field. 
+ * + * @return The value. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; } /** - * Creates a new InteroperabilityView RecordBuilder by copying an existing InteroperabilityView - * instance + * Sets the value of the 'endpoint' field. + * + * @param value The value of 'endpoint'. + * @return This builder. */ - public static sparqles.avro.analytics.InteroperabilityView.Builder newBuilder( - sparqles.avro.analytics.InteroperabilityView other) { - return new sparqles.avro.analytics.InteroperabilityView.Builder(other); + public sparqles.avro.analytics.InteroperabilityView.Builder setEndpoint( + sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return nbCompliantSPARQL1Features; - case 2: - return nbCompliantSPARQL11Features; - case 3: - return lastUpdate; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); } + } + return endpointBuilder; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - nbCompliantSPARQL1Features = (java.lang.Integer) value$; - break; - case 2: - nbCompliantSPARQL11Features = (java.lang.Integer) value$; - break; - case 3: - lastUpdate = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.InteroperabilityView.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; } - /** Gets the value of the 'endpoint' field. */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; } /** - * Sets the value of the 'endpoint' field. + * Clears the value of the 'endpoint' field. * - * @param value the value to set. + * @return This builder. */ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.analytics.InteroperabilityView.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'nbCompliantSPARQL1Features' field. */ - public java.lang.Integer getNbCompliantSPARQL1Features() { - return nbCompliantSPARQL1Features; + /** + * Gets the value of the 'nbCompliantSPARQL1Features' field. + * + * @return The value. + */ + public int getNbCompliantSPARQL1Features() { + return nbCompliantSPARQL1Features; } /** * Sets the value of the 'nbCompliantSPARQL1Features' field. 
* - * @param value the value to set. + * @param value The value of 'nbCompliantSPARQL1Features'. + * @return This builder. */ - public void setNbCompliantSPARQL1Features(java.lang.Integer value) { - this.nbCompliantSPARQL1Features = value; + public sparqles.avro.analytics.InteroperabilityView.Builder setNbCompliantSPARQL1Features( + int value) { + validate(fields()[1], value); + this.nbCompliantSPARQL1Features = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'nbCompliantSPARQL11Features' field. */ - public java.lang.Integer getNbCompliantSPARQL11Features() { - return nbCompliantSPARQL11Features; + /** + * Checks whether the 'nbCompliantSPARQL1Features' field has been set. + * + * @return True if the 'nbCompliantSPARQL1Features' field has been set, false otherwise. + */ + public boolean hasNbCompliantSPARQL1Features() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'nbCompliantSPARQL1Features' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.InteroperabilityView.Builder clearNbCompliantSPARQL1Features() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'nbCompliantSPARQL11Features' field. + * + * @return The value. + */ + public int getNbCompliantSPARQL11Features() { + return nbCompliantSPARQL11Features; } /** * Sets the value of the 'nbCompliantSPARQL11Features' field. * - * @param value the value to set. + * @param value The value of 'nbCompliantSPARQL11Features'. + * @return This builder. */ - public void setNbCompliantSPARQL11Features(java.lang.Integer value) { - this.nbCompliantSPARQL11Features = value; + public sparqles.avro.analytics.InteroperabilityView.Builder setNbCompliantSPARQL11Features( + int value) { + validate(fields()[2], value); + this.nbCompliantSPARQL11Features = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'lastUpdate' field. 
*/ - public java.lang.Long getLastUpdate() { - return lastUpdate; + /** + * Checks whether the 'nbCompliantSPARQL11Features' field has been set. + * + * @return True if the 'nbCompliantSPARQL11Features' field has been set, false otherwise. + */ + public boolean hasNbCompliantSPARQL11Features() { + return fieldSetFlags()[2]; } /** - * Sets the value of the 'lastUpdate' field. + * Clears the value of the 'nbCompliantSPARQL11Features' field. * - * @param value the value to set. + * @return This builder. */ - public void setLastUpdate(java.lang.Long value) { - this.lastUpdate = value; + public sparqles.avro.analytics.InteroperabilityView.Builder clearNbCompliantSPARQL11Features() { + fieldSetFlags()[2] = false; + return this; } - /** RecordBuilder for InteroperabilityView instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value. + */ + public long getLastUpdate() { + return lastUpdate; + } - private sparqles.avro.Endpoint endpoint; - private int nbCompliantSPARQL1Features; - private int nbCompliantSPARQL11Features; - private long lastUpdate; + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value The value of 'lastUpdate'. + * @return This builder. + */ + public sparqles.avro.analytics.InteroperabilityView.Builder setLastUpdate(long value) { + validate(fields()[3], value); + this.lastUpdate = value; + fieldSetFlags()[3] = true; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.InteroperabilityView.SCHEMA$); - } + /** + * Checks whether the 'lastUpdate' field has been set. + * + * @return True if the 'lastUpdate' field has been set, false otherwise. 
+ */ + public boolean hasLastUpdate() { + return fieldSetFlags()[3]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.InteroperabilityView.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.nbCompliantSPARQL1Features)) { - this.nbCompliantSPARQL1Features = - data().deepCopy(fields()[1].schema(), other.nbCompliantSPARQL1Features); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.nbCompliantSPARQL11Features)) { - this.nbCompliantSPARQL11Features = - data().deepCopy(fields()[2].schema(), other.nbCompliantSPARQL11Features); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[3].schema(), other.lastUpdate); - fieldSetFlags()[3] = true; - } - } + /** + * Clears the value of the 'lastUpdate' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.InteroperabilityView.Builder clearLastUpdate() { + fieldSetFlags()[3] = false; + return this; + } - /** Creates a Builder by copying an existing InteroperabilityView instance */ - private Builder(sparqles.avro.analytics.InteroperabilityView other) { - super(sparqles.avro.analytics.InteroperabilityView.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.nbCompliantSPARQL1Features)) { - this.nbCompliantSPARQL1Features = - data().deepCopy(fields()[1].schema(), other.nbCompliantSPARQL1Features); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.nbCompliantSPARQL11Features)) { - this.nbCompliantSPARQL11Features = - data().deepCopy(fields()[2].schema(), other.nbCompliantSPARQL11Features); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[3].schema(), other.lastUpdate); - fieldSetFlags()[3] = true; - } + @Override + @SuppressWarnings("unchecked") + public InteroperabilityView build() { + try { + InteroperabilityView record = new InteroperabilityView(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } + record.nbCompliantSPARQL1Features = + fieldSetFlags()[1] + ? this.nbCompliantSPARQL1Features + : (java.lang.Integer) defaultValue(fields()[1]); + record.nbCompliantSPARQL11Features = + fieldSetFlags()[2] + ? this.nbCompliantSPARQL11Features + : (java.lang.Integer) defaultValue(fields()[2]); + record.lastUpdate = + fieldSetFlags()[3] ? 
this.lastUpdate : (java.lang.Long) defaultValue(fields()[3]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder setEndpoint( - sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Gets the value of the 'nbCompliantSPARQL1Features' field */ - public java.lang.Integer getNbCompliantSPARQL1Features() { - return nbCompliantSPARQL1Features; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Sets the value of the 'nbCompliantSPARQL1Features' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder setNbCompliantSPARQL1Features( - int value) { - 
validate(fields()[1], value); - this.nbCompliantSPARQL1Features = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); - /** Checks whether the 'nbCompliantSPARQL1Features' field has been set */ - public boolean hasNbCompliantSPARQL1Features() { - return fieldSetFlags()[1]; - } + out.writeInt(this.nbCompliantSPARQL1Features); - /** Clears the value of the 'nbCompliantSPARQL1Features' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder - clearNbCompliantSPARQL1Features() { - fieldSetFlags()[1] = false; - return this; - } + out.writeInt(this.nbCompliantSPARQL11Features); - /** Gets the value of the 'nbCompliantSPARQL11Features' field */ - public java.lang.Integer getNbCompliantSPARQL11Features() { - return nbCompliantSPARQL11Features; - } + out.writeLong(this.lastUpdate); + } - /** Sets the value of the 'nbCompliantSPARQL11Features' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder setNbCompliantSPARQL11Features( - int value) { - validate(fields()[2], value); - this.nbCompliantSPARQL11Features = value; - fieldSetFlags()[2] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); - /** Checks whether the 'nbCompliantSPARQL11Features' field has been set */ - public boolean hasNbCompliantSPARQL11Features() { - return fieldSetFlags()[2]; - } + this.nbCompliantSPARQL1Features = in.readInt(); - /** Clears the value of the 'nbCompliantSPARQL11Features' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder - clearNbCompliantSPARQL11Features() { - fieldSetFlags()[2] = false; - return this; - } + 
this.nbCompliantSPARQL11Features = in.readInt(); - /** Gets the value of the 'lastUpdate' field */ - public java.lang.Long getLastUpdate() { - return lastUpdate; - } + this.lastUpdate = in.readLong(); - /** Sets the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder setLastUpdate(long value) { - validate(fields()[3], value); - this.lastUpdate = value; - fieldSetFlags()[3] = true; - return this; - } + } else { + for (int i = 0; i < 4; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; - /** Checks whether the 'lastUpdate' field has been set */ - public boolean hasLastUpdate() { - return fieldSetFlags()[3]; - } + case 1: + this.nbCompliantSPARQL1Features = in.readInt(); + break; - /** Clears the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.InteroperabilityView.Builder clearLastUpdate() { - fieldSetFlags()[3] = false; - return this; - } + case 2: + this.nbCompliantSPARQL11Features = in.readInt(); + break; - @Override - public InteroperabilityView build() { - try { - InteroperabilityView record = new InteroperabilityView(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.nbCompliantSPARQL1Features = - fieldSetFlags()[1] - ? this.nbCompliantSPARQL1Features - : (java.lang.Integer) defaultValue(fields()[1]); - record.nbCompliantSPARQL11Features = - fieldSetFlags()[2] - ? this.nbCompliantSPARQL11Features - : (java.lang.Integer) defaultValue(fields()[2]); - record.lastUpdate = - fieldSetFlags()[3] - ? 
this.lastUpdate - : (java.lang.Long) defaultValue(fields()[3]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + case 3: + this.lastUpdate = in.readLong(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/analytics/PerformanceView.java b/backend/src/main/java/sparqles/avro/analytics/PerformanceView.java index 5c9f5c45..59ff8411 100644 --- a/backend/src/main/java/sparqles/avro/analytics/PerformanceView.java +++ b/backend/src/main/java/sparqles/avro/analytics/PerformanceView.java @@ -5,513 +5,920 @@ */ package sparqles.avro.analytics; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class PerformanceView extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"PerformanceView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"askMeanCold\",\"type\":\"double\"},{\"name\":\"askMeanWarm\",\"type\":\"double\"},{\"name\":\"joinMeanCold\",\"type\":\"double\"},{\"name\":\"joinMeanWarm\",\"type\":\"double\"},{\"name\":\"lastUpdate\",\"type\":\"long\"},{\"name\":\"threshold\",\"type\":\"long\"}]}"); - @Deprecated public sparqles.avro.Endpoint 
endpoint; - @Deprecated public double askMeanCold; - @Deprecated public double askMeanWarm; - @Deprecated public double joinMeanCold; - @Deprecated public double joinMeanWarm; - @Deprecated public long lastUpdate; - @Deprecated public long threshold; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public PerformanceView() {} - - /** All-args constructor. */ - public PerformanceView( - sparqles.avro.Endpoint endpoint, - java.lang.Double askMeanCold, - java.lang.Double askMeanWarm, - java.lang.Double joinMeanCold, - java.lang.Double joinMeanWarm, - java.lang.Long lastUpdate, - java.lang.Long threshold) { - this.endpoint = endpoint; - this.askMeanCold = askMeanCold; - this.askMeanWarm = askMeanWarm; - this.joinMeanCold = joinMeanCold; - this.joinMeanWarm = joinMeanWarm; - this.lastUpdate = lastUpdate; - this.threshold = threshold; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new PerformanceView RecordBuilder */ - public static sparqles.avro.analytics.PerformanceView.Builder newBuilder() { - return new sparqles.avro.analytics.PerformanceView.Builder(); - } - - /** Creates a new PerformanceView RecordBuilder by copying an existing Builder */ - public static sparqles.avro.analytics.PerformanceView.Builder newBuilder( - sparqles.avro.analytics.PerformanceView.Builder other) { - return new sparqles.avro.analytics.PerformanceView.Builder(other); - } - - /** - * Creates a new PerformanceView RecordBuilder by copying an existing PerformanceView instance - */ - public static sparqles.avro.analytics.PerformanceView.Builder newBuilder( - sparqles.avro.analytics.PerformanceView other) { - return new sparqles.avro.analytics.PerformanceView.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. 
Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return askMeanCold; - case 2: - return askMeanWarm; - case 3: - return joinMeanCold; - case 4: - return joinMeanWarm; - case 5: - return lastUpdate; - case 6: - return threshold; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 5954098580558978738L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"PerformanceView\",\"namespace\":\"sparqles.avro.analytics\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"askMeanCold\",\"type\":\"double\"},{\"name\":\"askMeanWarm\",\"type\":\"double\"},{\"name\":\"joinMeanCold\",\"type\":\"double\"},{\"name\":\"joinMeanWarm\",\"type\":\"double\"},{\"name\":\"lastUpdate\",\"type\":\"long\"},{\"name\":\"threshold\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this PerformanceView to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a PerformanceView from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a PerformanceView instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static PerformanceView fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private double askMeanCold; + private double askMeanWarm; + private double joinMeanCold; + private double joinMeanWarm; + private long lastUpdate; + private long threshold; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). 
+ */ + public PerformanceView() {} + + /** + * All-args constructor. + * + * @param endpoint The new value for endpoint + * @param askMeanCold The new value for askMeanCold + * @param askMeanWarm The new value for askMeanWarm + * @param joinMeanCold The new value for joinMeanCold + * @param joinMeanWarm The new value for joinMeanWarm + * @param lastUpdate The new value for lastUpdate + * @param threshold The new value for threshold + */ + public PerformanceView( + sparqles.avro.Endpoint endpoint, + java.lang.Double askMeanCold, + java.lang.Double askMeanWarm, + java.lang.Double joinMeanCold, + java.lang.Double joinMeanWarm, + java.lang.Long lastUpdate, + java.lang.Long threshold) { + this.endpoint = endpoint; + this.askMeanCold = askMeanCold; + this.askMeanWarm = askMeanWarm; + this.joinMeanCold = joinMeanCold; + this.joinMeanWarm = joinMeanWarm; + this.lastUpdate = lastUpdate; + this.threshold = threshold; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return askMeanCold; + case 2: + return askMeanWarm; + case 3: + return joinMeanCold; + case 4: + return joinMeanWarm; + case 5: + return lastUpdate; + case 6: + return threshold; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + askMeanCold = (java.lang.Double) value$; + break; + case 2: + askMeanWarm = (java.lang.Double) value$; + break; + case 3: + joinMeanCold = (java.lang.Double) value$; + break; + case 4: + joinMeanWarm = (java.lang.Double) value$; + break; + case 5: + lastUpdate = (java.lang.Long) value$; + break; + case 6: + threshold = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'askMeanCold' field. + * + * @return The value of the 'askMeanCold' field. + */ + public double getAskMeanCold() { + return askMeanCold; + } + + /** + * Sets the value of the 'askMeanCold' field. + * + * @param value the value to set. + */ + public void setAskMeanCold(double value) { + this.askMeanCold = value; + } + + /** + * Gets the value of the 'askMeanWarm' field. + * + * @return The value of the 'askMeanWarm' field. + */ + public double getAskMeanWarm() { + return askMeanWarm; + } + + /** + * Sets the value of the 'askMeanWarm' field. + * + * @param value the value to set. + */ + public void setAskMeanWarm(double value) { + this.askMeanWarm = value; + } + + /** + * Gets the value of the 'joinMeanCold' field. + * + * @return The value of the 'joinMeanCold' field. + */ + public double getJoinMeanCold() { + return joinMeanCold; + } + + /** + * Sets the value of the 'joinMeanCold' field. + * + * @param value the value to set. 
+ */ + public void setJoinMeanCold(double value) { + this.joinMeanCold = value; + } + + /** + * Gets the value of the 'joinMeanWarm' field. + * + * @return The value of the 'joinMeanWarm' field. + */ + public double getJoinMeanWarm() { + return joinMeanWarm; + } + + /** + * Sets the value of the 'joinMeanWarm' field. + * + * @param value the value to set. + */ + public void setJoinMeanWarm(double value) { + this.joinMeanWarm = value; + } + + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value of the 'lastUpdate' field. + */ + public long getLastUpdate() { + return lastUpdate; + } + + /** + * Sets the value of the 'lastUpdate' field. + * + * @param value the value to set. + */ + public void setLastUpdate(long value) { + this.lastUpdate = value; + } + + /** + * Gets the value of the 'threshold' field. + * + * @return The value of the 'threshold' field. + */ + public long getThreshold() { + return threshold; + } + + /** + * Sets the value of the 'threshold' field. + * + * @param value the value to set. + */ + public void setThreshold(long value) { + this.threshold = value; + } + + /** + * Creates a new PerformanceView RecordBuilder. + * + * @return A new PerformanceView RecordBuilder + */ + public static sparqles.avro.analytics.PerformanceView.Builder newBuilder() { + return new sparqles.avro.analytics.PerformanceView.Builder(); + } + + /** + * Creates a new PerformanceView RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new PerformanceView RecordBuilder + */ + public static sparqles.avro.analytics.PerformanceView.Builder newBuilder( + sparqles.avro.analytics.PerformanceView.Builder other) { + if (other == null) { + return new sparqles.avro.analytics.PerformanceView.Builder(); + } else { + return new sparqles.avro.analytics.PerformanceView.Builder(other); + } + } + + /** + * Creates a new PerformanceView RecordBuilder by copying an existing PerformanceView instance. 
+ * + * @param other The existing instance to copy. + * @return A new PerformanceView RecordBuilder + */ + public static sparqles.avro.analytics.PerformanceView.Builder newBuilder( + sparqles.avro.analytics.PerformanceView other) { + if (other == null) { + return new sparqles.avro.analytics.PerformanceView.Builder(); + } else { + return new sparqles.avro.analytics.PerformanceView.Builder(other); + } + } + + /** RecordBuilder for PerformanceView instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private double askMeanCold; + private double askMeanWarm; + private double joinMeanCold; + private double joinMeanWarm; + private long lastUpdate; + private long threshold; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - askMeanCold = (java.lang.Double) value$; - break; - case 2: - askMeanWarm = (java.lang.Double) value$; - break; - case 3: - joinMeanCold = (java.lang.Double) value$; - break; - case 4: - joinMeanWarm = (java.lang.Double) value$; - break; - case 5: - lastUpdate = (java.lang.Long) value$; - break; - case 6: - threshold = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.analytics.PerformanceView.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.askMeanCold)) { + this.askMeanCold = data().deepCopy(fields()[1].schema(), other.askMeanCold); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.askMeanWarm)) { + this.askMeanWarm = data().deepCopy(fields()[2].schema(), other.askMeanWarm); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.joinMeanCold)) { + this.joinMeanCold = data().deepCopy(fields()[3].schema(), other.joinMeanCold); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.joinMeanWarm)) { + this.joinMeanWarm = data().deepCopy(fields()[4].schema(), other.joinMeanWarm); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[5].schema(), other.lastUpdate); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.threshold)) { + this.threshold = data().deepCopy(fields()[6].schema(), other.threshold); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } } - /** Gets the value of the 'endpoint' field. */ + /** + * Creates a Builder by copying an existing PerformanceView instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.analytics.PerformanceView other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.askMeanCold)) { + this.askMeanCold = data().deepCopy(fields()[1].schema(), other.askMeanCold); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.askMeanWarm)) { + this.askMeanWarm = data().deepCopy(fields()[2].schema(), other.askMeanWarm); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.joinMeanCold)) { + this.joinMeanCold = data().deepCopy(fields()[3].schema(), other.joinMeanCold); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.joinMeanWarm)) { + this.joinMeanWarm = data().deepCopy(fields()[4].schema(), other.joinMeanWarm); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.lastUpdate)) { + this.lastUpdate = data().deepCopy(fields()[5].schema(), other.lastUpdate); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.threshold)) { + this.threshold = data().deepCopy(fields()[6].schema(), other.threshold); + fieldSetFlags()[6] = true; + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + return endpoint; } /** * Sets the value of the 'endpoint' field. * - * @param value the value to set. + * @param value The value of 'endpoint'. + * @return This builder. */ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.analytics.PerformanceView.Builder setEndpoint( + sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'askMeanCold' field. 
*/ - public java.lang.Double getAskMeanCold() { - return askMeanCold; + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); + } + } + return endpointBuilder; + } + + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; + } + + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; + } + + /** + * Clears the value of the 'endpoint' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'askMeanCold' field. + * + * @return The value. + */ + public double getAskMeanCold() { + return askMeanCold; } /** * Sets the value of the 'askMeanCold' field. * - * @param value the value to set. + * @param value The value of 'askMeanCold'. + * @return This builder. 
*/ - public void setAskMeanCold(java.lang.Double value) { - this.askMeanCold = value; + public sparqles.avro.analytics.PerformanceView.Builder setAskMeanCold(double value) { + validate(fields()[1], value); + this.askMeanCold = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'askMeanWarm' field. */ - public java.lang.Double getAskMeanWarm() { - return askMeanWarm; + /** + * Checks whether the 'askMeanCold' field has been set. + * + * @return True if the 'askMeanCold' field has been set, false otherwise. + */ + public boolean hasAskMeanCold() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'askMeanCold' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder clearAskMeanCold() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'askMeanWarm' field. + * + * @return The value. + */ + public double getAskMeanWarm() { + return askMeanWarm; } /** * Sets the value of the 'askMeanWarm' field. * - * @param value the value to set. + * @param value The value of 'askMeanWarm'. + * @return This builder. */ - public void setAskMeanWarm(java.lang.Double value) { - this.askMeanWarm = value; + public sparqles.avro.analytics.PerformanceView.Builder setAskMeanWarm(double value) { + validate(fields()[2], value); + this.askMeanWarm = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'joinMeanCold' field. */ - public java.lang.Double getJoinMeanCold() { - return joinMeanCold; + /** + * Checks whether the 'askMeanWarm' field has been set. + * + * @return True if the 'askMeanWarm' field has been set, false otherwise. + */ + public boolean hasAskMeanWarm() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'askMeanWarm' field. + * + * @return This builder. 
+ */ + public sparqles.avro.analytics.PerformanceView.Builder clearAskMeanWarm() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'joinMeanCold' field. + * + * @return The value. + */ + public double getJoinMeanCold() { + return joinMeanCold; } /** * Sets the value of the 'joinMeanCold' field. * - * @param value the value to set. + * @param value The value of 'joinMeanCold'. + * @return This builder. */ - public void setJoinMeanCold(java.lang.Double value) { - this.joinMeanCold = value; + public sparqles.avro.analytics.PerformanceView.Builder setJoinMeanCold(double value) { + validate(fields()[3], value); + this.joinMeanCold = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'joinMeanWarm' field. */ - public java.lang.Double getJoinMeanWarm() { - return joinMeanWarm; + /** + * Checks whether the 'joinMeanCold' field has been set. + * + * @return True if the 'joinMeanCold' field has been set, false otherwise. + */ + public boolean hasJoinMeanCold() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'joinMeanCold' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder clearJoinMeanCold() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'joinMeanWarm' field. + * + * @return The value. + */ + public double getJoinMeanWarm() { + return joinMeanWarm; } /** * Sets the value of the 'joinMeanWarm' field. * - * @param value the value to set. + * @param value The value of 'joinMeanWarm'. + * @return This builder. */ - public void setJoinMeanWarm(java.lang.Double value) { - this.joinMeanWarm = value; + public sparqles.avro.analytics.PerformanceView.Builder setJoinMeanWarm(double value) { + validate(fields()[4], value); + this.joinMeanWarm = value; + fieldSetFlags()[4] = true; + return this; } - /** Gets the value of the 'lastUpdate' field. 
*/ - public java.lang.Long getLastUpdate() { - return lastUpdate; + /** + * Checks whether the 'joinMeanWarm' field has been set. + * + * @return True if the 'joinMeanWarm' field has been set, false otherwise. + */ + public boolean hasJoinMeanWarm() { + return fieldSetFlags()[4]; } /** - * Sets the value of the 'lastUpdate' field. + * Clears the value of the 'joinMeanWarm' field. * - * @param value the value to set. + * @return This builder. */ - public void setLastUpdate(java.lang.Long value) { - this.lastUpdate = value; + public sparqles.avro.analytics.PerformanceView.Builder clearJoinMeanWarm() { + fieldSetFlags()[4] = false; + return this; } - /** Gets the value of the 'threshold' field. */ - public java.lang.Long getThreshold() { - return threshold; + /** + * Gets the value of the 'lastUpdate' field. + * + * @return The value. + */ + public long getLastUpdate() { + return lastUpdate; } /** - * Sets the value of the 'threshold' field. + * Sets the value of the 'lastUpdate' field. * - * @param value the value to set. + * @param value The value of 'lastUpdate'. + * @return This builder. */ - public void setThreshold(java.lang.Long value) { - this.threshold = value; + public sparqles.avro.analytics.PerformanceView.Builder setLastUpdate(long value) { + validate(fields()[5], value); + this.lastUpdate = value; + fieldSetFlags()[5] = true; + return this; } - /** RecordBuilder for PerformanceView instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'lastUpdate' field has been set. + * + * @return True if the 'lastUpdate' field has been set, false otherwise. 
+ */ + public boolean hasLastUpdate() { + return fieldSetFlags()[5]; + } - private sparqles.avro.Endpoint endpoint; - private double askMeanCold; - private double askMeanWarm; - private double joinMeanCold; - private double joinMeanWarm; - private long lastUpdate; - private long threshold; + /** + * Clears the value of the 'lastUpdate' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder clearLastUpdate() { + fieldSetFlags()[5] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.analytics.PerformanceView.SCHEMA$); - } + /** + * Gets the value of the 'threshold' field. + * + * @return The value. + */ + public long getThreshold() { + return threshold; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.analytics.PerformanceView.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.askMeanCold)) { - this.askMeanCold = data().deepCopy(fields()[1].schema(), other.askMeanCold); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.askMeanWarm)) { - this.askMeanWarm = data().deepCopy(fields()[2].schema(), other.askMeanWarm); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.joinMeanCold)) { - this.joinMeanCold = data().deepCopy(fields()[3].schema(), other.joinMeanCold); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.joinMeanWarm)) { - this.joinMeanWarm = data().deepCopy(fields()[4].schema(), other.joinMeanWarm); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[5].schema(), other.lastUpdate); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.threshold)) { - this.threshold = 
data().deepCopy(fields()[6].schema(), other.threshold); - fieldSetFlags()[6] = true; - } - } + /** + * Sets the value of the 'threshold' field. + * + * @param value The value of 'threshold'. + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder setThreshold(long value) { + validate(fields()[6], value); + this.threshold = value; + fieldSetFlags()[6] = true; + return this; + } - /** Creates a Builder by copying an existing PerformanceView instance */ - private Builder(sparqles.avro.analytics.PerformanceView other) { - super(sparqles.avro.analytics.PerformanceView.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.askMeanCold)) { - this.askMeanCold = data().deepCopy(fields()[1].schema(), other.askMeanCold); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.askMeanWarm)) { - this.askMeanWarm = data().deepCopy(fields()[2].schema(), other.askMeanWarm); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.joinMeanCold)) { - this.joinMeanCold = data().deepCopy(fields()[3].schema(), other.joinMeanCold); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.joinMeanWarm)) { - this.joinMeanWarm = data().deepCopy(fields()[4].schema(), other.joinMeanWarm); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.lastUpdate)) { - this.lastUpdate = data().deepCopy(fields()[5].schema(), other.lastUpdate); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.threshold)) { - this.threshold = data().deepCopy(fields()[6].schema(), other.threshold); - fieldSetFlags()[6] = true; - } - } + /** + * Checks whether the 'threshold' field has been set. + * + * @return True if the 'threshold' field has been set, false otherwise. 
+ */ + public boolean hasThreshold() { + return fieldSetFlags()[6]; + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + /** + * Clears the value of the 'threshold' field. + * + * @return This builder. + */ + public sparqles.avro.analytics.PerformanceView.Builder clearThreshold() { + fieldSetFlags()[6] = false; + return this; + } - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.analytics.PerformanceView.Builder setEndpoint( - sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + @SuppressWarnings("unchecked") + public PerformanceView build() { + try { + PerformanceView record = new PerformanceView(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); + } + record.askMeanCold = + fieldSetFlags()[1] ? this.askMeanCold : (java.lang.Double) defaultValue(fields()[1]); + record.askMeanWarm = + fieldSetFlags()[2] ? this.askMeanWarm : (java.lang.Double) defaultValue(fields()[2]); + record.joinMeanCold = + fieldSetFlags()[3] ? this.joinMeanCold : (java.lang.Double) defaultValue(fields()[3]); + record.joinMeanWarm = + fieldSetFlags()[4] ? this.joinMeanWarm : (java.lang.Double) defaultValue(fields()[4]); + record.lastUpdate = + fieldSetFlags()[5] ? this.lastUpdate : (java.lang.Long) defaultValue(fields()[5]); + record.threshold = + fieldSetFlags()[6] ? 
this.threshold : (java.lang.Long) defaultValue(fields()[6]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'askMeanCold' field */ - public java.lang.Double getAskMeanCold() { - return askMeanCold; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'askMeanCold' field */ - public sparqles.avro.analytics.PerformanceView.Builder setAskMeanCold(double value) { - validate(fields()[1], value); - this.askMeanCold = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'askMeanCold' field has been set */ - public boolean hasAskMeanCold() { - return fieldSetFlags()[1]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'askMeanCold' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearAskMeanCold() { - fieldSetFlags()[1] = false; - return this; - } + @Override + public void 
customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); - /** Gets the value of the 'askMeanWarm' field */ - public java.lang.Double getAskMeanWarm() { - return askMeanWarm; - } + out.writeDouble(this.askMeanCold); - /** Sets the value of the 'askMeanWarm' field */ - public sparqles.avro.analytics.PerformanceView.Builder setAskMeanWarm(double value) { - validate(fields()[2], value); - this.askMeanWarm = value; - fieldSetFlags()[2] = true; - return this; - } + out.writeDouble(this.askMeanWarm); - /** Checks whether the 'askMeanWarm' field has been set */ - public boolean hasAskMeanWarm() { - return fieldSetFlags()[2]; - } + out.writeDouble(this.joinMeanCold); - /** Clears the value of the 'askMeanWarm' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearAskMeanWarm() { - fieldSetFlags()[2] = false; - return this; - } + out.writeDouble(this.joinMeanWarm); - /** Gets the value of the 'joinMeanCold' field */ - public java.lang.Double getJoinMeanCold() { - return joinMeanCold; - } + out.writeLong(this.lastUpdate); - /** Sets the value of the 'joinMeanCold' field */ - public sparqles.avro.analytics.PerformanceView.Builder setJoinMeanCold(double value) { - validate(fields()[3], value); - this.joinMeanCold = value; - fieldSetFlags()[3] = true; - return this; - } + out.writeLong(this.threshold); + } - /** Checks whether the 'joinMeanCold' field has been set */ - public boolean hasJoinMeanCold() { - return fieldSetFlags()[3]; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); - /** Clears the value of the 'joinMeanCold' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearJoinMeanCold() { - fieldSetFlags()[3] = 
false; - return this; - } + this.askMeanCold = in.readDouble(); - /** Gets the value of the 'joinMeanWarm' field */ - public java.lang.Double getJoinMeanWarm() { - return joinMeanWarm; - } + this.askMeanWarm = in.readDouble(); - /** Sets the value of the 'joinMeanWarm' field */ - public sparqles.avro.analytics.PerformanceView.Builder setJoinMeanWarm(double value) { - validate(fields()[4], value); - this.joinMeanWarm = value; - fieldSetFlags()[4] = true; - return this; - } + this.joinMeanCold = in.readDouble(); - /** Checks whether the 'joinMeanWarm' field has been set */ - public boolean hasJoinMeanWarm() { - return fieldSetFlags()[4]; - } + this.joinMeanWarm = in.readDouble(); - /** Clears the value of the 'joinMeanWarm' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearJoinMeanWarm() { - fieldSetFlags()[4] = false; - return this; - } + this.lastUpdate = in.readLong(); - /** Gets the value of the 'lastUpdate' field */ - public java.lang.Long getLastUpdate() { - return lastUpdate; - } + this.threshold = in.readLong(); - /** Sets the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.PerformanceView.Builder setLastUpdate(long value) { - validate(fields()[5], value); - this.lastUpdate = value; - fieldSetFlags()[5] = true; - return this; - } + } else { + for (int i = 0; i < 7; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; - /** Checks whether the 'lastUpdate' field has been set */ - public boolean hasLastUpdate() { - return fieldSetFlags()[5]; - } + case 1: + this.askMeanCold = in.readDouble(); + break; - /** Clears the value of the 'lastUpdate' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearLastUpdate() { - fieldSetFlags()[5] = false; - return this; - } + case 2: + this.askMeanWarm = in.readDouble(); + break; - /** Gets the value of the 'threshold' field */ - public 
java.lang.Long getThreshold() { - return threshold; - } + case 3: + this.joinMeanCold = in.readDouble(); + break; - /** Sets the value of the 'threshold' field */ - public sparqles.avro.analytics.PerformanceView.Builder setThreshold(long value) { - validate(fields()[6], value); - this.threshold = value; - fieldSetFlags()[6] = true; - return this; - } + case 4: + this.joinMeanWarm = in.readDouble(); + break; - /** Checks whether the 'threshold' field has been set */ - public boolean hasThreshold() { - return fieldSetFlags()[6]; - } + case 5: + this.lastUpdate = in.readLong(); + break; - /** Clears the value of the 'threshold' field */ - public sparqles.avro.analytics.PerformanceView.Builder clearThreshold() { - fieldSetFlags()[6] = false; - return this; - } + case 6: + this.threshold = in.readLong(); + break; - @Override - public PerformanceView build() { - try { - PerformanceView record = new PerformanceView(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.askMeanCold = - fieldSetFlags()[1] - ? this.askMeanCold - : (java.lang.Double) defaultValue(fields()[1]); - record.askMeanWarm = - fieldSetFlags()[2] - ? this.askMeanWarm - : (java.lang.Double) defaultValue(fields()[2]); - record.joinMeanCold = - fieldSetFlags()[3] - ? this.joinMeanCold - : (java.lang.Double) defaultValue(fields()[3]); - record.joinMeanWarm = - fieldSetFlags()[4] - ? this.joinMeanWarm - : (java.lang.Double) defaultValue(fields()[4]); - record.lastUpdate = - fieldSetFlags()[5] - ? this.lastUpdate - : (java.lang.Long) defaultValue(fields()[5]); - record.threshold = - fieldSetFlags()[6] - ? 
this.threshold - : (java.lang.Long) defaultValue(fields()[6]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/availability/AResult.java b/backend/src/main/java/sparqles/avro/availability/AResult.java index d1cd6c9e..19bdff3f 100644 --- a/backend/src/main/java/sparqles/avro/availability/AResult.java +++ b/backend/src/main/java/sparqles/avro/availability/AResult.java @@ -5,455 +5,877 @@ */ package sparqles.avro.availability; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class AResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"AResult\",\"namespace\":\"sparqles.avro.availability\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"responseTime\",\"type\":\"long\"},{\"name\":\"isAvailable\",\"type\":\"boolean\",\"default\":false},{\"name\":\"isPrivate\",\"type\":\"boolean\",\"default\":false},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"explanation\",\"type\":[\"string\",\"null\"]}]}"); - @Deprecated public sparqles.avro.EndpointResult endpointResult; - @Deprecated public long responseTime; - @Deprecated public boolean isAvailable; - @Deprecated public boolean isPrivate; - @Deprecated public java.lang.CharSequence Exception; - @Deprecated public java.lang.CharSequence explanation; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -3710164031125842257L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + 
"{\"type\":\"record\",\"name\":\"AResult\",\"namespace\":\"sparqles.avro.availability\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"responseTime\",\"type\":\"long\"},{\"name\":\"isAvailable\",\"type\":\"boolean\",\"default\":false},{\"name\":\"isPrivate\",\"type\":\"boolean\",\"default\":false},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"explanation\",\"type\":[\"string\",\"null\"]}],\"import\":\"EndpointResult.avsc\"}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this AResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a AResult from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a AResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static AResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.EndpointResult endpointResult; + private long responseTime; + private boolean isAvailable; + private boolean isPrivate; + private java.lang.CharSequence Exception; + private java.lang.CharSequence explanation; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public AResult() {} + + /** + * All-args constructor. 
+ * + * @param endpointResult The new value for endpointResult + * @param responseTime The new value for responseTime + * @param isAvailable The new value for isAvailable + * @param isPrivate The new value for isPrivate + * @param Exception The new value for Exception + * @param explanation The new value for explanation + */ + public AResult( + sparqles.avro.EndpointResult endpointResult, + java.lang.Long responseTime, + java.lang.Boolean isAvailable, + java.lang.Boolean isPrivate, + java.lang.CharSequence Exception, + java.lang.CharSequence explanation) { + this.endpointResult = endpointResult; + this.responseTime = responseTime; + this.isAvailable = isAvailable; + this.isPrivate = isPrivate; + this.Exception = Exception; + this.explanation = explanation; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpointResult; + case 1: + return responseTime; + case 2: + return isAvailable; + case 3: + return isPrivate; + case 4: + return Exception; + case 5: + return explanation; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpointResult = (sparqles.avro.EndpointResult) value$; + break; + case 1: + responseTime = (java.lang.Long) value$; + break; + case 2: + isAvailable = (java.lang.Boolean) value$; + break; + case 3: + isPrivate = (java.lang.Boolean) value$; + break; + case 4: + Exception = (java.lang.CharSequence) value$; + break; + case 5: + explanation = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value of the 'endpointResult' field. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; + } + + /** + * Sets the value of the 'endpointResult' field. + * + * @param value the value to set. + */ + public void setEndpointResult(sparqles.avro.EndpointResult value) { + this.endpointResult = value; + } + + /** + * Gets the value of the 'responseTime' field. + * + * @return The value of the 'responseTime' field. + */ + public long getResponseTime() { + return responseTime; + } + + /** + * Sets the value of the 'responseTime' field. + * + * @param value the value to set. + */ + public void setResponseTime(long value) { + this.responseTime = value; + } + + /** + * Gets the value of the 'isAvailable' field. + * + * @return The value of the 'isAvailable' field. + */ + public boolean getIsAvailable() { + return isAvailable; + } + + /** + * Sets the value of the 'isAvailable' field. + * + * @param value the value to set. + */ + public void setIsAvailable(boolean value) { + this.isAvailable = value; + } + + /** + * Gets the value of the 'isPrivate' field. + * + * @return The value of the 'isPrivate' field. + */ + public boolean getIsPrivate() { + return isPrivate; + } + + /** + * Sets the value of the 'isPrivate' field. + * + * @param value the value to set. 
+ */ + public void setIsPrivate(boolean value) { + this.isPrivate = value; + } + + /** + * Gets the value of the 'Exception' field. + * + * @return The value of the 'Exception' field. + */ + public java.lang.CharSequence getException() { + return Exception; + } + + /** + * Sets the value of the 'Exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.Exception = value; + } + + /** + * Gets the value of the 'explanation' field. + * + * @return The value of the 'explanation' field. + */ + public java.lang.CharSequence getExplanation() { + return explanation; + } + + /** + * Sets the value of the 'explanation' field. + * + * @param value the value to set. + */ + public void setExplanation(java.lang.CharSequence value) { + this.explanation = value; + } + + /** + * Creates a new AResult RecordBuilder. + * + * @return A new AResult RecordBuilder + */ + public static sparqles.avro.availability.AResult.Builder newBuilder() { + return new sparqles.avro.availability.AResult.Builder(); + } + + /** + * Creates a new AResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new AResult RecordBuilder + */ + public static sparqles.avro.availability.AResult.Builder newBuilder( + sparqles.avro.availability.AResult.Builder other) { + if (other == null) { + return new sparqles.avro.availability.AResult.Builder(); + } else { + return new sparqles.avro.availability.AResult.Builder(other); + } + } + + /** + * Creates a new AResult RecordBuilder by copying an existing AResult instance. + * + * @param other The existing instance to copy. 
+ * @return A new AResult RecordBuilder + */ + public static sparqles.avro.availability.AResult.Builder newBuilder( + sparqles.avro.availability.AResult other) { + if (other == null) { + return new sparqles.avro.availability.AResult.Builder(); + } else { + return new sparqles.avro.availability.AResult.Builder(other); + } + } + + /** RecordBuilder for AResult instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.EndpointResult endpointResult; + private sparqles.avro.EndpointResult.Builder endpointResultBuilder; + private long responseTime; + private boolean isAvailable; + private boolean isPrivate; + private java.lang.CharSequence Exception; + private java.lang.CharSequence explanation; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public AResult() {} - - /** All-args constructor. 
*/ - public AResult( - sparqles.avro.EndpointResult endpointResult, - java.lang.Long responseTime, - java.lang.Boolean isAvailable, - java.lang.Boolean isPrivate, - java.lang.CharSequence Exception, - java.lang.CharSequence explanation) { - this.endpointResult = endpointResult; - this.responseTime = responseTime; - this.isAvailable = isAvailable; - this.isPrivate = isPrivate; - this.Exception = Exception; - this.explanation = explanation; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new AResult RecordBuilder */ - public static sparqles.avro.availability.AResult.Builder newBuilder() { - return new sparqles.avro.availability.AResult.Builder(); - } - - /** Creates a new AResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.availability.AResult.Builder newBuilder( - sparqles.avro.availability.AResult.Builder other) { - return new sparqles.avro.availability.AResult.Builder(other); - } - - /** Creates a new AResult RecordBuilder by copying an existing AResult instance */ - public static sparqles.avro.availability.AResult.Builder newBuilder( - sparqles.avro.availability.AResult other) { - return new sparqles.avro.availability.AResult.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpointResult; - case 1: - return responseTime; - case 2: - return isAvailable; - case 3: - return isPrivate; - case 4: - return Exception; - case 5: - return explanation; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + private Builder(sparqles.avro.availability.AResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointResultBuilder()) { + this.endpointResultBuilder = + sparqles.avro.EndpointResult.newBuilder(other.getEndpointResultBuilder()); + } + if (isValidValue(fields()[1], other.responseTime)) { + this.responseTime = data().deepCopy(fields()[1].schema(), other.responseTime); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.isAvailable)) { + this.isAvailable = data().deepCopy(fields()[2].schema(), other.isAvailable); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.isPrivate)) { + this.isPrivate = data().deepCopy(fields()[3].schema(), other.isPrivate); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.Exception)) { + this.Exception = data().deepCopy(fields()[4].schema(), other.Exception); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.explanation)) { + this.explanation = data().deepCopy(fields()[5].schema(), other.explanation); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpointResult = (sparqles.avro.EndpointResult) value$; - break; - case 1: - responseTime = (java.lang.Long) value$; - break; - case 2: - isAvailable = (java.lang.Boolean) value$; - break; - case 3: - isPrivate = (java.lang.Boolean) value$; - break; - case 4: - Exception = (java.lang.CharSequence) value$; - break; - case 5: - explanation = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing AResult instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.availability.AResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = true; + } + this.endpointResultBuilder = null; + if (isValidValue(fields()[1], other.responseTime)) { + this.responseTime = data().deepCopy(fields()[1].schema(), other.responseTime); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.isAvailable)) { + this.isAvailable = data().deepCopy(fields()[2].schema(), other.isAvailable); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.isPrivate)) { + this.isPrivate = data().deepCopy(fields()[3].schema(), other.isPrivate); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.Exception)) { + this.Exception = data().deepCopy(fields()[4].schema(), other.Exception); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.explanation)) { + this.explanation = data().deepCopy(fields()[5].schema(), other.explanation); + fieldSetFlags()[5] = true; + } } - /** Gets the value of the 'endpointResult' field. */ + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value. 
+ */ public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; + return endpointResult; } /** * Sets the value of the 'endpointResult' field. * - * @param value the value to set. + * @param value The value of 'endpointResult'. + * @return This builder. */ - public void setEndpointResult(sparqles.avro.EndpointResult value) { - this.endpointResult = value; + public sparqles.avro.availability.AResult.Builder setEndpointResult( + sparqles.avro.EndpointResult value) { + validate(fields()[0], value); + this.endpointResultBuilder = null; + this.endpointResult = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'responseTime' field. */ - public java.lang.Long getResponseTime() { - return responseTime; + /** + * Checks whether the 'endpointResult' field has been set. + * + * @return True if the 'endpointResult' field has been set, false otherwise. + */ + public boolean hasEndpointResult() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'responseTime' field. + * Gets the Builder instance for the 'endpointResult' field and creates one if it doesn't exist + * yet. * - * @param value the value to set. + * @return This builder. */ - public void setResponseTime(java.lang.Long value) { - this.responseTime = value; + public sparqles.avro.EndpointResult.Builder getEndpointResultBuilder() { + if (endpointResultBuilder == null) { + if (hasEndpointResult()) { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder(endpointResult)); + } else { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder()); + } + } + return endpointResultBuilder; } - /** Gets the value of the 'isAvailable' field. */ - public java.lang.Boolean getIsAvailable() { - return isAvailable; + /** + * Sets the Builder instance for the 'endpointResult' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.availability.AResult.Builder setEndpointResultBuilder( + sparqles.avro.EndpointResult.Builder value) { + clearEndpointResult(); + endpointResultBuilder = value; + return this; } /** - * Sets the value of the 'isAvailable' field. + * Checks whether the 'endpointResult' field has an active Builder instance * - * @param value the value to set. + * @return True if the 'endpointResult' field has an active Builder instance */ - public void setIsAvailable(java.lang.Boolean value) { - this.isAvailable = value; + public boolean hasEndpointResultBuilder() { + return endpointResultBuilder != null; } - /** Gets the value of the 'isPrivate' field. */ - public java.lang.Boolean getIsPrivate() { - return isPrivate; + /** + * Clears the value of the 'endpointResult' field. + * + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder clearEndpointResult() { + endpointResult = null; + endpointResultBuilder = null; + fieldSetFlags()[0] = false; + return this; } /** - * Sets the value of the 'isPrivate' field. + * Gets the value of the 'responseTime' field. * - * @param value the value to set. + * @return The value. */ - public void setIsPrivate(java.lang.Boolean value) { - this.isPrivate = value; + public long getResponseTime() { + return responseTime; } - /** Gets the value of the 'Exception' field. */ - public java.lang.CharSequence getException() { - return Exception; + /** + * Sets the value of the 'responseTime' field. + * + * @param value The value of 'responseTime'. + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder setResponseTime(long value) { + validate(fields()[1], value); + this.responseTime = value; + fieldSetFlags()[1] = true; + return this; } /** - * Sets the value of the 'Exception' field. + * Checks whether the 'responseTime' field has been set. * - * @param value the value to set. + * @return True if the 'responseTime' field has been set, false otherwise. 
*/ - public void setException(java.lang.CharSequence value) { - this.Exception = value; + public boolean hasResponseTime() { + return fieldSetFlags()[1]; } - /** Gets the value of the 'explanation' field. */ - public java.lang.CharSequence getExplanation() { - return explanation; + /** + * Clears the value of the 'responseTime' field. + * + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder clearResponseTime() { + fieldSetFlags()[1] = false; + return this; } /** - * Sets the value of the 'explanation' field. + * Gets the value of the 'isAvailable' field. * - * @param value the value to set. + * @return The value. */ - public void setExplanation(java.lang.CharSequence value) { - this.explanation = value; + public boolean getIsAvailable() { + return isAvailable; } - /** RecordBuilder for AResult instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private sparqles.avro.EndpointResult endpointResult; - private long responseTime; - private boolean isAvailable; - private boolean isPrivate; - private java.lang.CharSequence Exception; - private java.lang.CharSequence explanation; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.availability.AResult.SCHEMA$); - } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.availability.AResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.responseTime)) { - this.responseTime = data().deepCopy(fields()[1].schema(), other.responseTime); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.isAvailable)) { - this.isAvailable = data().deepCopy(fields()[2].schema(), other.isAvailable); - fieldSetFlags()[2] = true; - } - if 
(isValidValue(fields()[3], other.isPrivate)) { - this.isPrivate = data().deepCopy(fields()[3].schema(), other.isPrivate); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.Exception)) { - this.Exception = data().deepCopy(fields()[4].schema(), other.Exception); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.explanation)) { - this.explanation = data().deepCopy(fields()[5].schema(), other.explanation); - fieldSetFlags()[5] = true; - } - } + /** + * Sets the value of the 'isAvailable' field. + * + * @param value The value of 'isAvailable'. + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder setIsAvailable(boolean value) { + validate(fields()[2], value); + this.isAvailable = value; + fieldSetFlags()[2] = true; + return this; + } - /** Creates a Builder by copying an existing AResult instance */ - private Builder(sparqles.avro.availability.AResult other) { - super(sparqles.avro.availability.AResult.SCHEMA$); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.responseTime)) { - this.responseTime = data().deepCopy(fields()[1].schema(), other.responseTime); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.isAvailable)) { - this.isAvailable = data().deepCopy(fields()[2].schema(), other.isAvailable); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.isPrivate)) { - this.isPrivate = data().deepCopy(fields()[3].schema(), other.isPrivate); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.Exception)) { - this.Exception = data().deepCopy(fields()[4].schema(), other.Exception); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.explanation)) { - this.explanation = data().deepCopy(fields()[5].schema(), other.explanation); - fieldSetFlags()[5] = true; - } - } + /** + 
* Checks whether the 'isAvailable' field has been set. + * + * @return True if the 'isAvailable' field has been set, false otherwise. + */ + public boolean hasIsAvailable() { + return fieldSetFlags()[2]; + } - /** Gets the value of the 'endpointResult' field */ - public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; - } + /** + * Clears the value of the 'isAvailable' field. + * + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder clearIsAvailable() { + fieldSetFlags()[2] = false; + return this; + } - /** Sets the value of the 'endpointResult' field */ - public sparqles.avro.availability.AResult.Builder setEndpointResult( - sparqles.avro.EndpointResult value) { - validate(fields()[0], value); - this.endpointResult = value; - fieldSetFlags()[0] = true; - return this; - } + /** + * Gets the value of the 'isPrivate' field. + * + * @return The value. + */ + public boolean getIsPrivate() { + return isPrivate; + } - /** Checks whether the 'endpointResult' field has been set */ - public boolean hasEndpointResult() { - return fieldSetFlags()[0]; - } + /** + * Sets the value of the 'isPrivate' field. + * + * @param value The value of 'isPrivate'. + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder setIsPrivate(boolean value) { + validate(fields()[3], value); + this.isPrivate = value; + fieldSetFlags()[3] = true; + return this; + } - /** Clears the value of the 'endpointResult' field */ - public sparqles.avro.availability.AResult.Builder clearEndpointResult() { - endpointResult = null; - fieldSetFlags()[0] = false; - return this; - } + /** + * Checks whether the 'isPrivate' field has been set. + * + * @return True if the 'isPrivate' field has been set, false otherwise. 
+ */ + public boolean hasIsPrivate() { + return fieldSetFlags()[3]; + } - /** Gets the value of the 'responseTime' field */ - public java.lang.Long getResponseTime() { - return responseTime; - } + /** + * Clears the value of the 'isPrivate' field. + * + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder clearIsPrivate() { + fieldSetFlags()[3] = false; + return this; + } - /** Sets the value of the 'responseTime' field */ - public sparqles.avro.availability.AResult.Builder setResponseTime(long value) { - validate(fields()[1], value); - this.responseTime = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Gets the value of the 'Exception' field. + * + * @return The value. + */ + public java.lang.CharSequence getException() { + return Exception; + } - /** Checks whether the 'responseTime' field has been set */ - public boolean hasResponseTime() { - return fieldSetFlags()[1]; - } + /** + * Sets the value of the 'Exception' field. + * + * @param value The value of 'Exception'. + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder setException(java.lang.CharSequence value) { + validate(fields()[4], value); + this.Exception = value; + fieldSetFlags()[4] = true; + return this; + } - /** Clears the value of the 'responseTime' field */ - public sparqles.avro.availability.AResult.Builder clearResponseTime() { - fieldSetFlags()[1] = false; - return this; - } + /** + * Checks whether the 'Exception' field has been set. + * + * @return True if the 'Exception' field has been set, false otherwise. + */ + public boolean hasException() { + return fieldSetFlags()[4]; + } - /** Gets the value of the 'isAvailable' field */ - public java.lang.Boolean getIsAvailable() { - return isAvailable; - } + /** + * Clears the value of the 'Exception' field. + * + * @return This builder. 
+ */ + public sparqles.avro.availability.AResult.Builder clearException() { + Exception = null; + fieldSetFlags()[4] = false; + return this; + } - /** Sets the value of the 'isAvailable' field */ - public sparqles.avro.availability.AResult.Builder setIsAvailable(boolean value) { - validate(fields()[2], value); - this.isAvailable = value; - fieldSetFlags()[2] = true; - return this; - } + /** + * Gets the value of the 'explanation' field. + * + * @return The value. + */ + public java.lang.CharSequence getExplanation() { + return explanation; + } - /** Checks whether the 'isAvailable' field has been set */ - public boolean hasIsAvailable() { - return fieldSetFlags()[2]; - } + /** + * Sets the value of the 'explanation' field. + * + * @param value The value of 'explanation'. + * @return This builder. + */ + public sparqles.avro.availability.AResult.Builder setExplanation(java.lang.CharSequence value) { + validate(fields()[5], value); + this.explanation = value; + fieldSetFlags()[5] = true; + return this; + } - /** Clears the value of the 'isAvailable' field */ - public sparqles.avro.availability.AResult.Builder clearIsAvailable() { - fieldSetFlags()[2] = false; - return this; - } + /** + * Checks whether the 'explanation' field has been set. + * + * @return True if the 'explanation' field has been set, false otherwise. + */ + public boolean hasExplanation() { + return fieldSetFlags()[5]; + } - /** Gets the value of the 'isPrivate' field */ - public java.lang.Boolean getIsPrivate() { - return isPrivate; - } + /** + * Clears the value of the 'explanation' field. + * + * @return This builder. 
+ */ + public sparqles.avro.availability.AResult.Builder clearExplanation() { + explanation = null; + fieldSetFlags()[5] = false; + return this; + } - /** Sets the value of the 'isPrivate' field */ - public sparqles.avro.availability.AResult.Builder setIsPrivate(boolean value) { - validate(fields()[3], value); - this.isPrivate = value; - fieldSetFlags()[3] = true; - return this; + @Override + @SuppressWarnings("unchecked") + public AResult build() { + try { + AResult record = new AResult(); + if (endpointResultBuilder != null) { + try { + record.endpointResult = this.endpointResultBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpointResult")); + throw e; + } + } else { + record.endpointResult = + fieldSetFlags()[0] + ? this.endpointResult + : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); } + record.responseTime = + fieldSetFlags()[1] ? this.responseTime : (java.lang.Long) defaultValue(fields()[1]); + record.isAvailable = + fieldSetFlags()[2] ? this.isAvailable : (java.lang.Boolean) defaultValue(fields()[2]); + record.isPrivate = + fieldSetFlags()[3] ? this.isPrivate : (java.lang.Boolean) defaultValue(fields()[3]); + record.Exception = + fieldSetFlags()[4] + ? this.Exception + : (java.lang.CharSequence) defaultValue(fields()[4]); + record.explanation = + fieldSetFlags()[5] + ? 
this.explanation + : (java.lang.CharSequence) defaultValue(fields()[5]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Checks whether the 'isPrivate' field has been set */ - public boolean hasIsPrivate() { - return fieldSetFlags()[3]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Clears the value of the 'isPrivate' field */ - public sparqles.avro.availability.AResult.Builder clearIsPrivate() { - fieldSetFlags()[3] = false; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'Exception' field */ - public java.lang.CharSequence getException() { - return Exception; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'Exception' field */ - public sparqles.avro.availability.AResult.Builder setException( - java.lang.CharSequence value) { - validate(fields()[4], value); - this.Exception = value; - fieldSetFlags()[4] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'Exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[4]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'Exception' field */ - public sparqles.avro.availability.AResult.Builder clearException() { - Exception = null; - fieldSetFlags()[4] = false; - return this; - } + @Override + public void 
customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpointResult.customEncode(out); - /** Gets the value of the 'explanation' field */ - public java.lang.CharSequence getExplanation() { - return explanation; - } + out.writeLong(this.responseTime); - /** Sets the value of the 'explanation' field */ - public sparqles.avro.availability.AResult.Builder setExplanation( - java.lang.CharSequence value) { - validate(fields()[5], value); - this.explanation = value; - fieldSetFlags()[5] = true; - return this; - } + out.writeBoolean(this.isAvailable); - /** Checks whether the 'explanation' field has been set */ - public boolean hasExplanation() { - return fieldSetFlags()[5]; - } + out.writeBoolean(this.isPrivate); - /** Clears the value of the 'explanation' field */ - public sparqles.avro.availability.AResult.Builder clearExplanation() { - explanation = null; - fieldSetFlags()[5] = false; - return this; - } + if (this.Exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.Exception); + } - @Override - public AResult build() { - try { - AResult record = new AResult(); - record.endpointResult = - fieldSetFlags()[0] - ? this.endpointResult - : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); - record.responseTime = - fieldSetFlags()[1] - ? this.responseTime - : (java.lang.Long) defaultValue(fields()[1]); - record.isAvailable = - fieldSetFlags()[2] - ? this.isAvailable - : (java.lang.Boolean) defaultValue(fields()[2]); - record.isPrivate = - fieldSetFlags()[3] - ? this.isPrivate - : (java.lang.Boolean) defaultValue(fields()[3]); - record.Exception = - fieldSetFlags()[4] - ? this.Exception - : (java.lang.CharSequence) defaultValue(fields()[4]); - record.explanation = - fieldSetFlags()[5] - ? 
this.explanation - : (java.lang.CharSequence) defaultValue(fields()[5]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + if (this.explanation == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.explanation); + } + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); + + this.responseTime = in.readLong(); + + this.isAvailable = in.readBoolean(); + + this.isPrivate = in.readBoolean(); + + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? (Utf8) this.Exception : null); + } + + if (in.readIndex() != 0) { + in.readNull(); + this.explanation = null; + } else { + this.explanation = + in.readString(this.explanation instanceof Utf8 ? (Utf8) this.explanation : null); + } + + } else { + for (int i = 0; i < 6; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); + break; + + case 1: + this.responseTime = in.readLong(); + break; + + case 2: + this.isAvailable = in.readBoolean(); + break; + + case 3: + this.isPrivate = in.readBoolean(); + break; + + case 4: + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? (Utf8) this.Exception : null); } + break; + + case 5: + if (in.readIndex() != 0) { + in.readNull(); + this.explanation = null; + } else { + this.explanation = + in.readString(this.explanation instanceof Utf8 ? 
(Utf8) this.explanation : null); + } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/calculation/CResult.java b/backend/src/main/java/sparqles/avro/calculation/CResult.java new file mode 100644 index 00000000..9f747015 --- /dev/null +++ b/backend/src/main/java/sparqles/avro/calculation/CResult.java @@ -0,0 +1,1588 @@ +/** + * Autogenerated by Avro + * + *

DO NOT EDIT DIRECTLY + */ +package sparqles.avro.calculation; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + +@org.apache.avro.specific.AvroGenerated +public class CResult extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -341231022483397386L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"CResult\",\"namespace\":\"sparqles.avro.calculation\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"triples\",\"type\":\"long\"},{\"name\":\"entities\",\"type\":\"long\"},{\"name\":\"classes\",\"type\":\"long\"},{\"name\":\"properties\",\"type\":\"long\"},{\"name\":\"distinctSubjects\",\"type\":\"long\"},{\"name\":\"distinctObjects\",\"type\":\"long\"},{\"name\":\"exampleResources\",\"type\":{\"type\":\"array\",\"items\":\"string\"}},{\"name\":\"VoID\",\"type\":\"string\"},{\"name\":\"VoIDPart\",\"type\":\"boolean\"},{\"name\":\"SD\",\"type\":\"string\"},{\"name\":\"SDPart\",\"type\":\"boolean\"},{\"name\":\"coherence\",\"type\":\"double\"},{\"name\":\"RS\",\"type\":\"double\"}],\"import\":\"EndpointResult.avsc\"}"); + + public static org.apache.avro.Schema 
getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this CResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a CResult from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a CResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static CResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.EndpointResult endpointResult; + private long triples; + private long entities; + private long classes; + private long properties; + private long distinctSubjects; + private long distinctObjects; + private java.util.List exampleResources; + private java.lang.CharSequence VoID; + private boolean VoIDPart; + private java.lang.CharSequence SD; + private boolean SDPart; + private double coherence; + private double RS; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public CResult() {} + + /** + * All-args constructor. 
+ * + * @param endpointResult The new value for endpointResult + * @param triples The new value for triples + * @param entities The new value for entities + * @param classes The new value for classes + * @param properties The new value for properties + * @param distinctSubjects The new value for distinctSubjects + * @param distinctObjects The new value for distinctObjects + * @param exampleResources The new value for exampleResources + * @param VoID The new value for VoID + * @param VoIDPart The new value for VoIDPart + * @param SD The new value for SD + * @param SDPart The new value for SDPart + * @param coherence The new value for coherence + * @param RS The new value for RS + */ + public CResult( + sparqles.avro.EndpointResult endpointResult, + java.lang.Long triples, + java.lang.Long entities, + java.lang.Long classes, + java.lang.Long properties, + java.lang.Long distinctSubjects, + java.lang.Long distinctObjects, + java.util.List exampleResources, + java.lang.CharSequence VoID, + java.lang.Boolean VoIDPart, + java.lang.CharSequence SD, + java.lang.Boolean SDPart, + java.lang.Double coherence, + java.lang.Double RS) { + this.endpointResult = endpointResult; + this.triples = triples; + this.entities = entities; + this.classes = classes; + this.properties = properties; + this.distinctSubjects = distinctSubjects; + this.distinctObjects = distinctObjects; + this.exampleResources = exampleResources; + this.VoID = VoID; + this.VoIDPart = VoIDPart; + this.SD = SD; + this.SDPart = SDPart; + this.coherence = coherence; + this.RS = RS; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpointResult; + case 1: + return triples; + case 2: + return entities; + case 3: + return classes; + case 4: + return properties; + case 5: + return distinctSubjects; + case 6: + return distinctObjects; + case 7: + return exampleResources; + case 8: + return VoID; + case 9: + return VoIDPart; + case 10: + return SD; + case 11: + return SDPart; + case 12: + return coherence; + case 13: + return RS; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpointResult = (sparqles.avro.EndpointResult) value$; + break; + case 1: + triples = (java.lang.Long) value$; + break; + case 2: + entities = (java.lang.Long) value$; + break; + case 3: + classes = (java.lang.Long) value$; + break; + case 4: + properties = (java.lang.Long) value$; + break; + case 5: + distinctSubjects = (java.lang.Long) value$; + break; + case 6: + distinctObjects = (java.lang.Long) value$; + break; + case 7: + exampleResources = (java.util.List) value$; + break; + case 8: + VoID = (java.lang.CharSequence) value$; + break; + case 9: + VoIDPart = (java.lang.Boolean) value$; + break; + case 10: + SD = (java.lang.CharSequence) value$; + break; + case 11: + SDPart = (java.lang.Boolean) value$; + break; + case 12: + coherence = (java.lang.Double) value$; + break; + case 13: + RS = (java.lang.Double) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value of the 'endpointResult' field. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; + } + + /** + * Sets the value of the 'endpointResult' field. 
+ * + * @param value the value to set. + */ + public void setEndpointResult(sparqles.avro.EndpointResult value) { + this.endpointResult = value; + } + + /** + * Gets the value of the 'triples' field. + * + * @return The value of the 'triples' field. + */ + public long getTriples() { + return triples; + } + + /** + * Sets the value of the 'triples' field. + * + * @param value the value to set. + */ + public void setTriples(long value) { + this.triples = value; + } + + /** + * Gets the value of the 'entities' field. + * + * @return The value of the 'entities' field. + */ + public long getEntities() { + return entities; + } + + /** + * Sets the value of the 'entities' field. + * + * @param value the value to set. + */ + public void setEntities(long value) { + this.entities = value; + } + + /** + * Gets the value of the 'classes' field. + * + * @return The value of the 'classes' field. + */ + public long getClasses() { + return classes; + } + + /** + * Sets the value of the 'classes' field. + * + * @param value the value to set. + */ + public void setClasses(long value) { + this.classes = value; + } + + /** + * Gets the value of the 'properties' field. + * + * @return The value of the 'properties' field. + */ + public long getProperties() { + return properties; + } + + /** + * Sets the value of the 'properties' field. + * + * @param value the value to set. + */ + public void setProperties(long value) { + this.properties = value; + } + + /** + * Gets the value of the 'distinctSubjects' field. + * + * @return The value of the 'distinctSubjects' field. + */ + public long getDistinctSubjects() { + return distinctSubjects; + } + + /** + * Sets the value of the 'distinctSubjects' field. + * + * @param value the value to set. + */ + public void setDistinctSubjects(long value) { + this.distinctSubjects = value; + } + + /** + * Gets the value of the 'distinctObjects' field. + * + * @return The value of the 'distinctObjects' field. 
+ */ + public long getDistinctObjects() { + return distinctObjects; + } + + /** + * Sets the value of the 'distinctObjects' field. + * + * @param value the value to set. + */ + public void setDistinctObjects(long value) { + this.distinctObjects = value; + } + + /** + * Gets the value of the 'exampleResources' field. + * + * @return The value of the 'exampleResources' field. + */ + public java.util.List getExampleResources() { + return exampleResources; + } + + /** + * Sets the value of the 'exampleResources' field. + * + * @param value the value to set. + */ + public void setExampleResources(java.util.List value) { + this.exampleResources = value; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value of the 'VoID' field. + */ + public java.lang.CharSequence getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value the value to set. + */ + public void setVoID(java.lang.CharSequence value) { + this.VoID = value; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value of the 'VoIDPart' field. + */ + public boolean getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value the value to set. + */ + public void setVoIDPart(boolean value) { + this.VoIDPart = value; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value of the 'SD' field. + */ + public java.lang.CharSequence getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value the value to set. + */ + public void setSD(java.lang.CharSequence value) { + this.SD = value; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value of the 'SDPart' field. + */ + public boolean getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value the value to set. 
+ */ + public void setSDPart(boolean value) { + this.SDPart = value; + } + + /** + * Gets the value of the 'coherence' field. + * + * @return The value of the 'coherence' field. + */ + public double getCoherence() { + return coherence; + } + + /** + * Sets the value of the 'coherence' field. + * + * @param value the value to set. + */ + public void setCoherence(double value) { + this.coherence = value; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value of the 'RS' field. + */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value the value to set. + */ + public void setRS(double value) { + this.RS = value; + } + + /** + * Creates a new CResult RecordBuilder. + * + * @return A new CResult RecordBuilder + */ + public static sparqles.avro.calculation.CResult.Builder newBuilder() { + return new sparqles.avro.calculation.CResult.Builder(); + } + + /** + * Creates a new CResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new CResult RecordBuilder + */ + public static sparqles.avro.calculation.CResult.Builder newBuilder( + sparqles.avro.calculation.CResult.Builder other) { + if (other == null) { + return new sparqles.avro.calculation.CResult.Builder(); + } else { + return new sparqles.avro.calculation.CResult.Builder(other); + } + } + + /** + * Creates a new CResult RecordBuilder by copying an existing CResult instance. + * + * @param other The existing instance to copy. + * @return A new CResult RecordBuilder + */ + public static sparqles.avro.calculation.CResult.Builder newBuilder( + sparqles.avro.calculation.CResult other) { + if (other == null) { + return new sparqles.avro.calculation.CResult.Builder(); + } else { + return new sparqles.avro.calculation.CResult.Builder(other); + } + } + + /** RecordBuilder for CResult instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.EndpointResult endpointResult; + private sparqles.avro.EndpointResult.Builder endpointResultBuilder; + private long triples; + private long entities; + private long classes; + private long properties; + private long distinctSubjects; + private long distinctObjects; + private java.util.List exampleResources; + private java.lang.CharSequence VoID; + private boolean VoIDPart; + private java.lang.CharSequence SD; + private boolean SDPart; + private double coherence; + private double RS; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.calculation.CResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointResultBuilder()) { + this.endpointResultBuilder = + sparqles.avro.EndpointResult.newBuilder(other.getEndpointResultBuilder()); + } + if (isValidValue(fields()[1], other.triples)) { + this.triples = data().deepCopy(fields()[1].schema(), other.triples); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.entities)) { + this.entities = data().deepCopy(fields()[2].schema(), other.entities); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.classes)) { + this.classes = data().deepCopy(fields()[3].schema(), other.classes); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.properties)) { + this.properties = data().deepCopy(fields()[4].schema(), other.properties); + 
fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.distinctSubjects)) { + this.distinctSubjects = data().deepCopy(fields()[5].schema(), other.distinctSubjects); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.distinctObjects)) { + this.distinctObjects = data().deepCopy(fields()[6].schema(), other.distinctObjects); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (isValidValue(fields()[7], other.exampleResources)) { + this.exampleResources = data().deepCopy(fields()[7].schema(), other.exampleResources); + fieldSetFlags()[7] = other.fieldSetFlags()[7]; + } + if (isValidValue(fields()[8], other.VoID)) { + this.VoID = data().deepCopy(fields()[8].schema(), other.VoID); + fieldSetFlags()[8] = other.fieldSetFlags()[8]; + } + if (isValidValue(fields()[9], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[9].schema(), other.VoIDPart); + fieldSetFlags()[9] = other.fieldSetFlags()[9]; + } + if (isValidValue(fields()[10], other.SD)) { + this.SD = data().deepCopy(fields()[10].schema(), other.SD); + fieldSetFlags()[10] = other.fieldSetFlags()[10]; + } + if (isValidValue(fields()[11], other.SDPart)) { + this.SDPart = data().deepCopy(fields()[11].schema(), other.SDPart); + fieldSetFlags()[11] = other.fieldSetFlags()[11]; + } + if (isValidValue(fields()[12], other.coherence)) { + this.coherence = data().deepCopy(fields()[12].schema(), other.coherence); + fieldSetFlags()[12] = other.fieldSetFlags()[12]; + } + if (isValidValue(fields()[13], other.RS)) { + this.RS = data().deepCopy(fields()[13].schema(), other.RS); + fieldSetFlags()[13] = other.fieldSetFlags()[13]; + } + } + + /** + * Creates a Builder by copying an existing CResult instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.calculation.CResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = true; + } + this.endpointResultBuilder = null; + if (isValidValue(fields()[1], other.triples)) { + this.triples = data().deepCopy(fields()[1].schema(), other.triples); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.entities)) { + this.entities = data().deepCopy(fields()[2].schema(), other.entities); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.classes)) { + this.classes = data().deepCopy(fields()[3].schema(), other.classes); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.properties)) { + this.properties = data().deepCopy(fields()[4].schema(), other.properties); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.distinctSubjects)) { + this.distinctSubjects = data().deepCopy(fields()[5].schema(), other.distinctSubjects); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.distinctObjects)) { + this.distinctObjects = data().deepCopy(fields()[6].schema(), other.distinctObjects); + fieldSetFlags()[6] = true; + } + if (isValidValue(fields()[7], other.exampleResources)) { + this.exampleResources = data().deepCopy(fields()[7].schema(), other.exampleResources); + fieldSetFlags()[7] = true; + } + if (isValidValue(fields()[8], other.VoID)) { + this.VoID = data().deepCopy(fields()[8].schema(), other.VoID); + fieldSetFlags()[8] = true; + } + if (isValidValue(fields()[9], other.VoIDPart)) { + this.VoIDPart = data().deepCopy(fields()[9].schema(), other.VoIDPart); + fieldSetFlags()[9] = true; + } + if (isValidValue(fields()[10], other.SD)) { + this.SD = data().deepCopy(fields()[10].schema(), other.SD); + fieldSetFlags()[10] = true; + } + if (isValidValue(fields()[11], other.SDPart)) { + this.SDPart = 
data().deepCopy(fields()[11].schema(), other.SDPart); + fieldSetFlags()[11] = true; + } + if (isValidValue(fields()[12], other.coherence)) { + this.coherence = data().deepCopy(fields()[12].schema(), other.coherence); + fieldSetFlags()[12] = true; + } + if (isValidValue(fields()[13], other.RS)) { + this.RS = data().deepCopy(fields()[13].schema(), other.RS); + fieldSetFlags()[13] = true; + } + } + + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; + } + + /** + * Sets the value of the 'endpointResult' field. + * + * @param value The value of 'endpointResult'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setEndpointResult( + sparqles.avro.EndpointResult value) { + validate(fields()[0], value); + this.endpointResultBuilder = null; + this.endpointResult = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'endpointResult' field has been set. + * + * @return True if the 'endpointResult' field has been set, false otherwise. + */ + public boolean hasEndpointResult() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpointResult' field and creates one if it doesn't exist + * yet. + * + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder getEndpointResultBuilder() { + if (endpointResultBuilder == null) { + if (hasEndpointResult()) { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder(endpointResult)); + } else { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder()); + } + } + return endpointResultBuilder; + } + + /** + * Sets the Builder instance for the 'endpointResult' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.calculation.CResult.Builder setEndpointResultBuilder( + sparqles.avro.EndpointResult.Builder value) { + clearEndpointResult(); + endpointResultBuilder = value; + return this; + } + + /** + * Checks whether the 'endpointResult' field has an active Builder instance + * + * @return True if the 'endpointResult' field has an active Builder instance + */ + public boolean hasEndpointResultBuilder() { + return endpointResultBuilder != null; + } + + /** + * Clears the value of the 'endpointResult' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearEndpointResult() { + endpointResult = null; + endpointResultBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'triples' field. + * + * @return The value. + */ + public long getTriples() { + return triples; + } + + /** + * Sets the value of the 'triples' field. + * + * @param value The value of 'triples'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setTriples(long value) { + validate(fields()[1], value); + this.triples = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'triples' field has been set. + * + * @return True if the 'triples' field has been set, false otherwise. + */ + public boolean hasTriples() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'triples' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearTriples() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'entities' field. + * + * @return The value. + */ + public long getEntities() { + return entities; + } + + /** + * Sets the value of the 'entities' field. + * + * @param value The value of 'entities'. + * @return This builder. 
+ */ + public sparqles.avro.calculation.CResult.Builder setEntities(long value) { + validate(fields()[2], value); + this.entities = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'entities' field has been set. + * + * @return True if the 'entities' field has been set, false otherwise. + */ + public boolean hasEntities() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'entities' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearEntities() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'classes' field. + * + * @return The value. + */ + public long getClasses() { + return classes; + } + + /** + * Sets the value of the 'classes' field. + * + * @param value The value of 'classes'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setClasses(long value) { + validate(fields()[3], value); + this.classes = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'classes' field has been set. + * + * @return True if the 'classes' field has been set, false otherwise. + */ + public boolean hasClasses() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'classes' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearClasses() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'properties' field. + * + * @return The value. + */ + public long getProperties() { + return properties; + } + + /** + * Sets the value of the 'properties' field. + * + * @param value The value of 'properties'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setProperties(long value) { + validate(fields()[4], value); + this.properties = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'properties' field has been set. 
+ * + * @return True if the 'properties' field has been set, false otherwise. + */ + public boolean hasProperties() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'properties' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearProperties() { + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'distinctSubjects' field. + * + * @return The value. + */ + public long getDistinctSubjects() { + return distinctSubjects; + } + + /** + * Sets the value of the 'distinctSubjects' field. + * + * @param value The value of 'distinctSubjects'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setDistinctSubjects(long value) { + validate(fields()[5], value); + this.distinctSubjects = value; + fieldSetFlags()[5] = true; + return this; + } + + /** + * Checks whether the 'distinctSubjects' field has been set. + * + * @return True if the 'distinctSubjects' field has been set, false otherwise. + */ + public boolean hasDistinctSubjects() { + return fieldSetFlags()[5]; + } + + /** + * Clears the value of the 'distinctSubjects' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearDistinctSubjects() { + fieldSetFlags()[5] = false; + return this; + } + + /** + * Gets the value of the 'distinctObjects' field. + * + * @return The value. + */ + public long getDistinctObjects() { + return distinctObjects; + } + + /** + * Sets the value of the 'distinctObjects' field. + * + * @param value The value of 'distinctObjects'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setDistinctObjects(long value) { + validate(fields()[6], value); + this.distinctObjects = value; + fieldSetFlags()[6] = true; + return this; + } + + /** + * Checks whether the 'distinctObjects' field has been set. + * + * @return True if the 'distinctObjects' field has been set, false otherwise. 
+ */ + public boolean hasDistinctObjects() { + return fieldSetFlags()[6]; + } + + /** + * Clears the value of the 'distinctObjects' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearDistinctObjects() { + fieldSetFlags()[6] = false; + return this; + } + + /** + * Gets the value of the 'exampleResources' field. + * + * @return The value. + */ + public java.util.List getExampleResources() { + return exampleResources; + } + + /** + * Sets the value of the 'exampleResources' field. + * + * @param value The value of 'exampleResources'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setExampleResources( + java.util.List value) { + validate(fields()[7], value); + this.exampleResources = value; + fieldSetFlags()[7] = true; + return this; + } + + /** + * Checks whether the 'exampleResources' field has been set. + * + * @return True if the 'exampleResources' field has been set, false otherwise. + */ + public boolean hasExampleResources() { + return fieldSetFlags()[7]; + } + + /** + * Clears the value of the 'exampleResources' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearExampleResources() { + exampleResources = null; + fieldSetFlags()[7] = false; + return this; + } + + /** + * Gets the value of the 'VoID' field. + * + * @return The value. + */ + public java.lang.CharSequence getVoID() { + return VoID; + } + + /** + * Sets the value of the 'VoID' field. + * + * @param value The value of 'VoID'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setVoID(java.lang.CharSequence value) { + validate(fields()[8], value); + this.VoID = value; + fieldSetFlags()[8] = true; + return this; + } + + /** + * Checks whether the 'VoID' field has been set. + * + * @return True if the 'VoID' field has been set, false otherwise. 
+ */ + public boolean hasVoID() { + return fieldSetFlags()[8]; + } + + /** + * Clears the value of the 'VoID' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearVoID() { + VoID = null; + fieldSetFlags()[8] = false; + return this; + } + + /** + * Gets the value of the 'VoIDPart' field. + * + * @return The value. + */ + public boolean getVoIDPart() { + return VoIDPart; + } + + /** + * Sets the value of the 'VoIDPart' field. + * + * @param value The value of 'VoIDPart'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setVoIDPart(boolean value) { + validate(fields()[9], value); + this.VoIDPart = value; + fieldSetFlags()[9] = true; + return this; + } + + /** + * Checks whether the 'VoIDPart' field has been set. + * + * @return True if the 'VoIDPart' field has been set, false otherwise. + */ + public boolean hasVoIDPart() { + return fieldSetFlags()[9]; + } + + /** + * Clears the value of the 'VoIDPart' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearVoIDPart() { + fieldSetFlags()[9] = false; + return this; + } + + /** + * Gets the value of the 'SD' field. + * + * @return The value. + */ + public java.lang.CharSequence getSD() { + return SD; + } + + /** + * Sets the value of the 'SD' field. + * + * @param value The value of 'SD'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setSD(java.lang.CharSequence value) { + validate(fields()[10], value); + this.SD = value; + fieldSetFlags()[10] = true; + return this; + } + + /** + * Checks whether the 'SD' field has been set. + * + * @return True if the 'SD' field has been set, false otherwise. + */ + public boolean hasSD() { + return fieldSetFlags()[10]; + } + + /** + * Clears the value of the 'SD' field. + * + * @return This builder. 
+ */ + public sparqles.avro.calculation.CResult.Builder clearSD() { + SD = null; + fieldSetFlags()[10] = false; + return this; + } + + /** + * Gets the value of the 'SDPart' field. + * + * @return The value. + */ + public boolean getSDPart() { + return SDPart; + } + + /** + * Sets the value of the 'SDPart' field. + * + * @param value The value of 'SDPart'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setSDPart(boolean value) { + validate(fields()[11], value); + this.SDPart = value; + fieldSetFlags()[11] = true; + return this; + } + + /** + * Checks whether the 'SDPart' field has been set. + * + * @return True if the 'SDPart' field has been set, false otherwise. + */ + public boolean hasSDPart() { + return fieldSetFlags()[11]; + } + + /** + * Clears the value of the 'SDPart' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearSDPart() { + fieldSetFlags()[11] = false; + return this; + } + + /** + * Gets the value of the 'coherence' field. + * + * @return The value. + */ + public double getCoherence() { + return coherence; + } + + /** + * Sets the value of the 'coherence' field. + * + * @param value The value of 'coherence'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setCoherence(double value) { + validate(fields()[12], value); + this.coherence = value; + fieldSetFlags()[12] = true; + return this; + } + + /** + * Checks whether the 'coherence' field has been set. + * + * @return True if the 'coherence' field has been set, false otherwise. + */ + public boolean hasCoherence() { + return fieldSetFlags()[12]; + } + + /** + * Clears the value of the 'coherence' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearCoherence() { + fieldSetFlags()[12] = false; + return this; + } + + /** + * Gets the value of the 'RS' field. + * + * @return The value. 
+ */ + public double getRS() { + return RS; + } + + /** + * Sets the value of the 'RS' field. + * + * @param value The value of 'RS'. + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder setRS(double value) { + validate(fields()[13], value); + this.RS = value; + fieldSetFlags()[13] = true; + return this; + } + + /** + * Checks whether the 'RS' field has been set. + * + * @return True if the 'RS' field has been set, false otherwise. + */ + public boolean hasRS() { + return fieldSetFlags()[13]; + } + + /** + * Clears the value of the 'RS' field. + * + * @return This builder. + */ + public sparqles.avro.calculation.CResult.Builder clearRS() { + fieldSetFlags()[13] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public CResult build() { + try { + CResult record = new CResult(); + if (endpointResultBuilder != null) { + try { + record.endpointResult = this.endpointResultBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpointResult")); + throw e; + } + } else { + record.endpointResult = + fieldSetFlags()[0] + ? this.endpointResult + : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); + } + record.triples = + fieldSetFlags()[1] ? this.triples : (java.lang.Long) defaultValue(fields()[1]); + record.entities = + fieldSetFlags()[2] ? this.entities : (java.lang.Long) defaultValue(fields()[2]); + record.classes = + fieldSetFlags()[3] ? this.classes : (java.lang.Long) defaultValue(fields()[3]); + record.properties = + fieldSetFlags()[4] ? this.properties : (java.lang.Long) defaultValue(fields()[4]); + record.distinctSubjects = + fieldSetFlags()[5] ? this.distinctSubjects : (java.lang.Long) defaultValue(fields()[5]); + record.distinctObjects = + fieldSetFlags()[6] ? this.distinctObjects : (java.lang.Long) defaultValue(fields()[6]); + record.exampleResources = + fieldSetFlags()[7] + ? 
this.exampleResources + : (java.util.List) defaultValue(fields()[7]); + record.VoID = + fieldSetFlags()[8] ? this.VoID : (java.lang.CharSequence) defaultValue(fields()[8]); + record.VoIDPart = + fieldSetFlags()[9] ? this.VoIDPart : (java.lang.Boolean) defaultValue(fields()[9]); + record.SD = + fieldSetFlags()[10] ? this.SD : (java.lang.CharSequence) defaultValue(fields()[10]); + record.SDPart = + fieldSetFlags()[11] ? this.SDPart : (java.lang.Boolean) defaultValue(fields()[11]); + record.coherence = + fieldSetFlags()[12] ? this.coherence : (java.lang.Double) defaultValue(fields()[12]); + record.RS = fieldSetFlags()[13] ? this.RS : (java.lang.Double) defaultValue(fields()[13]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpointResult.customEncode(out); + + out.writeLong(this.triples); + + out.writeLong(this.entities); + + out.writeLong(this.classes); + + out.writeLong(this.properties); + + out.writeLong(this.distinctSubjects); + + out.writeLong(this.distinctObjects); + + long size0 = this.exampleResources.size(); + 
out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (java.lang.CharSequence e0 : this.exampleResources) { + actualSize0++; + out.startItem(); + out.writeString(e0); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + out.writeString(this.VoID); + + out.writeBoolean(this.VoIDPart); + + out.writeString(this.SD); + + out.writeBoolean(this.SDPart); + + out.writeDouble(this.coherence); + + out.writeDouble(this.RS); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); + + this.triples = in.readLong(); + + this.entities = in.readLong(); + + this.classes = in.readLong(); + + this.properties = in.readLong(); + + this.distinctSubjects = in.readLong(); + + this.distinctObjects = in.readLong(); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.exampleResources; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("exampleResources").schema()); + this.exampleResources = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readString(e0 instanceof Utf8 ? (Utf8) e0 : null); + a0.add(e0); + } + } + + this.VoID = in.readString(this.VoID instanceof Utf8 ? (Utf8) this.VoID : null); + + this.VoIDPart = in.readBoolean(); + + this.SD = in.readString(this.SD instanceof Utf8 ? 
(Utf8) this.SD : null); + + this.SDPart = in.readBoolean(); + + this.coherence = in.readDouble(); + + this.RS = in.readDouble(); + + } else { + for (int i = 0; i < 14; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); + break; + + case 1: + this.triples = in.readLong(); + break; + + case 2: + this.entities = in.readLong(); + break; + + case 3: + this.classes = in.readLong(); + break; + + case 4: + this.properties = in.readLong(); + break; + + case 5: + this.distinctSubjects = in.readLong(); + break; + + case 6: + this.distinctObjects = in.readLong(); + break; + + case 7: + long size0 = in.readArrayStart(); + java.util.List a0 = this.exampleResources; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("exampleResources").schema()); + this.exampleResources = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readString(e0 instanceof Utf8 ? (Utf8) e0 : null); + a0.add(e0); + } + } + break; + + case 8: + this.VoID = in.readString(this.VoID instanceof Utf8 ? (Utf8) this.VoID : null); + break; + + case 9: + this.VoIDPart = in.readBoolean(); + break; + + case 10: + this.SD = in.readString(this.SD instanceof Utf8 ? 
(Utf8) this.SD : null); + break; + + case 11: + this.SDPart = in.readBoolean(); + break; + + case 12: + this.coherence = in.readDouble(); + break; + + case 13: + this.RS = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/backend/src/main/java/sparqles/avro/core/Robots.java b/backend/src/main/java/sparqles/avro/core/Robots.java index 6c6a82f7..9ec52e98 100644 --- a/backend/src/main/java/sparqles/avro/core/Robots.java +++ b/backend/src/main/java/sparqles/avro/core/Robots.java @@ -5,392 +5,783 @@ */ package sparqles.avro.core; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class Robots extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"Robots\",\"namespace\":\"sparqles.avro.core\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"content\",\"type\":[\"string\",\"null\"]},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"respCode\",\"type\":\"int\"},{\"name\":\"timestamp\",\"type\":\"long\"}]}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public java.lang.CharSequence content; - @Deprecated public java.lang.CharSequence exception; - @Deprecated public int respCode; - 
@Deprecated public long timestamp; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -7234500159875722717L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"Robots\",\"namespace\":\"sparqles.avro.core\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"content\",\"type\":[\"string\",\"null\"]},{\"name\":\"exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"respCode\",\"type\":\"int\"},{\"name\":\"timestamp\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Robots to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Robots from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Robots instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static Robots fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private java.lang.CharSequence content; + private java.lang.CharSequence exception; + private int respCode; + private long timestamp; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public Robots() {} + + /** + * All-args constructor. 
+ * + * @param endpoint The new value for endpoint + * @param content The new value for content + * @param exception The new value for exception + * @param respCode The new value for respCode + * @param timestamp The new value for timestamp + */ + public Robots( + sparqles.avro.Endpoint endpoint, + java.lang.CharSequence content, + java.lang.CharSequence exception, + java.lang.Integer respCode, + java.lang.Long timestamp) { + this.endpoint = endpoint; + this.content = content; + this.exception = exception; + this.respCode = respCode; + this.timestamp = timestamp; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return content; + case 2: + return exception; + case 3: + return respCode; + case 4: + return timestamp; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + content = (java.lang.CharSequence) value$; + break; + case 2: + exception = (java.lang.CharSequence) value$; + break; + case 3: + respCode = (java.lang.Integer) value$; + break; + case 4: + timestamp = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. 
+ */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'content' field. + * + * @return The value of the 'content' field. + */ + public java.lang.CharSequence getContent() { + return content; + } + + /** + * Sets the value of the 'content' field. + * + * @param value the value to set. + */ + public void setContent(java.lang.CharSequence value) { + this.content = value; + } + + /** + * Gets the value of the 'exception' field. + * + * @return The value of the 'exception' field. + */ + public java.lang.CharSequence getException() { + return exception; + } + + /** + * Sets the value of the 'exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.exception = value; + } + + /** + * Gets the value of the 'respCode' field. + * + * @return The value of the 'respCode' field. + */ + public int getRespCode() { + return respCode; + } + + /** + * Sets the value of the 'respCode' field. + * + * @param value the value to set. + */ + public void setRespCode(int value) { + this.respCode = value; + } + + /** + * Gets the value of the 'timestamp' field. + * + * @return The value of the 'timestamp' field. + */ + public long getTimestamp() { + return timestamp; + } + + /** + * Sets the value of the 'timestamp' field. + * + * @param value the value to set. + */ + public void setTimestamp(long value) { + this.timestamp = value; + } + + /** + * Creates a new Robots RecordBuilder. + * + * @return A new Robots RecordBuilder + */ + public static sparqles.avro.core.Robots.Builder newBuilder() { + return new sparqles.avro.core.Robots.Builder(); + } + + /** + * Creates a new Robots RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new Robots RecordBuilder + */ + public static sparqles.avro.core.Robots.Builder newBuilder( + sparqles.avro.core.Robots.Builder other) { + if (other == null) { + return new sparqles.avro.core.Robots.Builder(); + } else { + return new sparqles.avro.core.Robots.Builder(other); + } + } + + /** + * Creates a new Robots RecordBuilder by copying an existing Robots instance. + * + * @param other The existing instance to copy. + * @return A new Robots RecordBuilder + */ + public static sparqles.avro.core.Robots.Builder newBuilder(sparqles.avro.core.Robots other) { + if (other == null) { + return new sparqles.avro.core.Robots.Builder(); + } else { + return new sparqles.avro.core.Robots.Builder(other); + } + } + + /** RecordBuilder for Robots instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private java.lang.CharSequence content; + private java.lang.CharSequence exception; + private int respCode; + private long timestamp; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public Robots() {} - - /** All-args constructor. 
*/ - public Robots( - sparqles.avro.Endpoint endpoint, - java.lang.CharSequence content, - java.lang.CharSequence exception, - java.lang.Integer respCode, - java.lang.Long timestamp) { - this.endpoint = endpoint; - this.content = content; - this.exception = exception; - this.respCode = respCode; - this.timestamp = timestamp; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + private Builder(sparqles.avro.core.Robots.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.content)) { + this.content = data().deepCopy(fields()[1].schema(), other.content); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.exception)) { + this.exception = data().deepCopy(fields()[2].schema(), other.exception); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.respCode)) { + this.respCode = data().deepCopy(fields()[3].schema(), other.respCode); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.timestamp)) { + this.timestamp = data().deepCopy(fields()[4].schema(), other.timestamp); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } } - /** Creates a new Robots RecordBuilder */ - public static sparqles.avro.core.Robots.Builder newBuilder() { - return new sparqles.avro.core.Robots.Builder(); + /** + * Creates a Builder by copying an existing Robots instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.core.Robots other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.content)) { + this.content = data().deepCopy(fields()[1].schema(), other.content); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.exception)) { + this.exception = data().deepCopy(fields()[2].schema(), other.exception); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.respCode)) { + this.respCode = data().deepCopy(fields()[3].schema(), other.respCode); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.timestamp)) { + this.timestamp = data().deepCopy(fields()[4].schema(), other.timestamp); + fieldSetFlags()[4] = true; + } } - /** Creates a new Robots RecordBuilder by copying an existing Builder */ - public static sparqles.avro.core.Robots.Builder newBuilder( - sparqles.avro.core.Robots.Builder other) { - return new sparqles.avro.core.Robots.Builder(other); + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; } - /** Creates a new Robots RecordBuilder by copying an existing Robots instance */ - public static sparqles.avro.core.Robots.Builder newBuilder(sparqles.avro.core.Robots other) { - return new sparqles.avro.core.Robots.Builder(other); + /** + * Sets the value of the 'endpoint' field. + * + * @param value The value of 'endpoint'. + * @return This builder. + */ + public sparqles.avro.core.Robots.Builder setEndpoint(sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Checks whether the 'endpoint' field has been set. 
+ * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return content; - case 2: - return exception; - case 3: - return respCode; - case 4: - return timestamp; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); } + } + return endpointBuilder; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - content = (java.lang.CharSequence) value$; - break; - case 2: - exception = (java.lang.CharSequence) value$; - break; - case 3: - respCode = (java.lang.Integer) value$; - break; - case 4: - timestamp = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.core.Robots.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; } - /** Gets the value of the 'endpoint' field. 
*/ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; } /** - * Sets the value of the 'endpoint' field. + * Clears the value of the 'endpoint' field. * - * @param value the value to set. + * @return This builder. */ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.core.Robots.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'content' field. */ + /** + * Gets the value of the 'content' field. + * + * @return The value. + */ public java.lang.CharSequence getContent() { - return content; + return content; } /** * Sets the value of the 'content' field. * - * @param value the value to set. + * @param value The value of 'content'. + * @return This builder. + */ + public sparqles.avro.core.Robots.Builder setContent(java.lang.CharSequence value) { + validate(fields()[1], value); + this.content = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'content' field has been set. + * + * @return True if the 'content' field has been set, false otherwise. + */ + public boolean hasContent() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'content' field. + * + * @return This builder. */ - public void setContent(java.lang.CharSequence value) { - this.content = value; + public sparqles.avro.core.Robots.Builder clearContent() { + content = null; + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'exception' field. */ + /** + * Gets the value of the 'exception' field. + * + * @return The value. 
+ */ public java.lang.CharSequence getException() { - return exception; + return exception; } /** * Sets the value of the 'exception' field. * - * @param value the value to set. + * @param value The value of 'exception'. + * @return This builder. */ - public void setException(java.lang.CharSequence value) { - this.exception = value; + public sparqles.avro.core.Robots.Builder setException(java.lang.CharSequence value) { + validate(fields()[2], value); + this.exception = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'respCode' field. */ - public java.lang.Integer getRespCode() { - return respCode; + /** + * Checks whether the 'exception' field has been set. + * + * @return True if the 'exception' field has been set, false otherwise. + */ + public boolean hasException() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'exception' field. + * + * @return This builder. + */ + public sparqles.avro.core.Robots.Builder clearException() { + exception = null; + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'respCode' field. + * + * @return The value. + */ + public int getRespCode() { + return respCode; } /** * Sets the value of the 'respCode' field. * - * @param value the value to set. + * @param value The value of 'respCode'. + * @return This builder. */ - public void setRespCode(java.lang.Integer value) { - this.respCode = value; + public sparqles.avro.core.Robots.Builder setRespCode(int value) { + validate(fields()[3], value); + this.respCode = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'timestamp' field. */ - public java.lang.Long getTimestamp() { - return timestamp; + /** + * Checks whether the 'respCode' field has been set. + * + * @return True if the 'respCode' field has been set, false otherwise. + */ + public boolean hasRespCode() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'respCode' field. 
+ * + * @return This builder. + */ + public sparqles.avro.core.Robots.Builder clearRespCode() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'timestamp' field. + * + * @return The value. + */ + public long getTimestamp() { + return timestamp; } /** * Sets the value of the 'timestamp' field. * - * @param value the value to set. + * @param value The value of 'timestamp'. + * @return This builder. */ - public void setTimestamp(java.lang.Long value) { - this.timestamp = value; + public sparqles.avro.core.Robots.Builder setTimestamp(long value) { + validate(fields()[4], value); + this.timestamp = value; + fieldSetFlags()[4] = true; + return this; } - /** RecordBuilder for Robots instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'timestamp' field has been set. + * + * @return True if the 'timestamp' field has been set, false otherwise. + */ + public boolean hasTimestamp() { + return fieldSetFlags()[4]; + } - private sparqles.avro.Endpoint endpoint; - private java.lang.CharSequence content; - private java.lang.CharSequence exception; - private int respCode; - private long timestamp; + /** + * Clears the value of the 'timestamp' field. + * + * @return This builder. + */ + public sparqles.avro.core.Robots.Builder clearTimestamp() { + fieldSetFlags()[4] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.core.Robots.SCHEMA$); + @Override + @SuppressWarnings("unchecked") + public Robots build() { + try { + Robots record = new Robots(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? 
this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } + record.content = + fieldSetFlags()[1] ? this.content : (java.lang.CharSequence) defaultValue(fields()[1]); + record.exception = + fieldSetFlags()[2] + ? this.exception + : (java.lang.CharSequence) defaultValue(fields()[2]); + record.respCode = + fieldSetFlags()[3] ? this.respCode : (java.lang.Integer) defaultValue(fields()[3]); + record.timestamp = + fieldSetFlags()[4] ? this.timestamp : (java.lang.Long) defaultValue(fields()[4]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); + + if (this.content == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.content); + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.core.Robots.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if 
(isValidValue(fields()[1], other.content)) { - this.content = data().deepCopy(fields()[1].schema(), other.content); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.exception)) { - this.exception = data().deepCopy(fields()[2].schema(), other.exception); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.respCode)) { - this.respCode = data().deepCopy(fields()[3].schema(), other.respCode); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.timestamp)) { - this.timestamp = data().deepCopy(fields()[4].schema(), other.timestamp); - fieldSetFlags()[4] = true; - } - } + if (this.exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.exception); + } - /** Creates a Builder by copying an existing Robots instance */ - private Builder(sparqles.avro.core.Robots other) { - super(sparqles.avro.core.Robots.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.content)) { - this.content = data().deepCopy(fields()[1].schema(), other.content); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.exception)) { - this.exception = data().deepCopy(fields()[2].schema(), other.exception); - fieldSetFlags()[2] = true; + out.writeInt(this.respCode); + + out.writeLong(this.timestamp); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + + if (in.readIndex() != 0) { + in.readNull(); + this.content = null; + } else { + this.content = in.readString(this.content instanceof Utf8 ? 
(Utf8) this.content : null); + } + + if (in.readIndex() != 0) { + in.readNull(); + this.exception = null; + } else { + this.exception = + in.readString(this.exception instanceof Utf8 ? (Utf8) this.exception : null); + } + + this.respCode = in.readInt(); + + this.timestamp = in.readLong(); + + } else { + for (int i = 0; i < 5; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); } - if (isValidValue(fields()[3], other.respCode)) { - this.respCode = data().deepCopy(fields()[3].schema(), other.respCode); - fieldSetFlags()[3] = true; + this.endpoint.customDecode(in); + break; + + case 1: + if (in.readIndex() != 0) { + in.readNull(); + this.content = null; + } else { + this.content = + in.readString(this.content instanceof Utf8 ? (Utf8) this.content : null); } - if (isValidValue(fields()[4], other.timestamp)) { - this.timestamp = data().deepCopy(fields()[4].schema(), other.timestamp); - fieldSetFlags()[4] = true; + break; + + case 2: + if (in.readIndex() != 0) { + in.readNull(); + this.exception = null; + } else { + this.exception = + in.readString(this.exception instanceof Utf8 ? 
(Utf8) this.exception : null); } - } + break; - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + case 3: + this.respCode = in.readInt(); + break; - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.core.Robots.Builder setEndpoint(sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + case 4: + this.timestamp = in.readLong(); + break; - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.core.Robots.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'content' field */ - public java.lang.CharSequence getContent() { - return content; - } - - /** Sets the value of the 'content' field */ - public sparqles.avro.core.Robots.Builder setContent(java.lang.CharSequence value) { - validate(fields()[1], value); - this.content = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'content' field has been set */ - public boolean hasContent() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'content' field */ - public sparqles.avro.core.Robots.Builder clearContent() { - content = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'exception' field */ - public java.lang.CharSequence getException() { - return exception; - } - - /** Sets the value of the 'exception' field */ - public sparqles.avro.core.Robots.Builder setException(java.lang.CharSequence value) { - validate(fields()[2], value); - this.exception = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[2]; - } - - /** Clears the value of 
the 'exception' field */ - public sparqles.avro.core.Robots.Builder clearException() { - exception = null; - fieldSetFlags()[2] = false; - return this; - } - - /** Gets the value of the 'respCode' field */ - public java.lang.Integer getRespCode() { - return respCode; - } - - /** Sets the value of the 'respCode' field */ - public sparqles.avro.core.Robots.Builder setRespCode(int value) { - validate(fields()[3], value); - this.respCode = value; - fieldSetFlags()[3] = true; - return this; - } - - /** Checks whether the 'respCode' field has been set */ - public boolean hasRespCode() { - return fieldSetFlags()[3]; - } - - /** Clears the value of the 'respCode' field */ - public sparqles.avro.core.Robots.Builder clearRespCode() { - fieldSetFlags()[3] = false; - return this; - } - - /** Gets the value of the 'timestamp' field */ - public java.lang.Long getTimestamp() { - return timestamp; - } - - /** Sets the value of the 'timestamp' field */ - public sparqles.avro.core.Robots.Builder setTimestamp(long value) { - validate(fields()[4], value); - this.timestamp = value; - fieldSetFlags()[4] = true; - return this; - } - - /** Checks whether the 'timestamp' field has been set */ - public boolean hasTimestamp() { - return fieldSetFlags()[4]; - } - - /** Clears the value of the 'timestamp' field */ - public sparqles.avro.core.Robots.Builder clearTimestamp() { - fieldSetFlags()[4] = false; - return this; - } - - @Override - public Robots build() { - try { - Robots record = new Robots(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.content = - fieldSetFlags()[1] - ? this.content - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.exception = - fieldSetFlags()[2] - ? this.exception - : (java.lang.CharSequence) defaultValue(fields()[2]); - record.respCode = - fieldSetFlags()[3] - ? this.respCode - : (java.lang.Integer) defaultValue(fields()[3]); - record.timestamp = - fieldSetFlags()[4] - ? 
this.timestamp - : (java.lang.Long) defaultValue(fields()[4]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/discovery/DGETInfo.java b/backend/src/main/java/sparqles/avro/discovery/DGETInfo.java index 8b7935ac..61e6df33 100644 --- a/backend/src/main/java/sparqles/avro/discovery/DGETInfo.java +++ b/backend/src/main/java/sparqles/avro/discovery/DGETInfo.java @@ -5,764 +5,1126 @@ */ package sparqles.avro.discovery; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class DGETInfo extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"DGETInfo\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseType\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseCode\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseServer\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseLink\",\"type\":[\"string\",\"null\"]},{\"name\":\"Content\",\"type\":[\"string\",\"null\"]},{\"name\":\"SPARQLDESCpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}},{\"name\":\"voiDpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}}]}"); - @Deprecated public boolean allowedByRobotsTXT; - @Deprecated public java.lang.CharSequence Operation; - @Deprecated public 
java.lang.CharSequence URL; - @Deprecated public java.lang.CharSequence Exception; - @Deprecated public java.lang.CharSequence ResponseType; - @Deprecated public java.lang.CharSequence ResponseCode; - @Deprecated public java.lang.CharSequence ResponseServer; - @Deprecated public java.lang.CharSequence ResponseLink; - @Deprecated public java.lang.CharSequence Content; - @Deprecated public java.util.Map SPARQLDESCpreds; - @Deprecated public java.util.Map voiDpreds; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public DGETInfo() {} - - /** All-args constructor. */ - public DGETInfo( - java.lang.Boolean allowedByRobotsTXT, - java.lang.CharSequence Operation, - java.lang.CharSequence URL, - java.lang.CharSequence Exception, - java.lang.CharSequence ResponseType, - java.lang.CharSequence ResponseCode, - java.lang.CharSequence ResponseServer, - java.lang.CharSequence ResponseLink, - java.lang.CharSequence Content, - java.util.Map SPARQLDESCpreds, - java.util.Map voiDpreds) { - this.allowedByRobotsTXT = allowedByRobotsTXT; - this.Operation = Operation; - this.URL = URL; - this.Exception = Exception; - this.ResponseType = ResponseType; - this.ResponseCode = ResponseCode; - this.ResponseServer = ResponseServer; - this.ResponseLink = ResponseLink; - this.Content = Content; - this.SPARQLDESCpreds = SPARQLDESCpreds; - this.voiDpreds = voiDpreds; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new DGETInfo RecordBuilder */ - public static sparqles.avro.discovery.DGETInfo.Builder newBuilder() { - return new sparqles.avro.discovery.DGETInfo.Builder(); - } - - /** Creates a new DGETInfo RecordBuilder by copying an existing Builder */ - public static sparqles.avro.discovery.DGETInfo.Builder newBuilder( - sparqles.avro.discovery.DGETInfo.Builder other) { - return new 
sparqles.avro.discovery.DGETInfo.Builder(other); - } - - /** Creates a new DGETInfo RecordBuilder by copying an existing DGETInfo instance */ - public static sparqles.avro.discovery.DGETInfo.Builder newBuilder( - sparqles.avro.discovery.DGETInfo other) { - return new sparqles.avro.discovery.DGETInfo.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return allowedByRobotsTXT; - case 1: - return Operation; - case 2: - return URL; - case 3: - return Exception; - case 4: - return ResponseType; - case 5: - return ResponseCode; - case 6: - return ResponseServer; - case 7: - return ResponseLink; - case 8: - return Content; - case 9: - return SPARQLDESCpreds; - case 10: - return voiDpreds; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } - } - - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - allowedByRobotsTXT = (java.lang.Boolean) value$; - break; - case 1: - Operation = (java.lang.CharSequence) value$; - break; - case 2: - URL = (java.lang.CharSequence) value$; - break; - case 3: - Exception = (java.lang.CharSequence) value$; - break; - case 4: - ResponseType = (java.lang.CharSequence) value$; - break; - case 5: - ResponseCode = (java.lang.CharSequence) value$; - break; - case 6: - ResponseServer = (java.lang.CharSequence) value$; - break; - case 7: - ResponseLink = (java.lang.CharSequence) value$; - break; - case 8: - Content = (java.lang.CharSequence) value$; - break; - case 9: - SPARQLDESCpreds = (java.util.Map) value$; - break; - case 10: - voiDpreds = (java.util.Map) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } - } - - /** Gets the value of the 'allowedByRobotsTXT' field. 
*/ - public java.lang.Boolean getAllowedByRobotsTXT() { + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2432498232695213931L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"DGETInfo\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseType\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseCode\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseServer\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseLink\",\"type\":[\"string\",\"null\"]},{\"name\":\"Content\",\"type\":[\"string\",\"null\"]},{\"name\":\"SPARQLDESCpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}},{\"name\":\"voiDpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this DGETInfo to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a DGETInfo from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a DGETInfo instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static DGETInfo fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private boolean allowedByRobotsTXT; + private java.lang.CharSequence Operation; + private java.lang.CharSequence URL; + private java.lang.CharSequence Exception; + private java.lang.CharSequence ResponseType; + private java.lang.CharSequence ResponseCode; + private java.lang.CharSequence ResponseServer; + private java.lang.CharSequence ResponseLink; + private java.lang.CharSequence Content; + private java.util.Map SPARQLDESCpreds; + private java.util.Map voiDpreds; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public DGETInfo() {} + + /** + * All-args constructor. 
+ * + * @param allowedByRobotsTXT The new value for allowedByRobotsTXT + * @param Operation The new value for Operation + * @param URL The new value for URL + * @param Exception The new value for Exception + * @param ResponseType The new value for ResponseType + * @param ResponseCode The new value for ResponseCode + * @param ResponseServer The new value for ResponseServer + * @param ResponseLink The new value for ResponseLink + * @param Content The new value for Content + * @param SPARQLDESCpreds The new value for SPARQLDESCpreds + * @param voiDpreds The new value for voiDpreds + */ + public DGETInfo( + java.lang.Boolean allowedByRobotsTXT, + java.lang.CharSequence Operation, + java.lang.CharSequence URL, + java.lang.CharSequence Exception, + java.lang.CharSequence ResponseType, + java.lang.CharSequence ResponseCode, + java.lang.CharSequence ResponseServer, + java.lang.CharSequence ResponseLink, + java.lang.CharSequence Content, + java.util.Map SPARQLDESCpreds, + java.util.Map voiDpreds) { + this.allowedByRobotsTXT = allowedByRobotsTXT; + this.Operation = Operation; + this.URL = URL; + this.Exception = Exception; + this.ResponseType = ResponseType; + this.ResponseCode = ResponseCode; + this.ResponseServer = ResponseServer; + this.ResponseLink = ResponseLink; + this.Content = Content; + this.SPARQLDESCpreds = SPARQLDESCpreds; + this.voiDpreds = voiDpreds; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. 
+ @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return allowedByRobotsTXT; + case 1: + return Operation; + case 2: + return URL; + case 3: + return Exception; + case 4: + return ResponseType; + case 5: + return ResponseCode; + case 6: + return ResponseServer; + case 7: + return ResponseLink; + case 8: + return Content; + case 9: + return SPARQLDESCpreds; + case 10: + return voiDpreds; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + allowedByRobotsTXT = (java.lang.Boolean) value$; + break; + case 1: + Operation = (java.lang.CharSequence) value$; + break; + case 2: + URL = (java.lang.CharSequence) value$; + break; + case 3: + Exception = (java.lang.CharSequence) value$; + break; + case 4: + ResponseType = (java.lang.CharSequence) value$; + break; + case 5: + ResponseCode = (java.lang.CharSequence) value$; + break; + case 6: + ResponseServer = (java.lang.CharSequence) value$; + break; + case 7: + ResponseLink = (java.lang.CharSequence) value$; + break; + case 8: + Content = (java.lang.CharSequence) value$; + break; + case 9: + SPARQLDESCpreds = (java.util.Map) value$; + break; + case 10: + voiDpreds = (java.util.Map) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'allowedByRobotsTXT' field. + * + * @return The value of the 'allowedByRobotsTXT' field. + */ + public boolean getAllowedByRobotsTXT() { + return allowedByRobotsTXT; + } + + /** + * Sets the value of the 'allowedByRobotsTXT' field. + * + * @param value the value to set. + */ + public void setAllowedByRobotsTXT(boolean value) { + this.allowedByRobotsTXT = value; + } + + /** + * Gets the value of the 'Operation' field. 
+ * + * @return The value of the 'Operation' field. + */ + public java.lang.CharSequence getOperation() { + return Operation; + } + + /** + * Sets the value of the 'Operation' field. + * + * @param value the value to set. + */ + public void setOperation(java.lang.CharSequence value) { + this.Operation = value; + } + + /** + * Gets the value of the 'URL' field. + * + * @return The value of the 'URL' field. + */ + public java.lang.CharSequence getURL() { + return URL; + } + + /** + * Sets the value of the 'URL' field. + * + * @param value the value to set. + */ + public void setURL(java.lang.CharSequence value) { + this.URL = value; + } + + /** + * Gets the value of the 'Exception' field. + * + * @return The value of the 'Exception' field. + */ + public java.lang.CharSequence getException() { + return Exception; + } + + /** + * Sets the value of the 'Exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.Exception = value; + } + + /** + * Gets the value of the 'ResponseType' field. + * + * @return The value of the 'ResponseType' field. + */ + public java.lang.CharSequence getResponseType() { + return ResponseType; + } + + /** + * Sets the value of the 'ResponseType' field. + * + * @param value the value to set. + */ + public void setResponseType(java.lang.CharSequence value) { + this.ResponseType = value; + } + + /** + * Gets the value of the 'ResponseCode' field. + * + * @return The value of the 'ResponseCode' field. + */ + public java.lang.CharSequence getResponseCode() { + return ResponseCode; + } + + /** + * Sets the value of the 'ResponseCode' field. + * + * @param value the value to set. + */ + public void setResponseCode(java.lang.CharSequence value) { + this.ResponseCode = value; + } + + /** + * Gets the value of the 'ResponseServer' field. + * + * @return The value of the 'ResponseServer' field. 
+ */ + public java.lang.CharSequence getResponseServer() { + return ResponseServer; + } + + /** + * Sets the value of the 'ResponseServer' field. + * + * @param value the value to set. + */ + public void setResponseServer(java.lang.CharSequence value) { + this.ResponseServer = value; + } + + /** + * Gets the value of the 'ResponseLink' field. + * + * @return The value of the 'ResponseLink' field. + */ + public java.lang.CharSequence getResponseLink() { + return ResponseLink; + } + + /** + * Sets the value of the 'ResponseLink' field. + * + * @param value the value to set. + */ + public void setResponseLink(java.lang.CharSequence value) { + this.ResponseLink = value; + } + + /** + * Gets the value of the 'Content' field. + * + * @return The value of the 'Content' field. + */ + public java.lang.CharSequence getContent() { + return Content; + } + + /** + * Sets the value of the 'Content' field. + * + * @param value the value to set. + */ + public void setContent(java.lang.CharSequence value) { + this.Content = value; + } + + /** + * Gets the value of the 'SPARQLDESCpreds' field. + * + * @return The value of the 'SPARQLDESCpreds' field. + */ + public java.util.Map getSPARQLDESCpreds() { + return SPARQLDESCpreds; + } + + /** + * Sets the value of the 'SPARQLDESCpreds' field. + * + * @param value the value to set. + */ + public void setSPARQLDESCpreds(java.util.Map value) { + this.SPARQLDESCpreds = value; + } + + /** + * Gets the value of the 'voiDpreds' field. + * + * @return The value of the 'voiDpreds' field. + */ + public java.util.Map getVoiDpreds() { + return voiDpreds; + } + + /** + * Sets the value of the 'voiDpreds' field. + * + * @param value the value to set. + */ + public void setVoiDpreds(java.util.Map value) { + this.voiDpreds = value; + } + + /** + * Creates a new DGETInfo RecordBuilder. 
+ * + * @return A new DGETInfo RecordBuilder + */ + public static sparqles.avro.discovery.DGETInfo.Builder newBuilder() { + return new sparqles.avro.discovery.DGETInfo.Builder(); + } + + /** + * Creates a new DGETInfo RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new DGETInfo RecordBuilder + */ + public static sparqles.avro.discovery.DGETInfo.Builder newBuilder( + sparqles.avro.discovery.DGETInfo.Builder other) { + if (other == null) { + return new sparqles.avro.discovery.DGETInfo.Builder(); + } else { + return new sparqles.avro.discovery.DGETInfo.Builder(other); + } + } + + /** + * Creates a new DGETInfo RecordBuilder by copying an existing DGETInfo instance. + * + * @param other The existing instance to copy. + * @return A new DGETInfo RecordBuilder + */ + public static sparqles.avro.discovery.DGETInfo.Builder newBuilder( + sparqles.avro.discovery.DGETInfo other) { + if (other == null) { + return new sparqles.avro.discovery.DGETInfo.Builder(); + } else { + return new sparqles.avro.discovery.DGETInfo.Builder(other); + } + } + + /** RecordBuilder for DGETInfo instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private boolean allowedByRobotsTXT; + private java.lang.CharSequence Operation; + private java.lang.CharSequence URL; + private java.lang.CharSequence Exception; + private java.lang.CharSequence ResponseType; + private java.lang.CharSequence ResponseCode; + private java.lang.CharSequence ResponseServer; + private java.lang.CharSequence ResponseLink; + private java.lang.CharSequence Content; + private java.util.Map SPARQLDESCpreds; + private java.util.Map voiDpreds; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. 
+ * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.discovery.DGETInfo.Builder other) { + super(other); + if (isValidValue(fields()[0], other.allowedByRobotsTXT)) { + this.allowedByRobotsTXT = data().deepCopy(fields()[0].schema(), other.allowedByRobotsTXT); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.Operation)) { + this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.URL)) { + this.URL = data().deepCopy(fields()[2].schema(), other.URL); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.Exception)) { + this.Exception = data().deepCopy(fields()[3].schema(), other.Exception); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.ResponseType)) { + this.ResponseType = data().deepCopy(fields()[4].schema(), other.ResponseType); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.ResponseCode)) { + this.ResponseCode = data().deepCopy(fields()[5].schema(), other.ResponseCode); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.ResponseServer)) { + this.ResponseServer = data().deepCopy(fields()[6].schema(), other.ResponseServer); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (isValidValue(fields()[7], other.ResponseLink)) { + this.ResponseLink = data().deepCopy(fields()[7].schema(), other.ResponseLink); + fieldSetFlags()[7] = other.fieldSetFlags()[7]; + } + if (isValidValue(fields()[8], other.Content)) { + this.Content = data().deepCopy(fields()[8].schema(), other.Content); + fieldSetFlags()[8] = other.fieldSetFlags()[8]; + } + if (isValidValue(fields()[9], other.SPARQLDESCpreds)) { + this.SPARQLDESCpreds = data().deepCopy(fields()[9].schema(), other.SPARQLDESCpreds); + fieldSetFlags()[9] = other.fieldSetFlags()[9]; 
+ } + if (isValidValue(fields()[10], other.voiDpreds)) { + this.voiDpreds = data().deepCopy(fields()[10].schema(), other.voiDpreds); + fieldSetFlags()[10] = other.fieldSetFlags()[10]; + } + } + + /** + * Creates a Builder by copying an existing DGETInfo instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.discovery.DGETInfo other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.allowedByRobotsTXT)) { + this.allowedByRobotsTXT = data().deepCopy(fields()[0].schema(), other.allowedByRobotsTXT); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.Operation)) { + this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.URL)) { + this.URL = data().deepCopy(fields()[2].schema(), other.URL); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.Exception)) { + this.Exception = data().deepCopy(fields()[3].schema(), other.Exception); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.ResponseType)) { + this.ResponseType = data().deepCopy(fields()[4].schema(), other.ResponseType); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.ResponseCode)) { + this.ResponseCode = data().deepCopy(fields()[5].schema(), other.ResponseCode); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.ResponseServer)) { + this.ResponseServer = data().deepCopy(fields()[6].schema(), other.ResponseServer); + fieldSetFlags()[6] = true; + } + if (isValidValue(fields()[7], other.ResponseLink)) { + this.ResponseLink = data().deepCopy(fields()[7].schema(), other.ResponseLink); + fieldSetFlags()[7] = true; + } + if (isValidValue(fields()[8], other.Content)) { + this.Content = data().deepCopy(fields()[8].schema(), other.Content); + fieldSetFlags()[8] = true; + } + if (isValidValue(fields()[9], other.SPARQLDESCpreds)) { + this.SPARQLDESCpreds = 
data().deepCopy(fields()[9].schema(), other.SPARQLDESCpreds); + fieldSetFlags()[9] = true; + } + if (isValidValue(fields()[10], other.voiDpreds)) { + this.voiDpreds = data().deepCopy(fields()[10].schema(), other.voiDpreds); + fieldSetFlags()[10] = true; + } + } + + /** + * Gets the value of the 'allowedByRobotsTXT' field. + * + * @return The value. + */ + public boolean getAllowedByRobotsTXT() { + return allowedByRobotsTXT; } /** * Sets the value of the 'allowedByRobotsTXT' field. * - * @param value the value to set. + * @param value The value of 'allowedByRobotsTXT'. + * @return This builder. */ - public void setAllowedByRobotsTXT(java.lang.Boolean value) { - this.allowedByRobotsTXT = value; + public sparqles.avro.discovery.DGETInfo.Builder setAllowedByRobotsTXT(boolean value) { + validate(fields()[0], value); + this.allowedByRobotsTXT = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'Operation' field. */ + /** + * Checks whether the 'allowedByRobotsTXT' field has been set. + * + * @return True if the 'allowedByRobotsTXT' field has been set, false otherwise. + */ + public boolean hasAllowedByRobotsTXT() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'allowedByRobotsTXT' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder clearAllowedByRobotsTXT() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'Operation' field. + * + * @return The value. + */ public java.lang.CharSequence getOperation() { - return Operation; + return Operation; } /** * Sets the value of the 'Operation' field. * - * @param value the value to set. + * @param value The value of 'Operation'. + * @return This builder. 
+ */ + public sparqles.avro.discovery.DGETInfo.Builder setOperation(java.lang.CharSequence value) { + validate(fields()[1], value); + this.Operation = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'Operation' field has been set. + * + * @return True if the 'Operation' field has been set, false otherwise. + */ + public boolean hasOperation() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'Operation' field. + * + * @return This builder. */ - public void setOperation(java.lang.CharSequence value) { - this.Operation = value; + public sparqles.avro.discovery.DGETInfo.Builder clearOperation() { + Operation = null; + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'URL' field. */ + /** + * Gets the value of the 'URL' field. + * + * @return The value. + */ public java.lang.CharSequence getURL() { - return URL; + return URL; } /** * Sets the value of the 'URL' field. * - * @param value the value to set. + * @param value The value of 'URL'. + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder setURL(java.lang.CharSequence value) { + validate(fields()[2], value); + this.URL = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'URL' field has been set. + * + * @return True if the 'URL' field has been set, false otherwise. + */ + public boolean hasURL() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'URL' field. + * + * @return This builder. */ - public void setURL(java.lang.CharSequence value) { - this.URL = value; + public sparqles.avro.discovery.DGETInfo.Builder clearURL() { + URL = null; + fieldSetFlags()[2] = false; + return this; } - /** Gets the value of the 'Exception' field. */ + /** + * Gets the value of the 'Exception' field. + * + * @return The value. + */ public java.lang.CharSequence getException() { - return Exception; + return Exception; } /** * Sets the value of the 'Exception' field. 
* - * @param value the value to set. + * @param value The value of 'Exception'. + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder setException(java.lang.CharSequence value) { + validate(fields()[3], value); + this.Exception = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'Exception' field has been set. + * + * @return True if the 'Exception' field has been set, false otherwise. */ - public void setException(java.lang.CharSequence value) { - this.Exception = value; + public boolean hasException() { + return fieldSetFlags()[3]; } - /** Gets the value of the 'ResponseType' field. */ + /** + * Clears the value of the 'Exception' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder clearException() { + Exception = null; + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'ResponseType' field. + * + * @return The value. + */ public java.lang.CharSequence getResponseType() { - return ResponseType; + return ResponseType; } /** * Sets the value of the 'ResponseType' field. * - * @param value the value to set. + * @param value The value of 'ResponseType'. + * @return This builder. */ - public void setResponseType(java.lang.CharSequence value) { - this.ResponseType = value; + public sparqles.avro.discovery.DGETInfo.Builder setResponseType(java.lang.CharSequence value) { + validate(fields()[4], value); + this.ResponseType = value; + fieldSetFlags()[4] = true; + return this; } - /** Gets the value of the 'ResponseCode' field. */ + /** + * Checks whether the 'ResponseType' field has been set. + * + * @return True if the 'ResponseType' field has been set, false otherwise. + */ + public boolean hasResponseType() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'ResponseType' field. + * + * @return This builder. 
+ */ + public sparqles.avro.discovery.DGETInfo.Builder clearResponseType() { + ResponseType = null; + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'ResponseCode' field. + * + * @return The value. + */ public java.lang.CharSequence getResponseCode() { - return ResponseCode; + return ResponseCode; } /** * Sets the value of the 'ResponseCode' field. * - * @param value the value to set. + * @param value The value of 'ResponseCode'. + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder setResponseCode(java.lang.CharSequence value) { + validate(fields()[5], value); + this.ResponseCode = value; + fieldSetFlags()[5] = true; + return this; + } + + /** + * Checks whether the 'ResponseCode' field has been set. + * + * @return True if the 'ResponseCode' field has been set, false otherwise. + */ + public boolean hasResponseCode() { + return fieldSetFlags()[5]; + } + + /** + * Clears the value of the 'ResponseCode' field. + * + * @return This builder. */ - public void setResponseCode(java.lang.CharSequence value) { - this.ResponseCode = value; + public sparqles.avro.discovery.DGETInfo.Builder clearResponseCode() { + ResponseCode = null; + fieldSetFlags()[5] = false; + return this; } - /** Gets the value of the 'ResponseServer' field. */ + /** + * Gets the value of the 'ResponseServer' field. + * + * @return The value. + */ public java.lang.CharSequence getResponseServer() { - return ResponseServer; + return ResponseServer; } /** * Sets the value of the 'ResponseServer' field. * - * @param value the value to set. + * @param value The value of 'ResponseServer'. + * @return This builder. 
*/ - public void setResponseServer(java.lang.CharSequence value) { - this.ResponseServer = value; + public sparqles.avro.discovery.DGETInfo.Builder setResponseServer( + java.lang.CharSequence value) { + validate(fields()[6], value); + this.ResponseServer = value; + fieldSetFlags()[6] = true; + return this; } - /** Gets the value of the 'ResponseLink' field. */ + /** + * Checks whether the 'ResponseServer' field has been set. + * + * @return True if the 'ResponseServer' field has been set, false otherwise. + */ + public boolean hasResponseServer() { + return fieldSetFlags()[6]; + } + + /** + * Clears the value of the 'ResponseServer' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder clearResponseServer() { + ResponseServer = null; + fieldSetFlags()[6] = false; + return this; + } + + /** + * Gets the value of the 'ResponseLink' field. + * + * @return The value. + */ public java.lang.CharSequence getResponseLink() { - return ResponseLink; + return ResponseLink; } /** * Sets the value of the 'ResponseLink' field. * - * @param value the value to set. + * @param value The value of 'ResponseLink'. + * @return This builder. */ - public void setResponseLink(java.lang.CharSequence value) { - this.ResponseLink = value; + public sparqles.avro.discovery.DGETInfo.Builder setResponseLink(java.lang.CharSequence value) { + validate(fields()[7], value); + this.ResponseLink = value; + fieldSetFlags()[7] = true; + return this; } - /** Gets the value of the 'Content' field. */ + /** + * Checks whether the 'ResponseLink' field has been set. + * + * @return True if the 'ResponseLink' field has been set, false otherwise. + */ + public boolean hasResponseLink() { + return fieldSetFlags()[7]; + } + + /** + * Clears the value of the 'ResponseLink' field. + * + * @return This builder. 
+ */ + public sparqles.avro.discovery.DGETInfo.Builder clearResponseLink() { + ResponseLink = null; + fieldSetFlags()[7] = false; + return this; + } + + /** + * Gets the value of the 'Content' field. + * + * @return The value. + */ public java.lang.CharSequence getContent() { - return Content; + return Content; } /** * Sets the value of the 'Content' field. * - * @param value the value to set. + * @param value The value of 'Content'. + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder setContent(java.lang.CharSequence value) { + validate(fields()[8], value); + this.Content = value; + fieldSetFlags()[8] = true; + return this; + } + + /** + * Checks whether the 'Content' field has been set. + * + * @return True if the 'Content' field has been set, false otherwise. */ - public void setContent(java.lang.CharSequence value) { - this.Content = value; + public boolean hasContent() { + return fieldSetFlags()[8]; } - /** Gets the value of the 'SPARQLDESCpreds' field. */ + /** + * Clears the value of the 'Content' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder clearContent() { + Content = null; + fieldSetFlags()[8] = false; + return this; + } + + /** + * Gets the value of the 'SPARQLDESCpreds' field. + * + * @return The value. + */ public java.util.Map getSPARQLDESCpreds() { - return SPARQLDESCpreds; + return SPARQLDESCpreds; } /** * Sets the value of the 'SPARQLDESCpreds' field. * - * @param value the value to set. + * @param value The value of 'SPARQLDESCpreds'. + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder setSPARQLDESCpreds( + java.util.Map value) { + validate(fields()[9], value); + this.SPARQLDESCpreds = value; + fieldSetFlags()[9] = true; + return this; + } + + /** + * Checks whether the 'SPARQLDESCpreds' field has been set. + * + * @return True if the 'SPARQLDESCpreds' field has been set, false otherwise. 
+ */ + public boolean hasSPARQLDESCpreds() { + return fieldSetFlags()[9]; + } + + /** + * Clears the value of the 'SPARQLDESCpreds' field. + * + * @return This builder. */ - public void setSPARQLDESCpreds(java.util.Map value) { - this.SPARQLDESCpreds = value; + public sparqles.avro.discovery.DGETInfo.Builder clearSPARQLDESCpreds() { + SPARQLDESCpreds = null; + fieldSetFlags()[9] = false; + return this; } - /** Gets the value of the 'voiDpreds' field. */ + /** + * Gets the value of the 'voiDpreds' field. + * + * @return The value. + */ public java.util.Map getVoiDpreds() { - return voiDpreds; + return voiDpreds; } /** * Sets the value of the 'voiDpreds' field. * - * @param value the value to set. - */ - public void setVoiDpreds(java.util.Map value) { - this.voiDpreds = value; - } - - /** RecordBuilder for DGETInfo instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private boolean allowedByRobotsTXT; - private java.lang.CharSequence Operation; - private java.lang.CharSequence URL; - private java.lang.CharSequence Exception; - private java.lang.CharSequence ResponseType; - private java.lang.CharSequence ResponseCode; - private java.lang.CharSequence ResponseServer; - private java.lang.CharSequence ResponseLink; - private java.lang.CharSequence Content; - private java.util.Map SPARQLDESCpreds; - private java.util.Map voiDpreds; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.discovery.DGETInfo.SCHEMA$); - } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.discovery.DGETInfo.Builder other) { - super(other); - if (isValidValue(fields()[0], other.allowedByRobotsTXT)) { - this.allowedByRobotsTXT = - data().deepCopy(fields()[0].schema(), other.allowedByRobotsTXT); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.Operation)) { - this.Operation = 
data().deepCopy(fields()[1].schema(), other.Operation); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.URL)) { - this.URL = data().deepCopy(fields()[2].schema(), other.URL); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.Exception)) { - this.Exception = data().deepCopy(fields()[3].schema(), other.Exception); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.ResponseType)) { - this.ResponseType = data().deepCopy(fields()[4].schema(), other.ResponseType); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.ResponseCode)) { - this.ResponseCode = data().deepCopy(fields()[5].schema(), other.ResponseCode); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.ResponseServer)) { - this.ResponseServer = data().deepCopy(fields()[6].schema(), other.ResponseServer); - fieldSetFlags()[6] = true; - } - if (isValidValue(fields()[7], other.ResponseLink)) { - this.ResponseLink = data().deepCopy(fields()[7].schema(), other.ResponseLink); - fieldSetFlags()[7] = true; - } - if (isValidValue(fields()[8], other.Content)) { - this.Content = data().deepCopy(fields()[8].schema(), other.Content); - fieldSetFlags()[8] = true; - } - if (isValidValue(fields()[9], other.SPARQLDESCpreds)) { - this.SPARQLDESCpreds = data().deepCopy(fields()[9].schema(), other.SPARQLDESCpreds); - fieldSetFlags()[9] = true; - } - if (isValidValue(fields()[10], other.voiDpreds)) { - this.voiDpreds = data().deepCopy(fields()[10].schema(), other.voiDpreds); - fieldSetFlags()[10] = true; - } - } - - /** Creates a Builder by copying an existing DGETInfo instance */ - private Builder(sparqles.avro.discovery.DGETInfo other) { - super(sparqles.avro.discovery.DGETInfo.SCHEMA$); - if (isValidValue(fields()[0], other.allowedByRobotsTXT)) { - this.allowedByRobotsTXT = - data().deepCopy(fields()[0].schema(), other.allowedByRobotsTXT); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.Operation)) { - 
this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.URL)) { - this.URL = data().deepCopy(fields()[2].schema(), other.URL); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.Exception)) { - this.Exception = data().deepCopy(fields()[3].schema(), other.Exception); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.ResponseType)) { - this.ResponseType = data().deepCopy(fields()[4].schema(), other.ResponseType); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.ResponseCode)) { - this.ResponseCode = data().deepCopy(fields()[5].schema(), other.ResponseCode); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.ResponseServer)) { - this.ResponseServer = data().deepCopy(fields()[6].schema(), other.ResponseServer); - fieldSetFlags()[6] = true; - } - if (isValidValue(fields()[7], other.ResponseLink)) { - this.ResponseLink = data().deepCopy(fields()[7].schema(), other.ResponseLink); - fieldSetFlags()[7] = true; - } - if (isValidValue(fields()[8], other.Content)) { - this.Content = data().deepCopy(fields()[8].schema(), other.Content); - fieldSetFlags()[8] = true; - } - if (isValidValue(fields()[9], other.SPARQLDESCpreds)) { - this.SPARQLDESCpreds = data().deepCopy(fields()[9].schema(), other.SPARQLDESCpreds); - fieldSetFlags()[9] = true; - } - if (isValidValue(fields()[10], other.voiDpreds)) { - this.voiDpreds = data().deepCopy(fields()[10].schema(), other.voiDpreds); - fieldSetFlags()[10] = true; - } - } - - /** Gets the value of the 'allowedByRobotsTXT' field */ - public java.lang.Boolean getAllowedByRobotsTXT() { - return allowedByRobotsTXT; - } - - /** Sets the value of the 'allowedByRobotsTXT' field */ - public sparqles.avro.discovery.DGETInfo.Builder setAllowedByRobotsTXT(boolean value) { - validate(fields()[0], value); - this.allowedByRobotsTXT = value; - fieldSetFlags()[0] = true; - return this; - } 
- - /** Checks whether the 'allowedByRobotsTXT' field has been set */ - public boolean hasAllowedByRobotsTXT() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'allowedByRobotsTXT' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearAllowedByRobotsTXT() { - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'Operation' field */ - public java.lang.CharSequence getOperation() { - return Operation; - } - - /** Sets the value of the 'Operation' field */ - public sparqles.avro.discovery.DGETInfo.Builder setOperation(java.lang.CharSequence value) { - validate(fields()[1], value); - this.Operation = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'Operation' field has been set */ - public boolean hasOperation() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 'Operation' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearOperation() { - Operation = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'URL' field */ - public java.lang.CharSequence getURL() { - return URL; - } - - /** Sets the value of the 'URL' field */ - public sparqles.avro.discovery.DGETInfo.Builder setURL(java.lang.CharSequence value) { - validate(fields()[2], value); - this.URL = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'URL' field has been set */ - public boolean hasURL() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'URL' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearURL() { - URL = null; - fieldSetFlags()[2] = false; - return this; - } - - /** Gets the value of the 'Exception' field */ - public java.lang.CharSequence getException() { - return Exception; - } - - /** Sets the value of the 'Exception' field */ - public sparqles.avro.discovery.DGETInfo.Builder setException(java.lang.CharSequence value) { - validate(fields()[3], value); - this.Exception = value; - 
fieldSetFlags()[3] = true; - return this; - } - - /** Checks whether the 'Exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[3]; - } - - /** Clears the value of the 'Exception' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearException() { - Exception = null; - fieldSetFlags()[3] = false; - return this; - } - - /** Gets the value of the 'ResponseType' field */ - public java.lang.CharSequence getResponseType() { - return ResponseType; - } - - /** Sets the value of the 'ResponseType' field */ - public sparqles.avro.discovery.DGETInfo.Builder setResponseType( - java.lang.CharSequence value) { - validate(fields()[4], value); - this.ResponseType = value; - fieldSetFlags()[4] = true; - return this; - } - - /** Checks whether the 'ResponseType' field has been set */ - public boolean hasResponseType() { - return fieldSetFlags()[4]; - } - - /** Clears the value of the 'ResponseType' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearResponseType() { - ResponseType = null; - fieldSetFlags()[4] = false; - return this; - } - - /** Gets the value of the 'ResponseCode' field */ - public java.lang.CharSequence getResponseCode() { - return ResponseCode; - } - - /** Sets the value of the 'ResponseCode' field */ - public sparqles.avro.discovery.DGETInfo.Builder setResponseCode( - java.lang.CharSequence value) { - validate(fields()[5], value); - this.ResponseCode = value; - fieldSetFlags()[5] = true; - return this; - } - - /** Checks whether the 'ResponseCode' field has been set */ - public boolean hasResponseCode() { - return fieldSetFlags()[5]; - } - - /** Clears the value of the 'ResponseCode' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearResponseCode() { - ResponseCode = null; - fieldSetFlags()[5] = false; - return this; - } - - /** Gets the value of the 'ResponseServer' field */ - public java.lang.CharSequence getResponseServer() { - return ResponseServer; - } - - /** Sets the value of the 
'ResponseServer' field */ - public sparqles.avro.discovery.DGETInfo.Builder setResponseServer( - java.lang.CharSequence value) { - validate(fields()[6], value); - this.ResponseServer = value; - fieldSetFlags()[6] = true; - return this; - } - - /** Checks whether the 'ResponseServer' field has been set */ - public boolean hasResponseServer() { - return fieldSetFlags()[6]; - } - - /** Clears the value of the 'ResponseServer' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearResponseServer() { - ResponseServer = null; - fieldSetFlags()[6] = false; - return this; - } - - /** Gets the value of the 'ResponseLink' field */ - public java.lang.CharSequence getResponseLink() { - return ResponseLink; - } - - /** Sets the value of the 'ResponseLink' field */ - public sparqles.avro.discovery.DGETInfo.Builder setResponseLink( - java.lang.CharSequence value) { - validate(fields()[7], value); - this.ResponseLink = value; - fieldSetFlags()[7] = true; - return this; - } - - /** Checks whether the 'ResponseLink' field has been set */ - public boolean hasResponseLink() { - return fieldSetFlags()[7]; - } - - /** Clears the value of the 'ResponseLink' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearResponseLink() { - ResponseLink = null; - fieldSetFlags()[7] = false; - return this; - } - - /** Gets the value of the 'Content' field */ - public java.lang.CharSequence getContent() { - return Content; - } - - /** Sets the value of the 'Content' field */ - public sparqles.avro.discovery.DGETInfo.Builder setContent(java.lang.CharSequence value) { - validate(fields()[8], value); - this.Content = value; - fieldSetFlags()[8] = true; - return this; - } - - /** Checks whether the 'Content' field has been set */ - public boolean hasContent() { - return fieldSetFlags()[8]; - } - - /** Clears the value of the 'Content' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearContent() { - Content = null; - fieldSetFlags()[8] = false; - return this; - } - - /** 
Gets the value of the 'SPARQLDESCpreds' field */ - public java.util.Map getSPARQLDESCpreds() { - return SPARQLDESCpreds; - } - - /** Sets the value of the 'SPARQLDESCpreds' field */ - public sparqles.avro.discovery.DGETInfo.Builder setSPARQLDESCpreds( - java.util.Map value) { - validate(fields()[9], value); - this.SPARQLDESCpreds = value; - fieldSetFlags()[9] = true; - return this; - } - - /** Checks whether the 'SPARQLDESCpreds' field has been set */ - public boolean hasSPARQLDESCpreds() { - return fieldSetFlags()[9]; - } - - /** Clears the value of the 'SPARQLDESCpreds' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearSPARQLDESCpreds() { - SPARQLDESCpreds = null; - fieldSetFlags()[9] = false; - return this; - } - - /** Gets the value of the 'voiDpreds' field */ - public java.util.Map getVoiDpreds() { - return voiDpreds; - } - - /** Sets the value of the 'voiDpreds' field */ - public sparqles.avro.discovery.DGETInfo.Builder setVoiDpreds( - java.util.Map value) { - validate(fields()[10], value); - this.voiDpreds = value; - fieldSetFlags()[10] = true; - return this; - } - - /** Checks whether the 'voiDpreds' field has been set */ - public boolean hasVoiDpreds() { - return fieldSetFlags()[10]; - } - - /** Clears the value of the 'voiDpreds' field */ - public sparqles.avro.discovery.DGETInfo.Builder clearVoiDpreds() { - voiDpreds = null; - fieldSetFlags()[10] = false; - return this; - } - - @Override - public DGETInfo build() { - try { - DGETInfo record = new DGETInfo(); - record.allowedByRobotsTXT = - fieldSetFlags()[0] - ? this.allowedByRobotsTXT - : (java.lang.Boolean) defaultValue(fields()[0]); - record.Operation = - fieldSetFlags()[1] - ? this.Operation - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.URL = - fieldSetFlags()[2] - ? this.URL - : (java.lang.CharSequence) defaultValue(fields()[2]); - record.Exception = - fieldSetFlags()[3] - ? 
this.Exception - : (java.lang.CharSequence) defaultValue(fields()[3]); - record.ResponseType = - fieldSetFlags()[4] - ? this.ResponseType - : (java.lang.CharSequence) defaultValue(fields()[4]); - record.ResponseCode = - fieldSetFlags()[5] - ? this.ResponseCode - : (java.lang.CharSequence) defaultValue(fields()[5]); - record.ResponseServer = - fieldSetFlags()[6] - ? this.ResponseServer - : (java.lang.CharSequence) defaultValue(fields()[6]); - record.ResponseLink = - fieldSetFlags()[7] - ? this.ResponseLink - : (java.lang.CharSequence) defaultValue(fields()[7]); - record.Content = - fieldSetFlags()[8] - ? this.Content - : (java.lang.CharSequence) defaultValue(fields()[8]); - record.SPARQLDESCpreds = - fieldSetFlags()[9] - ? this.SPARQLDESCpreds - : (java.util.Map) - defaultValue(fields()[9]); - record.voiDpreds = - fieldSetFlags()[10] - ? this.voiDpreds - : (java.util.Map) - defaultValue(fields()[10]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } - } + * @param value The value of 'voiDpreds'. + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder setVoiDpreds( + java.util.Map value) { + validate(fields()[10], value); + this.voiDpreds = value; + fieldSetFlags()[10] = true; + return this; + } + + /** + * Checks whether the 'voiDpreds' field has been set. + * + * @return True if the 'voiDpreds' field has been set, false otherwise. + */ + public boolean hasVoiDpreds() { + return fieldSetFlags()[10]; } + + /** + * Clears the value of the 'voiDpreds' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DGETInfo.Builder clearVoiDpreds() { + voiDpreds = null; + fieldSetFlags()[10] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public DGETInfo build() { + try { + DGETInfo record = new DGETInfo(); + record.allowedByRobotsTXT = + fieldSetFlags()[0] + ? 
this.allowedByRobotsTXT + : (java.lang.Boolean) defaultValue(fields()[0]); + record.Operation = + fieldSetFlags()[1] + ? this.Operation + : (java.lang.CharSequence) defaultValue(fields()[1]); + record.URL = + fieldSetFlags()[2] ? this.URL : (java.lang.CharSequence) defaultValue(fields()[2]); + record.Exception = + fieldSetFlags()[3] + ? this.Exception + : (java.lang.CharSequence) defaultValue(fields()[3]); + record.ResponseType = + fieldSetFlags()[4] + ? this.ResponseType + : (java.lang.CharSequence) defaultValue(fields()[4]); + record.ResponseCode = + fieldSetFlags()[5] + ? this.ResponseCode + : (java.lang.CharSequence) defaultValue(fields()[5]); + record.ResponseServer = + fieldSetFlags()[6] + ? this.ResponseServer + : (java.lang.CharSequence) defaultValue(fields()[6]); + record.ResponseLink = + fieldSetFlags()[7] + ? this.ResponseLink + : (java.lang.CharSequence) defaultValue(fields()[7]); + record.Content = + fieldSetFlags()[8] ? this.Content : (java.lang.CharSequence) defaultValue(fields()[8]); + record.SPARQLDESCpreds = + fieldSetFlags()[9] + ? this.SPARQLDESCpreds + : (java.util.Map) + defaultValue(fields()[9]); + record.voiDpreds = + fieldSetFlags()[10] + ? 
this.voiDpreds + : (java.util.Map) + defaultValue(fields()[10]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } } diff --git a/backend/src/main/java/sparqles/avro/discovery/DResult.java b/backend/src/main/java/sparqles/avro/discovery/DResult.java index 5fdd489e..cd58e081 100644 --- a/backend/src/main/java/sparqles/avro/discovery/DResult.java +++ b/backend/src/main/java/sparqles/avro/discovery/DResult.java @@ -5,343 +5,667 @@ */ package sparqles.avro.discovery; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; + @org.apache.avro.specific.AvroGenerated public class DResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"DResult\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"RobotsTXT\",\"type\":{\"type\":\"record\",\"name\":\"RobotsTXT\",\"fields\":[{\"name\":\"hasRobotsTXT\",\"type\":\"boolean\",\"default\":false},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"sitemapXML\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQL\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQLMatch\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLVoiD\",\"type\":\"boolean\",\"default\":false},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]}]}},{\"name\":\"descriptionFiles\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"DGETInfo\",\"fields\":[{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseType\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseCode\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseServer\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseLink\",\"type\":[\"string\",\"null\"]},{\"name\":\"Content\",\"type\":[\"string\",\"null\"]},{\"name\":\"SPARQLDESCpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}},{\"name\":\"voiDpreds\",\"type\":{\"type\":\"map\",\"value
s\":[\"int\"]}}]}}},{\"name\":\"queryInfo\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"QueryInfo\",\"fields\":[{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Results\",\"type\":{\"type\":\"array\",\"items\":\"string\"}}]}}}]}"); - @Deprecated public sparqles.avro.EndpointResult endpointResult; - @Deprecated public sparqles.avro.discovery.RobotsTXT RobotsTXT; - @Deprecated public java.util.List descriptionFiles; - @Deprecated public java.util.List queryInfo; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -1692480750071709782L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"DResult\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"RobotsTXT\",\"type\":{\"type\":\"record\",\"name\":\"RobotsTXT\",\"fields\":[{\"name\":\"hasRobotsTXT\",\"type\":\"boolean\",\"default\":false},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"sitemapXML\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQL\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQLMatch\",\"type\":\"boolean\",\"default\":fal
se},{\"name\":\"sitemapXMLVoiD\",\"type\":\"boolean\",\"default\":false},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]}]}},{\"name\":\"descriptionFiles\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"DGETInfo\",\"fields\":[{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseType\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseCode\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseServer\",\"type\":[\"string\",\"null\"]},{\"name\":\"ResponseLink\",\"type\":[\"string\",\"null\"]},{\"name\":\"Content\",\"type\":[\"string\",\"null\"]},{\"name\":\"SPARQLDESCpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}},{\"name\":\"voiDpreds\",\"type\":{\"type\":\"map\",\"values\":[\"int\"]}}]}}},{\"name\":\"queryInfo\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"QueryInfo\",\"fields\":[{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Results\",\"type\":{\"type\":\"array\",\"items\":\"string\"}}]}}}],\"import\":\"EndpointResult.avsc\"}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this DResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a DResult from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a DResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static DResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.EndpointResult endpointResult; + private sparqles.avro.discovery.RobotsTXT RobotsTXT; + private java.util.List descriptionFiles; + private java.util.List queryInfo; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public DResult() {} + + /** + * All-args constructor. 
+ * + * @param endpointResult The new value for endpointResult + * @param RobotsTXT The new value for RobotsTXT + * @param descriptionFiles The new value for descriptionFiles + * @param queryInfo The new value for queryInfo + */ + public DResult( + sparqles.avro.EndpointResult endpointResult, + sparqles.avro.discovery.RobotsTXT RobotsTXT, + java.util.List descriptionFiles, + java.util.List queryInfo) { + this.endpointResult = endpointResult; + this.RobotsTXT = RobotsTXT; + this.descriptionFiles = descriptionFiles; + this.queryInfo = queryInfo; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpointResult; + case 1: + return RobotsTXT; + case 2: + return descriptionFiles; + case 3: + return queryInfo; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpointResult = (sparqles.avro.EndpointResult) value$; + break; + case 1: + RobotsTXT = (sparqles.avro.discovery.RobotsTXT) value$; + break; + case 2: + descriptionFiles = (java.util.List) value$; + break; + case 3: + queryInfo = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value of the 'endpointResult' field. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; + } + + /** + * Sets the value of the 'endpointResult' field. + * + * @param value the value to set. 
+ */ + public void setEndpointResult(sparqles.avro.EndpointResult value) { + this.endpointResult = value; + } + + /** + * Gets the value of the 'RobotsTXT' field. + * + * @return The value of the 'RobotsTXT' field. + */ + public sparqles.avro.discovery.RobotsTXT getRobotsTXT() { + return RobotsTXT; + } + + /** + * Sets the value of the 'RobotsTXT' field. + * + * @param value the value to set. + */ + public void setRobotsTXT(sparqles.avro.discovery.RobotsTXT value) { + this.RobotsTXT = value; + } + + /** + * Gets the value of the 'descriptionFiles' field. + * + * @return The value of the 'descriptionFiles' field. + */ + public java.util.List getDescriptionFiles() { + return descriptionFiles; + } + + /** + * Sets the value of the 'descriptionFiles' field. + * + * @param value the value to set. + */ + public void setDescriptionFiles(java.util.List value) { + this.descriptionFiles = value; + } + + /** + * Gets the value of the 'queryInfo' field. + * + * @return The value of the 'queryInfo' field. + */ + public java.util.List getQueryInfo() { + return queryInfo; + } + + /** + * Sets the value of the 'queryInfo' field. + * + * @param value the value to set. + */ + public void setQueryInfo(java.util.List value) { + this.queryInfo = value; + } + + /** + * Creates a new DResult RecordBuilder. + * + * @return A new DResult RecordBuilder + */ + public static sparqles.avro.discovery.DResult.Builder newBuilder() { + return new sparqles.avro.discovery.DResult.Builder(); + } + + /** + * Creates a new DResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new DResult RecordBuilder + */ + public static sparqles.avro.discovery.DResult.Builder newBuilder( + sparqles.avro.discovery.DResult.Builder other) { + if (other == null) { + return new sparqles.avro.discovery.DResult.Builder(); + } else { + return new sparqles.avro.discovery.DResult.Builder(other); + } + } + + /** + * Creates a new DResult RecordBuilder by copying an existing DResult instance. + * + * @param other The existing instance to copy. + * @return A new DResult RecordBuilder + */ + public static sparqles.avro.discovery.DResult.Builder newBuilder( + sparqles.avro.discovery.DResult other) { + if (other == null) { + return new sparqles.avro.discovery.DResult.Builder(); + } else { + return new sparqles.avro.discovery.DResult.Builder(other); + } + } + + /** RecordBuilder for DResult instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.EndpointResult endpointResult; + private sparqles.avro.EndpointResult.Builder endpointResultBuilder; + private sparqles.avro.discovery.RobotsTXT RobotsTXT; + private sparqles.avro.discovery.RobotsTXT.Builder RobotsTXTBuilder; + private java.util.List descriptionFiles; + private java.util.List queryInfo; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public DResult() {} - - /** All-args constructor. 
*/ - public DResult( - sparqles.avro.EndpointResult endpointResult, - sparqles.avro.discovery.RobotsTXT RobotsTXT, - java.util.List descriptionFiles, - java.util.List queryInfo) { - this.endpointResult = endpointResult; - this.RobotsTXT = RobotsTXT; - this.descriptionFiles = descriptionFiles; - this.queryInfo = queryInfo; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + private Builder(sparqles.avro.discovery.DResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointResultBuilder()) { + this.endpointResultBuilder = + sparqles.avro.EndpointResult.newBuilder(other.getEndpointResultBuilder()); + } + if (isValidValue(fields()[1], other.RobotsTXT)) { + this.RobotsTXT = data().deepCopy(fields()[1].schema(), other.RobotsTXT); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (other.hasRobotsTXTBuilder()) { + this.RobotsTXTBuilder = + sparqles.avro.discovery.RobotsTXT.newBuilder(other.getRobotsTXTBuilder()); + } + if (isValidValue(fields()[2], other.descriptionFiles)) { + this.descriptionFiles = data().deepCopy(fields()[2].schema(), other.descriptionFiles); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.queryInfo)) { + this.queryInfo = data().deepCopy(fields()[3].schema(), other.queryInfo); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } } - /** Creates a new DResult RecordBuilder */ - public static sparqles.avro.discovery.DResult.Builder newBuilder() { - return new sparqles.avro.discovery.DResult.Builder(); + /** + * Creates a Builder by copying an existing DResult instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.discovery.DResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = true; + } + this.endpointResultBuilder = null; + if (isValidValue(fields()[1], other.RobotsTXT)) { + this.RobotsTXT = data().deepCopy(fields()[1].schema(), other.RobotsTXT); + fieldSetFlags()[1] = true; + } + this.RobotsTXTBuilder = null; + if (isValidValue(fields()[2], other.descriptionFiles)) { + this.descriptionFiles = data().deepCopy(fields()[2].schema(), other.descriptionFiles); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.queryInfo)) { + this.queryInfo = data().deepCopy(fields()[3].schema(), other.queryInfo); + fieldSetFlags()[3] = true; + } } - /** Creates a new DResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.discovery.DResult.Builder newBuilder( - sparqles.avro.discovery.DResult.Builder other) { - return new sparqles.avro.discovery.DResult.Builder(other); + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; } - /** Creates a new DResult RecordBuilder by copying an existing DResult instance */ - public static sparqles.avro.discovery.DResult.Builder newBuilder( - sparqles.avro.discovery.DResult other) { - return new sparqles.avro.discovery.DResult.Builder(other); + /** + * Sets the value of the 'endpointResult' field. + * + * @param value The value of 'endpointResult'. + * @return This builder. 
+ */ + public sparqles.avro.discovery.DResult.Builder setEndpointResult( + sparqles.avro.EndpointResult value) { + validate(fields()[0], value); + this.endpointResultBuilder = null; + this.endpointResult = value; + fieldSetFlags()[0] = true; + return this; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Checks whether the 'endpointResult' field has been set. + * + * @return True if the 'endpointResult' field has been set, false otherwise. + */ + public boolean hasEndpointResult() { + return fieldSetFlags()[0]; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpointResult; - case 1: - return RobotsTXT; - case 2: - return descriptionFiles; - case 3: - return queryInfo; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); + /** + * Gets the Builder instance for the 'endpointResult' field and creates one if it doesn't exist + * yet. + * + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder getEndpointResultBuilder() { + if (endpointResultBuilder == null) { + if (hasEndpointResult()) { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder(endpointResult)); + } else { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder()); } + } + return endpointResultBuilder; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpointResult = (sparqles.avro.EndpointResult) value$; - break; - case 1: - RobotsTXT = (sparqles.avro.discovery.RobotsTXT) value$; - break; - case 2: - descriptionFiles = (java.util.List) value$; - break; - case 3: - queryInfo = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the Builder instance for the 'endpointResult' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.discovery.DResult.Builder setEndpointResultBuilder( + sparqles.avro.EndpointResult.Builder value) { + clearEndpointResult(); + endpointResultBuilder = value; + return this; } - /** Gets the value of the 'endpointResult' field. */ - public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; + /** + * Checks whether the 'endpointResult' field has an active Builder instance + * + * @return True if the 'endpointResult' field has an active Builder instance + */ + public boolean hasEndpointResultBuilder() { + return endpointResultBuilder != null; } /** - * Sets the value of the 'endpointResult' field. + * Clears the value of the 'endpointResult' field. * - * @param value the value to set. + * @return This builder. */ - public void setEndpointResult(sparqles.avro.EndpointResult value) { - this.endpointResult = value; + public sparqles.avro.discovery.DResult.Builder clearEndpointResult() { + endpointResult = null; + endpointResultBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'RobotsTXT' field. */ + /** + * Gets the value of the 'RobotsTXT' field. + * + * @return The value. + */ public sparqles.avro.discovery.RobotsTXT getRobotsTXT() { - return RobotsTXT; + return RobotsTXT; } /** * Sets the value of the 'RobotsTXT' field. * - * @param value the value to set. 
+ * @param value The value of 'RobotsTXT'. + * @return This builder. */ - public void setRobotsTXT(sparqles.avro.discovery.RobotsTXT value) { - this.RobotsTXT = value; + public sparqles.avro.discovery.DResult.Builder setRobotsTXT( + sparqles.avro.discovery.RobotsTXT value) { + validate(fields()[1], value); + this.RobotsTXTBuilder = null; + this.RobotsTXT = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'descriptionFiles' field. */ - public java.util.List getDescriptionFiles() { - return descriptionFiles; + /** + * Checks whether the 'RobotsTXT' field has been set. + * + * @return True if the 'RobotsTXT' field has been set, false otherwise. + */ + public boolean hasRobotsTXT() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'descriptionFiles' field. + * Gets the Builder instance for the 'RobotsTXT' field and creates one if it doesn't exist yet. * - * @param value the value to set. + * @return This builder. */ - public void setDescriptionFiles(java.util.List value) { - this.descriptionFiles = value; + public sparqles.avro.discovery.RobotsTXT.Builder getRobotsTXTBuilder() { + if (RobotsTXTBuilder == null) { + if (hasRobotsTXT()) { + setRobotsTXTBuilder(sparqles.avro.discovery.RobotsTXT.newBuilder(RobotsTXT)); + } else { + setRobotsTXTBuilder(sparqles.avro.discovery.RobotsTXT.newBuilder()); + } + } + return RobotsTXTBuilder; } - /** Gets the value of the 'queryInfo' field. */ - public java.util.List getQueryInfo() { - return queryInfo; + /** + * Sets the Builder instance for the 'RobotsTXT' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.discovery.DResult.Builder setRobotsTXTBuilder( + sparqles.avro.discovery.RobotsTXT.Builder value) { + clearRobotsTXT(); + RobotsTXTBuilder = value; + return this; } /** - * Sets the value of the 'queryInfo' field. 
+ * Checks whether the 'RobotsTXT' field has an active Builder instance * - * @param value the value to set. + * @return True if the 'RobotsTXT' field has an active Builder instance */ - public void setQueryInfo(java.util.List value) { - this.queryInfo = value; + public boolean hasRobotsTXTBuilder() { + return RobotsTXTBuilder != null; } - /** RecordBuilder for DResult instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private sparqles.avro.EndpointResult endpointResult; - private sparqles.avro.discovery.RobotsTXT RobotsTXT; - private java.util.List descriptionFiles; - private java.util.List queryInfo; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.discovery.DResult.SCHEMA$); - } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.discovery.DResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.RobotsTXT)) { - this.RobotsTXT = data().deepCopy(fields()[1].schema(), other.RobotsTXT); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.descriptionFiles)) { - this.descriptionFiles = - data().deepCopy(fields()[2].schema(), other.descriptionFiles); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.queryInfo)) { - this.queryInfo = data().deepCopy(fields()[3].schema(), other.queryInfo); - fieldSetFlags()[3] = true; - } - } - - /** Creates a Builder by copying an existing DResult instance */ - private Builder(sparqles.avro.discovery.DResult other) { - super(sparqles.avro.discovery.DResult.SCHEMA$); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; 
- } - if (isValidValue(fields()[1], other.RobotsTXT)) { - this.RobotsTXT = data().deepCopy(fields()[1].schema(), other.RobotsTXT); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.descriptionFiles)) { - this.descriptionFiles = - data().deepCopy(fields()[2].schema(), other.descriptionFiles); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.queryInfo)) { - this.queryInfo = data().deepCopy(fields()[3].schema(), other.queryInfo); - fieldSetFlags()[3] = true; - } - } - - /** Gets the value of the 'endpointResult' field */ - public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; - } - - /** Sets the value of the 'endpointResult' field */ - public sparqles.avro.discovery.DResult.Builder setEndpointResult( - sparqles.avro.EndpointResult value) { - validate(fields()[0], value); - this.endpointResult = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'endpointResult' field has been set */ - public boolean hasEndpointResult() { - return fieldSetFlags()[0]; - } + /** + * Clears the value of the 'RobotsTXT' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DResult.Builder clearRobotsTXT() { + RobotsTXT = null; + RobotsTXTBuilder = null; + fieldSetFlags()[1] = false; + return this; + } - /** Clears the value of the 'endpointResult' field */ - public sparqles.avro.discovery.DResult.Builder clearEndpointResult() { - endpointResult = null; - fieldSetFlags()[0] = false; - return this; - } + /** + * Gets the value of the 'descriptionFiles' field. + * + * @return The value. + */ + public java.util.List getDescriptionFiles() { + return descriptionFiles; + } - /** Gets the value of the 'RobotsTXT' field */ - public sparqles.avro.discovery.RobotsTXT getRobotsTXT() { - return RobotsTXT; - } + /** + * Sets the value of the 'descriptionFiles' field. + * + * @param value The value of 'descriptionFiles'. + * @return This builder. 
+ */ + public sparqles.avro.discovery.DResult.Builder setDescriptionFiles( + java.util.List value) { + validate(fields()[2], value); + this.descriptionFiles = value; + fieldSetFlags()[2] = true; + return this; + } - /** Sets the value of the 'RobotsTXT' field */ - public sparqles.avro.discovery.DResult.Builder setRobotsTXT( - sparqles.avro.discovery.RobotsTXT value) { - validate(fields()[1], value); - this.RobotsTXT = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Checks whether the 'descriptionFiles' field has been set. + * + * @return True if the 'descriptionFiles' field has been set, false otherwise. + */ + public boolean hasDescriptionFiles() { + return fieldSetFlags()[2]; + } - /** Checks whether the 'RobotsTXT' field has been set */ - public boolean hasRobotsTXT() { - return fieldSetFlags()[1]; - } + /** + * Clears the value of the 'descriptionFiles' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DResult.Builder clearDescriptionFiles() { + descriptionFiles = null; + fieldSetFlags()[2] = false; + return this; + } - /** Clears the value of the 'RobotsTXT' field */ - public sparqles.avro.discovery.DResult.Builder clearRobotsTXT() { - RobotsTXT = null; - fieldSetFlags()[1] = false; - return this; - } + /** + * Gets the value of the 'queryInfo' field. + * + * @return The value. + */ + public java.util.List getQueryInfo() { + return queryInfo; + } - /** Gets the value of the 'descriptionFiles' field */ - public java.util.List getDescriptionFiles() { - return descriptionFiles; - } + /** + * Sets the value of the 'queryInfo' field. + * + * @param value The value of 'queryInfo'. + * @return This builder. 
+ */ + public sparqles.avro.discovery.DResult.Builder setQueryInfo( + java.util.List value) { + validate(fields()[3], value); + this.queryInfo = value; + fieldSetFlags()[3] = true; + return this; + } - /** Sets the value of the 'descriptionFiles' field */ - public sparqles.avro.discovery.DResult.Builder setDescriptionFiles( - java.util.List value) { - validate(fields()[2], value); - this.descriptionFiles = value; - fieldSetFlags()[2] = true; - return this; - } + /** + * Checks whether the 'queryInfo' field has been set. + * + * @return True if the 'queryInfo' field has been set, false otherwise. + */ + public boolean hasQueryInfo() { + return fieldSetFlags()[3]; + } - /** Checks whether the 'descriptionFiles' field has been set */ - public boolean hasDescriptionFiles() { - return fieldSetFlags()[2]; - } + /** + * Clears the value of the 'queryInfo' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.DResult.Builder clearQueryInfo() { + queryInfo = null; + fieldSetFlags()[3] = false; + return this; + } - /** Clears the value of the 'descriptionFiles' field */ - public sparqles.avro.discovery.DResult.Builder clearDescriptionFiles() { - descriptionFiles = null; - fieldSetFlags()[2] = false; - return this; + @Override + @SuppressWarnings("unchecked") + public DResult build() { + try { + DResult record = new DResult(); + if (endpointResultBuilder != null) { + try { + record.endpointResult = this.endpointResultBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpointResult")); + throw e; + } + } else { + record.endpointResult = + fieldSetFlags()[0] + ? 
this.endpointResult + : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); } - - /** Gets the value of the 'queryInfo' field */ - public java.util.List getQueryInfo() { - return queryInfo; + if (RobotsTXTBuilder != null) { + try { + record.RobotsTXT = this.RobotsTXTBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("RobotsTXT")); + throw e; + } + } else { + record.RobotsTXT = + fieldSetFlags()[1] + ? this.RobotsTXT + : (sparqles.avro.discovery.RobotsTXT) defaultValue(fields()[1]); } + record.descriptionFiles = + fieldSetFlags()[2] + ? this.descriptionFiles + : (java.util.List) defaultValue(fields()[2]); + record.queryInfo = + fieldSetFlags()[3] + ? this.queryInfo + : (java.util.List) defaultValue(fields()[3]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Sets the value of the 'queryInfo' field */ - public sparqles.avro.discovery.DResult.Builder setQueryInfo( - java.util.List value) { - validate(fields()[3], value); - this.queryInfo = value; - fieldSetFlags()[3] = true; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Checks whether the 'queryInfo' field has been set */ - public boolean hasQueryInfo() { - return fieldSetFlags()[3]; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Clears the value of the 'queryInfo' field */ - public sparqles.avro.discovery.DResult.Builder clearQueryInfo() { - queryInfo = null; - fieldSetFlags()[3] = false; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) 
MODEL$.createDatumReader(SCHEMA$); - @Override - public DResult build() { - try { - DResult record = new DResult(); - record.endpointResult = - fieldSetFlags()[0] - ? this.endpointResult - : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); - record.RobotsTXT = - fieldSetFlags()[1] - ? this.RobotsTXT - : (sparqles.avro.discovery.RobotsTXT) defaultValue(fields()[1]); - record.descriptionFiles = - fieldSetFlags()[2] - ? this.descriptionFiles - : (java.util.List) - defaultValue(fields()[2]); - record.queryInfo = - fieldSetFlags()[3] - ? this.queryInfo - : (java.util.List) - defaultValue(fields()[3]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } - } - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } } diff --git a/backend/src/main/java/sparqles/avro/discovery/QueryInfo.java b/backend/src/main/java/sparqles/avro/discovery/QueryInfo.java index 59b5c479..3882311a 100644 --- a/backend/src/main/java/sparqles/avro/discovery/QueryInfo.java +++ b/backend/src/main/java/sparqles/avro/discovery/QueryInfo.java @@ -5,401 +5,762 @@ */ package sparqles.avro.discovery; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class QueryInfo extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"QueryInfo\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Results\",\"type\":{\"type\":\"array\",\"items\":\"string\"}}]}"); - @Deprecated public java.lang.CharSequence URL; - @Deprecated public java.lang.CharSequence Operation; - @Deprecated public java.lang.CharSequence Exception; - @Deprecated public boolean allowedByRobotsTXT; - @Deprecated public java.util.List Results; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -6620100892292357578L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"QueryInfo\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"URL\",\"type\":\"string\"},{\"name\":\"Operation\",\"type\":\"string\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"Results\",\"type\":{\"type\":\"array\",\"items\":\"string\"}}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. 
+ * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this QueryInfo to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a QueryInfo from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a QueryInfo instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static QueryInfo fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence URL; + private java.lang.CharSequence Operation; + private java.lang.CharSequence Exception; + private boolean allowedByRobotsTXT; + private java.util.List Results; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public QueryInfo() {} + + /** + * All-args constructor. 
+ * + * @param URL The new value for URL + * @param Operation The new value for Operation + * @param Exception The new value for Exception + * @param allowedByRobotsTXT The new value for allowedByRobotsTXT + * @param Results The new value for Results + */ + public QueryInfo( + java.lang.CharSequence URL, + java.lang.CharSequence Operation, + java.lang.CharSequence Exception, + java.lang.Boolean allowedByRobotsTXT, + java.util.List Results) { + this.URL = URL; + this.Operation = Operation; + this.Exception = Exception; + this.allowedByRobotsTXT = allowedByRobotsTXT; + this.Results = Results; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return URL; + case 1: + return Operation; + case 2: + return Exception; + case 3: + return allowedByRobotsTXT; + case 4: + return Results; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + URL = (java.lang.CharSequence) value$; + break; + case 1: + Operation = (java.lang.CharSequence) value$; + break; + case 2: + Exception = (java.lang.CharSequence) value$; + break; + case 3: + allowedByRobotsTXT = (java.lang.Boolean) value$; + break; + case 4: + Results = (java.util.List) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'URL' field. + * + * @return The value of the 'URL' field. + */ + public java.lang.CharSequence getURL() { + return URL; + } + + /** + * Sets the value of the 'URL' field. + * + * @param value the value to set. 
+ */ + public void setURL(java.lang.CharSequence value) { + this.URL = value; + } + + /** + * Gets the value of the 'Operation' field. + * + * @return The value of the 'Operation' field. + */ + public java.lang.CharSequence getOperation() { + return Operation; + } + + /** + * Sets the value of the 'Operation' field. + * + * @param value the value to set. + */ + public void setOperation(java.lang.CharSequence value) { + this.Operation = value; + } + + /** + * Gets the value of the 'Exception' field. + * + * @return The value of the 'Exception' field. + */ + public java.lang.CharSequence getException() { + return Exception; + } + + /** + * Sets the value of the 'Exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.Exception = value; + } + + /** + * Gets the value of the 'allowedByRobotsTXT' field. + * + * @return The value of the 'allowedByRobotsTXT' field. + */ + public boolean getAllowedByRobotsTXT() { + return allowedByRobotsTXT; + } + + /** + * Sets the value of the 'allowedByRobotsTXT' field. + * + * @param value the value to set. + */ + public void setAllowedByRobotsTXT(boolean value) { + this.allowedByRobotsTXT = value; + } + + /** + * Gets the value of the 'Results' field. + * + * @return The value of the 'Results' field. + */ + public java.util.List getResults() { + return Results; + } + + /** + * Sets the value of the 'Results' field. + * + * @param value the value to set. + */ + public void setResults(java.util.List value) { + this.Results = value; + } + + /** + * Creates a new QueryInfo RecordBuilder. + * + * @return A new QueryInfo RecordBuilder + */ + public static sparqles.avro.discovery.QueryInfo.Builder newBuilder() { + return new sparqles.avro.discovery.QueryInfo.Builder(); + } + + /** + * Creates a new QueryInfo RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new QueryInfo RecordBuilder + */ + public static sparqles.avro.discovery.QueryInfo.Builder newBuilder( + sparqles.avro.discovery.QueryInfo.Builder other) { + if (other == null) { + return new sparqles.avro.discovery.QueryInfo.Builder(); + } else { + return new sparqles.avro.discovery.QueryInfo.Builder(other); + } + } + + /** + * Creates a new QueryInfo RecordBuilder by copying an existing QueryInfo instance. + * + * @param other The existing instance to copy. + * @return A new QueryInfo RecordBuilder + */ + public static sparqles.avro.discovery.QueryInfo.Builder newBuilder( + sparqles.avro.discovery.QueryInfo other) { + if (other == null) { + return new sparqles.avro.discovery.QueryInfo.Builder(); + } else { + return new sparqles.avro.discovery.QueryInfo.Builder(other); + } + } + + /** RecordBuilder for QueryInfo instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence URL; + private java.lang.CharSequence Operation; + private java.lang.CharSequence Exception; + private boolean allowedByRobotsTXT; + private java.util.List Results; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public QueryInfo() {} - - /** All-args constructor. 
*/ - public QueryInfo( - java.lang.CharSequence URL, - java.lang.CharSequence Operation, - java.lang.CharSequence Exception, - java.lang.Boolean allowedByRobotsTXT, - java.util.List Results) { - this.URL = URL; - this.Operation = Operation; - this.Exception = Exception; - this.allowedByRobotsTXT = allowedByRobotsTXT; - this.Results = Results; + private Builder(sparqles.avro.discovery.QueryInfo.Builder other) { + super(other); + if (isValidValue(fields()[0], other.URL)) { + this.URL = data().deepCopy(fields()[0].schema(), other.URL); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.Operation)) { + this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.Exception)) { + this.Exception = data().deepCopy(fields()[2].schema(), other.Exception); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.allowedByRobotsTXT)) { + this.allowedByRobotsTXT = data().deepCopy(fields()[3].schema(), other.allowedByRobotsTXT); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.Results)) { + this.Results = data().deepCopy(fields()[4].schema(), other.Results); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Creates a Builder by copying an existing QueryInfo instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(sparqles.avro.discovery.QueryInfo other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.URL)) { + this.URL = data().deepCopy(fields()[0].schema(), other.URL); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.Operation)) { + this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.Exception)) { + this.Exception = data().deepCopy(fields()[2].schema(), other.Exception); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.allowedByRobotsTXT)) { + this.allowedByRobotsTXT = data().deepCopy(fields()[3].schema(), other.allowedByRobotsTXT); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.Results)) { + this.Results = data().deepCopy(fields()[4].schema(), other.Results); + fieldSetFlags()[4] = true; + } } - /** Creates a new QueryInfo RecordBuilder */ - public static sparqles.avro.discovery.QueryInfo.Builder newBuilder() { - return new sparqles.avro.discovery.QueryInfo.Builder(); + /** + * Gets the value of the 'URL' field. + * + * @return The value. + */ + public java.lang.CharSequence getURL() { + return URL; } - /** Creates a new QueryInfo RecordBuilder by copying an existing Builder */ - public static sparqles.avro.discovery.QueryInfo.Builder newBuilder( - sparqles.avro.discovery.QueryInfo.Builder other) { - return new sparqles.avro.discovery.QueryInfo.Builder(other); + /** + * Sets the value of the 'URL' field. + * + * @param value The value of 'URL'. + * @return This builder. 
+ */ + public sparqles.avro.discovery.QueryInfo.Builder setURL(java.lang.CharSequence value) { + validate(fields()[0], value); + this.URL = value; + fieldSetFlags()[0] = true; + return this; } - /** Creates a new QueryInfo RecordBuilder by copying an existing QueryInfo instance */ - public static sparqles.avro.discovery.QueryInfo.Builder newBuilder( - sparqles.avro.discovery.QueryInfo other) { - return new sparqles.avro.discovery.QueryInfo.Builder(other); + /** + * Checks whether the 'URL' field has been set. + * + * @return True if the 'URL' field has been set, false otherwise. + */ + public boolean hasURL() { + return fieldSetFlags()[0]; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Clears the value of the 'URL' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.QueryInfo.Builder clearURL() { + URL = null; + fieldSetFlags()[0] = false; + return this; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return URL; - case 1: - return Operation; - case 2: - return Exception; - case 3: - return allowedByRobotsTXT; - case 4: - return Results; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'Operation' field. + * + * @return The value. + */ + public java.lang.CharSequence getOperation() { + return Operation; } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - URL = (java.lang.CharSequence) value$; - break; - case 1: - Operation = (java.lang.CharSequence) value$; - break; - case 2: - Exception = (java.lang.CharSequence) value$; - break; - case 3: - allowedByRobotsTXT = (java.lang.Boolean) value$; - break; - case 4: - Results = (java.util.List) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'Operation' field. + * + * @param value The value of 'Operation'. + * @return This builder. + */ + public sparqles.avro.discovery.QueryInfo.Builder setOperation(java.lang.CharSequence value) { + validate(fields()[1], value); + this.Operation = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'URL' field. */ - public java.lang.CharSequence getURL() { - return URL; + /** + * Checks whether the 'Operation' field has been set. + * + * @return True if the 'Operation' field has been set, false otherwise. + */ + public boolean hasOperation() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'URL' field. + * Clears the value of the 'Operation' field. * - * @param value the value to set. + * @return This builder. */ - public void setURL(java.lang.CharSequence value) { - this.URL = value; + public sparqles.avro.discovery.QueryInfo.Builder clearOperation() { + Operation = null; + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'Operation' field. */ - public java.lang.CharSequence getOperation() { - return Operation; + /** + * Gets the value of the 'Exception' field. + * + * @return The value. + */ + public java.lang.CharSequence getException() { + return Exception; } /** - * Sets the value of the 'Operation' field. + * Sets the value of the 'Exception' field. * - * @param value the value to set. + * @param value The value of 'Exception'. + * @return This builder. 
*/ - public void setOperation(java.lang.CharSequence value) { - this.Operation = value; + public sparqles.avro.discovery.QueryInfo.Builder setException(java.lang.CharSequence value) { + validate(fields()[2], value); + this.Exception = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'Exception' field. */ - public java.lang.CharSequence getException() { - return Exception; + /** + * Checks whether the 'Exception' field has been set. + * + * @return True if the 'Exception' field has been set, false otherwise. + */ + public boolean hasException() { + return fieldSetFlags()[2]; } /** - * Sets the value of the 'Exception' field. + * Clears the value of the 'Exception' field. * - * @param value the value to set. + * @return This builder. */ - public void setException(java.lang.CharSequence value) { - this.Exception = value; + public sparqles.avro.discovery.QueryInfo.Builder clearException() { + Exception = null; + fieldSetFlags()[2] = false; + return this; } - /** Gets the value of the 'allowedByRobotsTXT' field. */ - public java.lang.Boolean getAllowedByRobotsTXT() { - return allowedByRobotsTXT; + /** + * Gets the value of the 'allowedByRobotsTXT' field. + * + * @return The value. + */ + public boolean getAllowedByRobotsTXT() { + return allowedByRobotsTXT; } /** * Sets the value of the 'allowedByRobotsTXT' field. * - * @param value the value to set. + * @param value The value of 'allowedByRobotsTXT'. + * @return This builder. */ - public void setAllowedByRobotsTXT(java.lang.Boolean value) { - this.allowedByRobotsTXT = value; + public sparqles.avro.discovery.QueryInfo.Builder setAllowedByRobotsTXT(boolean value) { + validate(fields()[3], value); + this.allowedByRobotsTXT = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'Results' field. */ + /** + * Checks whether the 'allowedByRobotsTXT' field has been set. + * + * @return True if the 'allowedByRobotsTXT' field has been set, false otherwise. 
+ */ + public boolean hasAllowedByRobotsTXT() { + return fieldSetFlags()[3]; + } + + /** + * Clears the value of the 'allowedByRobotsTXT' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.QueryInfo.Builder clearAllowedByRobotsTXT() { + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'Results' field. + * + * @return The value. + */ public java.util.List getResults() { - return Results; + return Results; } /** * Sets the value of the 'Results' field. * - * @param value the value to set. + * @param value The value of 'Results'. + * @return This builder. */ - public void setResults(java.util.List value) { - this.Results = value; + public sparqles.avro.discovery.QueryInfo.Builder setResults( + java.util.List value) { + validate(fields()[4], value); + this.Results = value; + fieldSetFlags()[4] = true; + return this; } - /** RecordBuilder for QueryInfo instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'Results' field has been set. + * + * @return True if the 'Results' field has been set, false otherwise. + */ + public boolean hasResults() { + return fieldSetFlags()[4]; + } - private java.lang.CharSequence URL; - private java.lang.CharSequence Operation; - private java.lang.CharSequence Exception; - private boolean allowedByRobotsTXT; - private java.util.List Results; + /** + * Clears the value of the 'Results' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.QueryInfo.Builder clearResults() { + Results = null; + fieldSetFlags()[4] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.discovery.QueryInfo.SCHEMA$); - } + @Override + @SuppressWarnings("unchecked") + public QueryInfo build() { + try { + QueryInfo record = new QueryInfo(); + record.URL = + fieldSetFlags()[0] ? 
this.URL : (java.lang.CharSequence) defaultValue(fields()[0]); + record.Operation = + fieldSetFlags()[1] + ? this.Operation + : (java.lang.CharSequence) defaultValue(fields()[1]); + record.Exception = + fieldSetFlags()[2] + ? this.Exception + : (java.lang.CharSequence) defaultValue(fields()[2]); + record.allowedByRobotsTXT = + fieldSetFlags()[3] + ? this.allowedByRobotsTXT + : (java.lang.Boolean) defaultValue(fields()[3]); + record.Results = + fieldSetFlags()[4] + ? this.Results + : (java.util.List) defaultValue(fields()[4]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.URL); + + out.writeString(this.Operation); + + if (this.Exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.Exception); + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.discovery.QueryInfo.Builder other) { - super(other); - if (isValidValue(fields()[0], other.URL)) { - this.URL = data().deepCopy(fields()[0].schema(), other.URL); - 
fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.Operation)) { - this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.Exception)) { - this.Exception = data().deepCopy(fields()[2].schema(), other.Exception); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.allowedByRobotsTXT)) { - this.allowedByRobotsTXT = - data().deepCopy(fields()[3].schema(), other.allowedByRobotsTXT); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.Results)) { - this.Results = data().deepCopy(fields()[4].schema(), other.Results); - fieldSetFlags()[4] = true; - } - } + out.writeBoolean(this.allowedByRobotsTXT); - /** Creates a Builder by copying an existing QueryInfo instance */ - private Builder(sparqles.avro.discovery.QueryInfo other) { - super(sparqles.avro.discovery.QueryInfo.SCHEMA$); - if (isValidValue(fields()[0], other.URL)) { - this.URL = data().deepCopy(fields()[0].schema(), other.URL); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.Operation)) { - this.Operation = data().deepCopy(fields()[1].schema(), other.Operation); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.Exception)) { - this.Exception = data().deepCopy(fields()[2].schema(), other.Exception); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.allowedByRobotsTXT)) { - this.allowedByRobotsTXT = - data().deepCopy(fields()[3].schema(), other.allowedByRobotsTXT); - fieldSetFlags()[3] = true; + long size0 = this.Results.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (java.lang.CharSequence e0 : this.Results) { + actualSize0++; + out.startItem(); + out.writeString(e0); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } + + 
@Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.URL = in.readString(this.URL instanceof Utf8 ? (Utf8) this.URL : null); + + this.Operation = in.readString(this.Operation instanceof Utf8 ? (Utf8) this.Operation : null); + + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? (Utf8) this.Exception : null); + } + + this.allowedByRobotsTXT = in.readBoolean(); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.Results; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("Results").schema()); + this.Results = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readString(e0 instanceof Utf8 ? (Utf8) e0 : null); + a0.add(e0); + } + } + + } else { + for (int i = 0; i < 5; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.URL = in.readString(this.URL instanceof Utf8 ? (Utf8) this.URL : null); + break; + + case 1: + this.Operation = + in.readString(this.Operation instanceof Utf8 ? (Utf8) this.Operation : null); + break; + + case 2: + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? 
(Utf8) this.Exception : null); } - if (isValidValue(fields()[4], other.Results)) { - this.Results = data().deepCopy(fields()[4].schema(), other.Results); - fieldSetFlags()[4] = true; + break; + + case 3: + this.allowedByRobotsTXT = in.readBoolean(); + break; + + case 4: + long size0 = in.readArrayStart(); + java.util.List a0 = this.Results; + if (a0 == null) { + a0 = + new SpecificData.Array( + (int) size0, SCHEMA$.getField("Results").schema()); + this.Results = a0; + } else a0.clear(); + SpecificData.Array ga0 = + (a0 instanceof SpecificData.Array + ? (SpecificData.Array) a0 + : null); + for (; 0 < size0; size0 = in.arrayNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readString(e0 instanceof Utf8 ? (Utf8) e0 : null); + a0.add(e0); + } } - } - - /** Gets the value of the 'URL' field */ - public java.lang.CharSequence getURL() { - return URL; - } - - /** Sets the value of the 'URL' field */ - public sparqles.avro.discovery.QueryInfo.Builder setURL(java.lang.CharSequence value) { - validate(fields()[0], value); - this.URL = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'URL' field has been set */ - public boolean hasURL() { - return fieldSetFlags()[0]; - } + break; - /** Clears the value of the 'URL' field */ - public sparqles.avro.discovery.QueryInfo.Builder clearURL() { - URL = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'Operation' field */ - public java.lang.CharSequence getOperation() { - return Operation; - } - - /** Sets the value of the 'Operation' field */ - public sparqles.avro.discovery.QueryInfo.Builder setOperation( - java.lang.CharSequence value) { - validate(fields()[1], value); - this.Operation = value; - fieldSetFlags()[1] = true; - return this; - } - - /** Checks whether the 'Operation' field has been set */ - public boolean hasOperation() { - return fieldSetFlags()[1]; - } - - /** Clears the value of the 
'Operation' field */ - public sparqles.avro.discovery.QueryInfo.Builder clearOperation() { - Operation = null; - fieldSetFlags()[1] = false; - return this; - } - - /** Gets the value of the 'Exception' field */ - public java.lang.CharSequence getException() { - return Exception; - } - - /** Sets the value of the 'Exception' field */ - public sparqles.avro.discovery.QueryInfo.Builder setException( - java.lang.CharSequence value) { - validate(fields()[2], value); - this.Exception = value; - fieldSetFlags()[2] = true; - return this; - } - - /** Checks whether the 'Exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[2]; - } - - /** Clears the value of the 'Exception' field */ - public sparqles.avro.discovery.QueryInfo.Builder clearException() { - Exception = null; - fieldSetFlags()[2] = false; - return this; - } - - /** Gets the value of the 'allowedByRobotsTXT' field */ - public java.lang.Boolean getAllowedByRobotsTXT() { - return allowedByRobotsTXT; - } - - /** Sets the value of the 'allowedByRobotsTXT' field */ - public sparqles.avro.discovery.QueryInfo.Builder setAllowedByRobotsTXT(boolean value) { - validate(fields()[3], value); - this.allowedByRobotsTXT = value; - fieldSetFlags()[3] = true; - return this; - } - - /** Checks whether the 'allowedByRobotsTXT' field has been set */ - public boolean hasAllowedByRobotsTXT() { - return fieldSetFlags()[3]; - } - - /** Clears the value of the 'allowedByRobotsTXT' field */ - public sparqles.avro.discovery.QueryInfo.Builder clearAllowedByRobotsTXT() { - fieldSetFlags()[3] = false; - return this; - } - - /** Gets the value of the 'Results' field */ - public java.util.List getResults() { - return Results; - } - - /** Sets the value of the 'Results' field */ - public sparqles.avro.discovery.QueryInfo.Builder setResults( - java.util.List value) { - validate(fields()[4], value); - this.Results = value; - fieldSetFlags()[4] = true; - return this; - } - - /** Checks whether the 'Results' 
field has been set */ - public boolean hasResults() { - return fieldSetFlags()[4]; - } - - /** Clears the value of the 'Results' field */ - public sparqles.avro.discovery.QueryInfo.Builder clearResults() { - Results = null; - fieldSetFlags()[4] = false; - return this; - } - - @Override - public QueryInfo build() { - try { - QueryInfo record = new QueryInfo(); - record.URL = - fieldSetFlags()[0] - ? this.URL - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.Operation = - fieldSetFlags()[1] - ? this.Operation - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.Exception = - fieldSetFlags()[2] - ? this.Exception - : (java.lang.CharSequence) defaultValue(fields()[2]); - record.allowedByRobotsTXT = - fieldSetFlags()[3] - ? this.allowedByRobotsTXT - : (java.lang.Boolean) defaultValue(fields()[3]); - record.Results = - fieldSetFlags()[4] - ? this.Results - : (java.util.List) - defaultValue(fields()[4]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/discovery/RobotsTXT.java b/backend/src/main/java/sparqles/avro/discovery/RobotsTXT.java index 2cc2177f..5c2c0b22 100644 --- a/backend/src/main/java/sparqles/avro/discovery/RobotsTXT.java +++ b/backend/src/main/java/sparqles/avro/discovery/RobotsTXT.java @@ -5,517 +5,887 @@ */ package sparqles.avro.discovery; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class RobotsTXT extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new 
org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"RobotsTXT\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"hasRobotsTXT\",\"type\":\"boolean\",\"default\":false},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"sitemapXML\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQL\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQLMatch\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLVoiD\",\"type\":\"boolean\",\"default\":false},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]}]}"); - @Deprecated public boolean hasRobotsTXT; - @Deprecated public boolean allowedByRobotsTXT; - @Deprecated public boolean sitemapXML; - @Deprecated public boolean sitemapXMLSPARQL; - @Deprecated public boolean sitemapXMLSPARQLMatch; - @Deprecated public boolean sitemapXMLVoiD; - @Deprecated public java.lang.CharSequence Exception; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 8698781811247583494L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"RobotsTXT\",\"namespace\":\"sparqles.avro.discovery\",\"fields\":[{\"name\":\"hasRobotsTXT\",\"type\":\"boolean\",\"default\":false},{\"name\":\"allowedByRobotsTXT\",\"type\":\"boolean\",\"default\":true},{\"name\":\"sitemapXML\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQL\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLSPARQLMatch\",\"type\":\"boolean\",\"default\":false},{\"name\":\"sitemapXMLVoiD\",\"type\":\"boolean\",\"default\":false},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new 
BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this RobotsTXT to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a RobotsTXT from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a RobotsTXT instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static RobotsTXT fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private boolean hasRobotsTXT; + private boolean allowedByRobotsTXT; + private boolean sitemapXML; + private boolean sitemapXMLSPARQL; + private boolean sitemapXMLSPARQLMatch; + private boolean sitemapXMLVoiD; + private java.lang.CharSequence Exception; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public RobotsTXT() {} + + /** + * All-args constructor. + * + * @param hasRobotsTXT The new value for hasRobotsTXT + * @param allowedByRobotsTXT The new value for allowedByRobotsTXT + * @param sitemapXML The new value for sitemapXML + * @param sitemapXMLSPARQL The new value for sitemapXMLSPARQL + * @param sitemapXMLSPARQLMatch The new value for sitemapXMLSPARQLMatch + * @param sitemapXMLVoiD The new value for sitemapXMLVoiD + * @param Exception The new value for Exception + */ + public RobotsTXT( + java.lang.Boolean hasRobotsTXT, + java.lang.Boolean allowedByRobotsTXT, + java.lang.Boolean sitemapXML, + java.lang.Boolean sitemapXMLSPARQL, + java.lang.Boolean sitemapXMLSPARQLMatch, + java.lang.Boolean sitemapXMLVoiD, + java.lang.CharSequence Exception) { + this.hasRobotsTXT = hasRobotsTXT; + this.allowedByRobotsTXT = allowedByRobotsTXT; + this.sitemapXML = sitemapXML; + this.sitemapXMLSPARQL = sitemapXMLSPARQL; + this.sitemapXMLSPARQLMatch = sitemapXMLSPARQLMatch; + this.sitemapXMLVoiD = sitemapXMLVoiD; + this.Exception = Exception; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + 
public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return hasRobotsTXT; + case 1: + return allowedByRobotsTXT; + case 2: + return sitemapXML; + case 3: + return sitemapXMLSPARQL; + case 4: + return sitemapXMLSPARQLMatch; + case 5: + return sitemapXMLVoiD; + case 6: + return Exception; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + hasRobotsTXT = (java.lang.Boolean) value$; + break; + case 1: + allowedByRobotsTXT = (java.lang.Boolean) value$; + break; + case 2: + sitemapXML = (java.lang.Boolean) value$; + break; + case 3: + sitemapXMLSPARQL = (java.lang.Boolean) value$; + break; + case 4: + sitemapXMLSPARQLMatch = (java.lang.Boolean) value$; + break; + case 5: + sitemapXMLVoiD = (java.lang.Boolean) value$; + break; + case 6: + Exception = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'hasRobotsTXT' field. + * + * @return The value of the 'hasRobotsTXT' field. + */ + public boolean getHasRobotsTXT() { + return hasRobotsTXT; + } + + /** + * Sets the value of the 'hasRobotsTXT' field. + * + * @param value the value to set. + */ + public void setHasRobotsTXT(boolean value) { + this.hasRobotsTXT = value; + } + + /** + * Gets the value of the 'allowedByRobotsTXT' field. + * + * @return The value of the 'allowedByRobotsTXT' field. + */ + public boolean getAllowedByRobotsTXT() { + return allowedByRobotsTXT; + } + + /** + * Sets the value of the 'allowedByRobotsTXT' field. + * + * @param value the value to set. 
+ */ + public void setAllowedByRobotsTXT(boolean value) { + this.allowedByRobotsTXT = value; + } + + /** + * Gets the value of the 'sitemapXML' field. + * + * @return The value of the 'sitemapXML' field. + */ + public boolean getSitemapXML() { + return sitemapXML; + } + + /** + * Sets the value of the 'sitemapXML' field. + * + * @param value the value to set. + */ + public void setSitemapXML(boolean value) { + this.sitemapXML = value; + } + + /** + * Gets the value of the 'sitemapXMLSPARQL' field. + * + * @return The value of the 'sitemapXMLSPARQL' field. + */ + public boolean getSitemapXMLSPARQL() { + return sitemapXMLSPARQL; + } + + /** + * Sets the value of the 'sitemapXMLSPARQL' field. + * + * @param value the value to set. + */ + public void setSitemapXMLSPARQL(boolean value) { + this.sitemapXMLSPARQL = value; + } + + /** + * Gets the value of the 'sitemapXMLSPARQLMatch' field. + * + * @return The value of the 'sitemapXMLSPARQLMatch' field. + */ + public boolean getSitemapXMLSPARQLMatch() { + return sitemapXMLSPARQLMatch; + } + + /** + * Sets the value of the 'sitemapXMLSPARQLMatch' field. + * + * @param value the value to set. + */ + public void setSitemapXMLSPARQLMatch(boolean value) { + this.sitemapXMLSPARQLMatch = value; + } + + /** + * Gets the value of the 'sitemapXMLVoiD' field. + * + * @return The value of the 'sitemapXMLVoiD' field. + */ + public boolean getSitemapXMLVoiD() { + return sitemapXMLVoiD; + } + + /** + * Sets the value of the 'sitemapXMLVoiD' field. + * + * @param value the value to set. + */ + public void setSitemapXMLVoiD(boolean value) { + this.sitemapXMLVoiD = value; + } + + /** + * Gets the value of the 'Exception' field. + * + * @return The value of the 'Exception' field. + */ + public java.lang.CharSequence getException() { + return Exception; + } + + /** + * Sets the value of the 'Exception' field. + * + * @param value the value to set. 
+ */ + public void setException(java.lang.CharSequence value) { + this.Exception = value; + } + + /** + * Creates a new RobotsTXT RecordBuilder. + * + * @return A new RobotsTXT RecordBuilder + */ + public static sparqles.avro.discovery.RobotsTXT.Builder newBuilder() { + return new sparqles.avro.discovery.RobotsTXT.Builder(); + } + + /** + * Creates a new RobotsTXT RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new RobotsTXT RecordBuilder + */ + public static sparqles.avro.discovery.RobotsTXT.Builder newBuilder( + sparqles.avro.discovery.RobotsTXT.Builder other) { + if (other == null) { + return new sparqles.avro.discovery.RobotsTXT.Builder(); + } else { + return new sparqles.avro.discovery.RobotsTXT.Builder(other); + } + } + + /** + * Creates a new RobotsTXT RecordBuilder by copying an existing RobotsTXT instance. + * + * @param other The existing instance to copy. + * @return A new RobotsTXT RecordBuilder + */ + public static sparqles.avro.discovery.RobotsTXT.Builder newBuilder( + sparqles.avro.discovery.RobotsTXT other) { + if (other == null) { + return new sparqles.avro.discovery.RobotsTXT.Builder(); + } else { + return new sparqles.avro.discovery.RobotsTXT.Builder(other); + } + } + + /** RecordBuilder for RobotsTXT instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private boolean hasRobotsTXT; + private boolean allowedByRobotsTXT; + private boolean sitemapXML; + private boolean sitemapXMLSPARQL; + private boolean sitemapXMLSPARQLMatch; + private boolean sitemapXMLVoiD; + private java.lang.CharSequence Exception; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. 
If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public RobotsTXT() {} - - /** All-args constructor. */ - public RobotsTXT( - java.lang.Boolean hasRobotsTXT, - java.lang.Boolean allowedByRobotsTXT, - java.lang.Boolean sitemapXML, - java.lang.Boolean sitemapXMLSPARQL, - java.lang.Boolean sitemapXMLSPARQLMatch, - java.lang.Boolean sitemapXMLVoiD, - java.lang.CharSequence Exception) { - this.hasRobotsTXT = hasRobotsTXT; - this.allowedByRobotsTXT = allowedByRobotsTXT; - this.sitemapXML = sitemapXML; - this.sitemapXMLSPARQL = sitemapXMLSPARQL; - this.sitemapXMLSPARQLMatch = sitemapXMLSPARQLMatch; - this.sitemapXMLVoiD = sitemapXMLVoiD; - this.Exception = Exception; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new RobotsTXT RecordBuilder */ - public static sparqles.avro.discovery.RobotsTXT.Builder newBuilder() { - return new sparqles.avro.discovery.RobotsTXT.Builder(); - } - - /** Creates a new RobotsTXT RecordBuilder by copying an existing Builder */ - public static sparqles.avro.discovery.RobotsTXT.Builder newBuilder( - sparqles.avro.discovery.RobotsTXT.Builder other) { - return new sparqles.avro.discovery.RobotsTXT.Builder(other); - } - - /** Creates a new RobotsTXT RecordBuilder by copying an existing RobotsTXT instance */ - public static sparqles.avro.discovery.RobotsTXT.Builder newBuilder( - sparqles.avro.discovery.RobotsTXT other) { - return new sparqles.avro.discovery.RobotsTXT.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return hasRobotsTXT; - case 1: - return allowedByRobotsTXT; - case 2: - return sitemapXML; - case 3: - return sitemapXMLSPARQL; - case 4: - return sitemapXMLSPARQLMatch; - case 5: - return sitemapXMLVoiD; - case 6: - return Exception; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + private Builder(sparqles.avro.discovery.RobotsTXT.Builder other) { + super(other); + if (isValidValue(fields()[0], other.hasRobotsTXT)) { + this.hasRobotsTXT = data().deepCopy(fields()[0].schema(), other.hasRobotsTXT); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.allowedByRobotsTXT)) { + this.allowedByRobotsTXT = data().deepCopy(fields()[1].schema(), other.allowedByRobotsTXT); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.sitemapXML)) { + this.sitemapXML = data().deepCopy(fields()[2].schema(), other.sitemapXML); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.sitemapXMLSPARQL)) { + this.sitemapXMLSPARQL = data().deepCopy(fields()[3].schema(), other.sitemapXMLSPARQL); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.sitemapXMLSPARQLMatch)) { + this.sitemapXMLSPARQLMatch = + data().deepCopy(fields()[4].schema(), other.sitemapXMLSPARQLMatch); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.sitemapXMLVoiD)) { + this.sitemapXMLVoiD = data().deepCopy(fields()[5].schema(), other.sitemapXMLVoiD); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.Exception)) { + this.Exception = data().deepCopy(fields()[6].schema(), other.Exception); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - hasRobotsTXT = (java.lang.Boolean) value$; - break; - case 1: - allowedByRobotsTXT = (java.lang.Boolean) value$; - break; - case 2: - sitemapXML = (java.lang.Boolean) value$; - break; - case 3: - sitemapXMLSPARQL = (java.lang.Boolean) value$; - break; - case 4: - sitemapXMLSPARQLMatch = (java.lang.Boolean) value$; - break; - case 5: - sitemapXMLVoiD = (java.lang.Boolean) value$; - break; - case 6: - Exception = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing RobotsTXT instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.discovery.RobotsTXT other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.hasRobotsTXT)) { + this.hasRobotsTXT = data().deepCopy(fields()[0].schema(), other.hasRobotsTXT); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.allowedByRobotsTXT)) { + this.allowedByRobotsTXT = data().deepCopy(fields()[1].schema(), other.allowedByRobotsTXT); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.sitemapXML)) { + this.sitemapXML = data().deepCopy(fields()[2].schema(), other.sitemapXML); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.sitemapXMLSPARQL)) { + this.sitemapXMLSPARQL = data().deepCopy(fields()[3].schema(), other.sitemapXMLSPARQL); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.sitemapXMLSPARQLMatch)) { + this.sitemapXMLSPARQLMatch = + data().deepCopy(fields()[4].schema(), other.sitemapXMLSPARQLMatch); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.sitemapXMLVoiD)) { + this.sitemapXMLVoiD = data().deepCopy(fields()[5].schema(), other.sitemapXMLVoiD); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], 
other.Exception)) { + this.Exception = data().deepCopy(fields()[6].schema(), other.Exception); + fieldSetFlags()[6] = true; + } } - /** Gets the value of the 'hasRobotsTXT' field. */ - public java.lang.Boolean getHasRobotsTXT() { - return hasRobotsTXT; + /** + * Gets the value of the 'hasRobotsTXT' field. + * + * @return The value. + */ + public boolean getHasRobotsTXT() { + return hasRobotsTXT; } /** * Sets the value of the 'hasRobotsTXT' field. * - * @param value the value to set. + * @param value The value of 'hasRobotsTXT'. + * @return This builder. */ - public void setHasRobotsTXT(java.lang.Boolean value) { - this.hasRobotsTXT = value; + public sparqles.avro.discovery.RobotsTXT.Builder setHasRobotsTXT(boolean value) { + validate(fields()[0], value); + this.hasRobotsTXT = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'allowedByRobotsTXT' field. */ - public java.lang.Boolean getAllowedByRobotsTXT() { - return allowedByRobotsTXT; + /** + * Checks whether the 'hasRobotsTXT' field has been set. + * + * @return True if the 'hasRobotsTXT' field has been set, false otherwise. + */ + public boolean hasHasRobotsTXT() { + return fieldSetFlags()[0]; } /** - * Sets the value of the 'allowedByRobotsTXT' field. + * Clears the value of the 'hasRobotsTXT' field. * - * @param value the value to set. + * @return This builder. */ - public void setAllowedByRobotsTXT(java.lang.Boolean value) { - this.allowedByRobotsTXT = value; + public sparqles.avro.discovery.RobotsTXT.Builder clearHasRobotsTXT() { + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'sitemapXML' field. */ - public java.lang.Boolean getSitemapXML() { - return sitemapXML; + /** + * Gets the value of the 'allowedByRobotsTXT' field. + * + * @return The value. + */ + public boolean getAllowedByRobotsTXT() { + return allowedByRobotsTXT; } /** - * Sets the value of the 'sitemapXML' field. + * Sets the value of the 'allowedByRobotsTXT' field. 
* - * @param value the value to set. + * @param value The value of 'allowedByRobotsTXT'. + * @return This builder. */ - public void setSitemapXML(java.lang.Boolean value) { - this.sitemapXML = value; + public sparqles.avro.discovery.RobotsTXT.Builder setAllowedByRobotsTXT(boolean value) { + validate(fields()[1], value); + this.allowedByRobotsTXT = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'sitemapXMLSPARQL' field. */ - public java.lang.Boolean getSitemapXMLSPARQL() { - return sitemapXMLSPARQL; + /** + * Checks whether the 'allowedByRobotsTXT' field has been set. + * + * @return True if the 'allowedByRobotsTXT' field has been set, false otherwise. + */ + public boolean hasAllowedByRobotsTXT() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'sitemapXMLSPARQL' field. + * Clears the value of the 'allowedByRobotsTXT' field. * - * @param value the value to set. + * @return This builder. */ - public void setSitemapXMLSPARQL(java.lang.Boolean value) { - this.sitemapXMLSPARQL = value; + public sparqles.avro.discovery.RobotsTXT.Builder clearAllowedByRobotsTXT() { + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'sitemapXMLSPARQLMatch' field. */ - public java.lang.Boolean getSitemapXMLSPARQLMatch() { - return sitemapXMLSPARQLMatch; + /** + * Gets the value of the 'sitemapXML' field. + * + * @return The value. + */ + public boolean getSitemapXML() { + return sitemapXML; } /** - * Sets the value of the 'sitemapXMLSPARQLMatch' field. + * Sets the value of the 'sitemapXML' field. * - * @param value the value to set. + * @param value The value of 'sitemapXML'. + * @return This builder. 
*/ - public void setSitemapXMLSPARQLMatch(java.lang.Boolean value) { - this.sitemapXMLSPARQLMatch = value; + public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXML(boolean value) { + validate(fields()[2], value); + this.sitemapXML = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'sitemapXMLVoiD' field. */ - public java.lang.Boolean getSitemapXMLVoiD() { - return sitemapXMLVoiD; + /** + * Checks whether the 'sitemapXML' field has been set. + * + * @return True if the 'sitemapXML' field has been set, false otherwise. + */ + public boolean hasSitemapXML() { + return fieldSetFlags()[2]; } /** - * Sets the value of the 'sitemapXMLVoiD' field. + * Clears the value of the 'sitemapXML' field. * - * @param value the value to set. + * @return This builder. */ - public void setSitemapXMLVoiD(java.lang.Boolean value) { - this.sitemapXMLVoiD = value; + public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXML() { + fieldSetFlags()[2] = false; + return this; } - /** Gets the value of the 'Exception' field. */ - public java.lang.CharSequence getException() { - return Exception; + /** + * Gets the value of the 'sitemapXMLSPARQL' field. + * + * @return The value. + */ + public boolean getSitemapXMLSPARQL() { + return sitemapXMLSPARQL; } /** - * Sets the value of the 'Exception' field. + * Sets the value of the 'sitemapXMLSPARQL' field. * - * @param value the value to set. + * @param value The value of 'sitemapXMLSPARQL'. + * @return This builder. */ - public void setException(java.lang.CharSequence value) { - this.Exception = value; - } - - /** RecordBuilder for RobotsTXT instances. 
*/ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private boolean hasRobotsTXT; - private boolean allowedByRobotsTXT; - private boolean sitemapXML; - private boolean sitemapXMLSPARQL; - private boolean sitemapXMLSPARQLMatch; - private boolean sitemapXMLVoiD; - private java.lang.CharSequence Exception; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.discovery.RobotsTXT.SCHEMA$); - } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.discovery.RobotsTXT.Builder other) { - super(other); - if (isValidValue(fields()[0], other.hasRobotsTXT)) { - this.hasRobotsTXT = data().deepCopy(fields()[0].schema(), other.hasRobotsTXT); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.allowedByRobotsTXT)) { - this.allowedByRobotsTXT = - data().deepCopy(fields()[1].schema(), other.allowedByRobotsTXT); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.sitemapXML)) { - this.sitemapXML = data().deepCopy(fields()[2].schema(), other.sitemapXML); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.sitemapXMLSPARQL)) { - this.sitemapXMLSPARQL = - data().deepCopy(fields()[3].schema(), other.sitemapXMLSPARQL); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.sitemapXMLSPARQLMatch)) { - this.sitemapXMLSPARQLMatch = - data().deepCopy(fields()[4].schema(), other.sitemapXMLSPARQLMatch); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.sitemapXMLVoiD)) { - this.sitemapXMLVoiD = data().deepCopy(fields()[5].schema(), other.sitemapXMLVoiD); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.Exception)) { - this.Exception = data().deepCopy(fields()[6].schema(), other.Exception); - fieldSetFlags()[6] = true; - } - } - - /** Creates a Builder by copying an existing RobotsTXT instance */ - private 
Builder(sparqles.avro.discovery.RobotsTXT other) { - super(sparqles.avro.discovery.RobotsTXT.SCHEMA$); - if (isValidValue(fields()[0], other.hasRobotsTXT)) { - this.hasRobotsTXT = data().deepCopy(fields()[0].schema(), other.hasRobotsTXT); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.allowedByRobotsTXT)) { - this.allowedByRobotsTXT = - data().deepCopy(fields()[1].schema(), other.allowedByRobotsTXT); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.sitemapXML)) { - this.sitemapXML = data().deepCopy(fields()[2].schema(), other.sitemapXML); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.sitemapXMLSPARQL)) { - this.sitemapXMLSPARQL = - data().deepCopy(fields()[3].schema(), other.sitemapXMLSPARQL); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.sitemapXMLSPARQLMatch)) { - this.sitemapXMLSPARQLMatch = - data().deepCopy(fields()[4].schema(), other.sitemapXMLSPARQLMatch); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.sitemapXMLVoiD)) { - this.sitemapXMLVoiD = data().deepCopy(fields()[5].schema(), other.sitemapXMLVoiD); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.Exception)) { - this.Exception = data().deepCopy(fields()[6].schema(), other.Exception); - fieldSetFlags()[6] = true; - } - } - - /** Gets the value of the 'hasRobotsTXT' field */ - public java.lang.Boolean getHasRobotsTXT() { - return hasRobotsTXT; - } - - /** Sets the value of the 'hasRobotsTXT' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setHasRobotsTXT(boolean value) { - validate(fields()[0], value); - this.hasRobotsTXT = value; - fieldSetFlags()[0] = true; - return this; - } + public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXMLSPARQL(boolean value) { + validate(fields()[3], value); + this.sitemapXMLSPARQL = value; + fieldSetFlags()[3] = true; + return this; + } - /** Checks whether the 'hasRobotsTXT' field has been set */ - public boolean 
hasHasRobotsTXT() { - return fieldSetFlags()[0]; - } + /** + * Checks whether the 'sitemapXMLSPARQL' field has been set. + * + * @return True if the 'sitemapXMLSPARQL' field has been set, false otherwise. + */ + public boolean hasSitemapXMLSPARQL() { + return fieldSetFlags()[3]; + } - /** Clears the value of the 'hasRobotsTXT' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearHasRobotsTXT() { - fieldSetFlags()[0] = false; - return this; - } + /** + * Clears the value of the 'sitemapXMLSPARQL' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXMLSPARQL() { + fieldSetFlags()[3] = false; + return this; + } - /** Gets the value of the 'allowedByRobotsTXT' field */ - public java.lang.Boolean getAllowedByRobotsTXT() { - return allowedByRobotsTXT; - } + /** + * Gets the value of the 'sitemapXMLSPARQLMatch' field. + * + * @return The value. + */ + public boolean getSitemapXMLSPARQLMatch() { + return sitemapXMLSPARQLMatch; + } - /** Sets the value of the 'allowedByRobotsTXT' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setAllowedByRobotsTXT(boolean value) { - validate(fields()[1], value); - this.allowedByRobotsTXT = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Sets the value of the 'sitemapXMLSPARQLMatch' field. + * + * @param value The value of 'sitemapXMLSPARQLMatch'. + * @return This builder. + */ + public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXMLSPARQLMatch(boolean value) { + validate(fields()[4], value); + this.sitemapXMLSPARQLMatch = value; + fieldSetFlags()[4] = true; + return this; + } - /** Checks whether the 'allowedByRobotsTXT' field has been set */ - public boolean hasAllowedByRobotsTXT() { - return fieldSetFlags()[1]; - } + /** + * Checks whether the 'sitemapXMLSPARQLMatch' field has been set. + * + * @return True if the 'sitemapXMLSPARQLMatch' field has been set, false otherwise. 
+ */ + public boolean hasSitemapXMLSPARQLMatch() { + return fieldSetFlags()[4]; + } - /** Clears the value of the 'allowedByRobotsTXT' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearAllowedByRobotsTXT() { - fieldSetFlags()[1] = false; - return this; - } + /** + * Clears the value of the 'sitemapXMLSPARQLMatch' field. + * + * @return This builder. + */ + public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXMLSPARQLMatch() { + fieldSetFlags()[4] = false; + return this; + } - /** Gets the value of the 'sitemapXML' field */ - public java.lang.Boolean getSitemapXML() { - return sitemapXML; - } + /** + * Gets the value of the 'sitemapXMLVoiD' field. + * + * @return The value. + */ + public boolean getSitemapXMLVoiD() { + return sitemapXMLVoiD; + } - /** Sets the value of the 'sitemapXML' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXML(boolean value) { - validate(fields()[2], value); - this.sitemapXML = value; - fieldSetFlags()[2] = true; - return this; - } + /** + * Sets the value of the 'sitemapXMLVoiD' field. + * + * @param value The value of 'sitemapXMLVoiD'. + * @return This builder. + */ + public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXMLVoiD(boolean value) { + validate(fields()[5], value); + this.sitemapXMLVoiD = value; + fieldSetFlags()[5] = true; + return this; + } - /** Checks whether the 'sitemapXML' field has been set */ - public boolean hasSitemapXML() { - return fieldSetFlags()[2]; - } + /** + * Checks whether the 'sitemapXMLVoiD' field has been set. + * + * @return True if the 'sitemapXMLVoiD' field has been set, false otherwise. + */ + public boolean hasSitemapXMLVoiD() { + return fieldSetFlags()[5]; + } - /** Clears the value of the 'sitemapXML' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXML() { - fieldSetFlags()[2] = false; - return this; - } + /** + * Clears the value of the 'sitemapXMLVoiD' field. + * + * @return This builder. 
+ */ + public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXMLVoiD() { + fieldSetFlags()[5] = false; + return this; + } - /** Gets the value of the 'sitemapXMLSPARQL' field */ - public java.lang.Boolean getSitemapXMLSPARQL() { - return sitemapXMLSPARQL; - } + /** + * Gets the value of the 'Exception' field. + * + * @return The value. + */ + public java.lang.CharSequence getException() { + return Exception; + } - /** Sets the value of the 'sitemapXMLSPARQL' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXMLSPARQL(boolean value) { - validate(fields()[3], value); - this.sitemapXMLSPARQL = value; - fieldSetFlags()[3] = true; - return this; - } + /** + * Sets the value of the 'Exception' field. + * + * @param value The value of 'Exception'. + * @return This builder. + */ + public sparqles.avro.discovery.RobotsTXT.Builder setException(java.lang.CharSequence value) { + validate(fields()[6], value); + this.Exception = value; + fieldSetFlags()[6] = true; + return this; + } - /** Checks whether the 'sitemapXMLSPARQL' field has been set */ - public boolean hasSitemapXMLSPARQL() { - return fieldSetFlags()[3]; - } + /** + * Checks whether the 'Exception' field has been set. + * + * @return True if the 'Exception' field has been set, false otherwise. + */ + public boolean hasException() { + return fieldSetFlags()[6]; + } - /** Clears the value of the 'sitemapXMLSPARQL' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXMLSPARQL() { - fieldSetFlags()[3] = false; - return this; - } + /** + * Clears the value of the 'Exception' field. + * + * @return This builder. 
+ */ + public sparqles.avro.discovery.RobotsTXT.Builder clearException() { + Exception = null; + fieldSetFlags()[6] = false; + return this; + } - /** Gets the value of the 'sitemapXMLSPARQLMatch' field */ - public java.lang.Boolean getSitemapXMLSPARQLMatch() { - return sitemapXMLSPARQLMatch; - } + @Override + @SuppressWarnings("unchecked") + public RobotsTXT build() { + try { + RobotsTXT record = new RobotsTXT(); + record.hasRobotsTXT = + fieldSetFlags()[0] ? this.hasRobotsTXT : (java.lang.Boolean) defaultValue(fields()[0]); + record.allowedByRobotsTXT = + fieldSetFlags()[1] + ? this.allowedByRobotsTXT + : (java.lang.Boolean) defaultValue(fields()[1]); + record.sitemapXML = + fieldSetFlags()[2] ? this.sitemapXML : (java.lang.Boolean) defaultValue(fields()[2]); + record.sitemapXMLSPARQL = + fieldSetFlags()[3] + ? this.sitemapXMLSPARQL + : (java.lang.Boolean) defaultValue(fields()[3]); + record.sitemapXMLSPARQLMatch = + fieldSetFlags()[4] + ? this.sitemapXMLSPARQLMatch + : (java.lang.Boolean) defaultValue(fields()[4]); + record.sitemapXMLVoiD = + fieldSetFlags()[5] + ? this.sitemapXMLVoiD + : (java.lang.Boolean) defaultValue(fields()[5]); + record.Exception = + fieldSetFlags()[6] + ? 
this.Exception + : (java.lang.CharSequence) defaultValue(fields()[6]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Sets the value of the 'sitemapXMLSPARQLMatch' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXMLSPARQLMatch(boolean value) { - validate(fields()[4], value); - this.sitemapXMLSPARQLMatch = value; - fieldSetFlags()[4] = true; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Checks whether the 'sitemapXMLSPARQLMatch' field has been set */ - public boolean hasSitemapXMLSPARQLMatch() { - return fieldSetFlags()[4]; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Clears the value of the 'sitemapXMLSPARQLMatch' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXMLSPARQLMatch() { - fieldSetFlags()[4] = false; - return this; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Gets the value of the 'sitemapXMLVoiD' field */ - public java.lang.Boolean getSitemapXMLVoiD() { - return sitemapXMLVoiD; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Sets the value of the 'sitemapXMLVoiD' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setSitemapXMLVoiD(boolean value) { - validate(fields()[5], value); - this.sitemapXMLVoiD = value; - fieldSetFlags()[5] = true; - return this; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Checks whether the 
'sitemapXMLVoiD' field has been set */ - public boolean hasSitemapXMLVoiD() { - return fieldSetFlags()[5]; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeBoolean(this.hasRobotsTXT); - /** Clears the value of the 'sitemapXMLVoiD' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearSitemapXMLVoiD() { - fieldSetFlags()[5] = false; - return this; - } + out.writeBoolean(this.allowedByRobotsTXT); - /** Gets the value of the 'Exception' field */ - public java.lang.CharSequence getException() { - return Exception; - } + out.writeBoolean(this.sitemapXML); - /** Sets the value of the 'Exception' field */ - public sparqles.avro.discovery.RobotsTXT.Builder setException( - java.lang.CharSequence value) { - validate(fields()[6], value); - this.Exception = value; - fieldSetFlags()[6] = true; - return this; - } + out.writeBoolean(this.sitemapXMLSPARQL); - /** Checks whether the 'Exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[6]; - } + out.writeBoolean(this.sitemapXMLSPARQLMatch); - /** Clears the value of the 'Exception' field */ - public sparqles.avro.discovery.RobotsTXT.Builder clearException() { - Exception = null; - fieldSetFlags()[6] = false; - return this; - } + out.writeBoolean(this.sitemapXMLVoiD); - @Override - public RobotsTXT build() { - try { - RobotsTXT record = new RobotsTXT(); - record.hasRobotsTXT = - fieldSetFlags()[0] - ? this.hasRobotsTXT - : (java.lang.Boolean) defaultValue(fields()[0]); - record.allowedByRobotsTXT = - fieldSetFlags()[1] - ? this.allowedByRobotsTXT - : (java.lang.Boolean) defaultValue(fields()[1]); - record.sitemapXML = - fieldSetFlags()[2] - ? this.sitemapXML - : (java.lang.Boolean) defaultValue(fields()[2]); - record.sitemapXMLSPARQL = - fieldSetFlags()[3] - ? this.sitemapXMLSPARQL - : (java.lang.Boolean) defaultValue(fields()[3]); - record.sitemapXMLSPARQLMatch = - fieldSetFlags()[4] - ? 
this.sitemapXMLSPARQLMatch - : (java.lang.Boolean) defaultValue(fields()[4]); - record.sitemapXMLVoiD = - fieldSetFlags()[5] - ? this.sitemapXMLVoiD - : (java.lang.Boolean) defaultValue(fields()[5]); - record.Exception = - fieldSetFlags()[6] - ? this.Exception - : (java.lang.CharSequence) defaultValue(fields()[6]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + if (this.Exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.Exception); + } + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.hasRobotsTXT = in.readBoolean(); + + this.allowedByRobotsTXT = in.readBoolean(); + + this.sitemapXML = in.readBoolean(); + + this.sitemapXMLSPARQL = in.readBoolean(); + + this.sitemapXMLSPARQLMatch = in.readBoolean(); + + this.sitemapXMLVoiD = in.readBoolean(); + + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? (Utf8) this.Exception : null); + } + + } else { + for (int i = 0; i < 7; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.hasRobotsTXT = in.readBoolean(); + break; + + case 1: + this.allowedByRobotsTXT = in.readBoolean(); + break; + + case 2: + this.sitemapXML = in.readBoolean(); + break; + + case 3: + this.sitemapXMLSPARQL = in.readBoolean(); + break; + + case 4: + this.sitemapXMLSPARQLMatch = in.readBoolean(); + break; + + case 5: + this.sitemapXMLVoiD = in.readBoolean(); + break; + + case 6: + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? 
(Utf8) this.Exception : null); } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/features/FResult.java b/backend/src/main/java/sparqles/avro/features/FResult.java index 2878c860..cc5be1fa 100644 --- a/backend/src/main/java/sparqles/avro/features/FResult.java +++ b/backend/src/main/java/sparqles/avro/features/FResult.java @@ -5,229 +5,551 @@ */ package sparqles.avro.features; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class FResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"FResult\",\"namespace\":\"sparqles.avro.features\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"results\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"record\",\"name\":\"FSingleResult\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"run\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}}]}}}]}"); - @Deprecated public sparqles.avro.EndpointResult endpointResult; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 7825355288541610040L; - @Deprecated - public java.util.Map results; + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + 
"{\"type\":\"record\",\"name\":\"FResult\",\"namespace\":\"sparqles.avro.features\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"results\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"record\",\"name\":\"FSingleResult\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"run\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}}]}}}],\"import\":\"Run.avsc\"}"); - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public FResult() {} + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } - /** All-args constructor. 
*/ - public FResult( - sparqles.avro.EndpointResult endpointResult, - java.util.Map results) { - this.endpointResult = endpointResult; - this.results = results; - } + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this FResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a FResult from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a FResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static FResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.EndpointResult endpointResult; + private java.util.Map results; - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public FResult() {} + + /** + * All-args constructor. + * + * @param endpointResult The new value for endpointResult + * @param results The new value for results + */ + public FResult( + sparqles.avro.EndpointResult endpointResult, + java.util.Map results) { + this.endpointResult = endpointResult; + this.results = results; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpointResult; + case 1: + return results; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new FResult RecordBuilder */ - public static sparqles.avro.features.FResult.Builder newBuilder() { - return new sparqles.avro.features.FResult.Builder(); + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpointResult = (sparqles.avro.EndpointResult) value$; + break; + case 1: + results = + (java.util.Map) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value of the 'endpointResult' field. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; + } + + /** + * Sets the value of the 'endpointResult' field. + * + * @param value the value to set. + */ + public void setEndpointResult(sparqles.avro.EndpointResult value) { + this.endpointResult = value; + } + + /** + * Gets the value of the 'results' field. + * + * @return The value of the 'results' field. + */ + public java.util.Map getResults() { + return results; + } + + /** + * Sets the value of the 'results' field. + * + * @param value the value to set. + */ + public void setResults( + java.util.Map value) { + this.results = value; + } + + /** + * Creates a new FResult RecordBuilder. + * + * @return A new FResult RecordBuilder + */ + public static sparqles.avro.features.FResult.Builder newBuilder() { + return new sparqles.avro.features.FResult.Builder(); + } - /** Creates a new FResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.features.FResult.Builder newBuilder( - sparqles.avro.features.FResult.Builder other) { - return new sparqles.avro.features.FResult.Builder(other); + /** + * Creates a new FResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new FResult RecordBuilder + */ + public static sparqles.avro.features.FResult.Builder newBuilder( + sparqles.avro.features.FResult.Builder other) { + if (other == null) { + return new sparqles.avro.features.FResult.Builder(); + } else { + return new sparqles.avro.features.FResult.Builder(other); } + } - /** Creates a new FResult RecordBuilder by copying an existing FResult instance */ - public static sparqles.avro.features.FResult.Builder newBuilder( - sparqles.avro.features.FResult other) { - return new sparqles.avro.features.FResult.Builder(other); + /** + * Creates a new FResult RecordBuilder by copying an existing FResult instance. + * + * @param other The existing instance to copy. + * @return A new FResult RecordBuilder + */ + public static sparqles.avro.features.FResult.Builder newBuilder( + sparqles.avro.features.FResult other) { + if (other == null) { + return new sparqles.avro.features.FResult.Builder(); + } else { + return new sparqles.avro.features.FResult.Builder(other); } + } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** RecordBuilder for FResult instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.EndpointResult endpointResult; + private sparqles.avro.EndpointResult.Builder endpointResultBuilder; + private java.util.Map results; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpointResult; - case 1: - return results; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.features.FResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointResultBuilder()) { + this.endpointResultBuilder = + sparqles.avro.EndpointResult.newBuilder(other.getEndpointResultBuilder()); + } + if (isValidValue(fields()[1], other.results)) { + this.results = data().deepCopy(fields()[1].schema(), other.results); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpointResult = (sparqles.avro.EndpointResult) value$; - break; - case 1: - results = - (java.util.Map< - java.lang.CharSequence, - sparqles.avro.features.FSingleResult>) - value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing FResult instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.features.FResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = true; + } + this.endpointResultBuilder = null; + if (isValidValue(fields()[1], other.results)) { + this.results = data().deepCopy(fields()[1].schema(), other.results); + fieldSetFlags()[1] = true; + } } - /** Gets the value of the 'endpointResult' field. */ + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value. + */ public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; + return endpointResult; } /** * Sets the value of the 'endpointResult' field. * - * @param value the value to set. 
+ * @param value The value of 'endpointResult'. + * @return This builder. + */ + public sparqles.avro.features.FResult.Builder setEndpointResult( + sparqles.avro.EndpointResult value) { + validate(fields()[0], value); + this.endpointResultBuilder = null; + this.endpointResult = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'endpointResult' field has been set. + * + * @return True if the 'endpointResult' field has been set, false otherwise. + */ + public boolean hasEndpointResult() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpointResult' field and creates one if it doesn't exist + * yet. + * + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder getEndpointResultBuilder() { + if (endpointResultBuilder == null) { + if (hasEndpointResult()) { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder(endpointResult)); + } else { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder()); + } + } + return endpointResultBuilder; + } + + /** + * Sets the Builder instance for the 'endpointResult' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.features.FResult.Builder setEndpointResultBuilder( + sparqles.avro.EndpointResult.Builder value) { + clearEndpointResult(); + endpointResultBuilder = value; + return this; + } + + /** + * Checks whether the 'endpointResult' field has an active Builder instance + * + * @return True if the 'endpointResult' field has an active Builder instance */ - public void setEndpointResult(sparqles.avro.EndpointResult value) { - this.endpointResult = value; + public boolean hasEndpointResultBuilder() { + return endpointResultBuilder != null; } - /** Gets the value of the 'results' field. */ + /** + * Clears the value of the 'endpointResult' field. + * + * @return This builder. 
+ */ + public sparqles.avro.features.FResult.Builder clearEndpointResult() { + endpointResult = null; + endpointResultBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'results' field. + * + * @return The value. + */ public java.util.Map - getResults() { - return results; + getResults() { + return results; } /** * Sets the value of the 'results' field. * - * @param value the value to set. + * @param value The value of 'results'. + * @return This builder. */ - public void setResults( - java.util.Map value) { - this.results = value; + public sparqles.avro.features.FResult.Builder setResults( + java.util.Map value) { + validate(fields()[1], value); + this.results = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for FResult instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'results' field has been set. + * + * @return True if the 'results' field has been set, false otherwise. + */ + public boolean hasResults() { + return fieldSetFlags()[1]; + } - private sparqles.avro.EndpointResult endpointResult; - private java.util.Map results; + /** + * Clears the value of the 'results' field. + * + * @return This builder. + */ + public sparqles.avro.features.FResult.Builder clearResults() { + results = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.features.FResult.SCHEMA$); + @Override + @SuppressWarnings("unchecked") + public FResult build() { + try { + FResult record = new FResult(); + if (endpointResultBuilder != null) { + try { + record.endpointResult = this.endpointResultBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpointResult")); + throw e; + } + } else { + record.endpointResult = + fieldSetFlags()[0] + ? 
this.endpointResult + : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); } + record.results = + fieldSetFlags()[1] + ? this.results + : (java.util.Map) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.features.FResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.results)) { - this.results = data().deepCopy(fields()[1].schema(), other.results); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing FResult instance */ - private Builder(sparqles.avro.features.FResult other) { - super(sparqles.avro.features.FResult.SCHEMA$); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.results)) { - this.results = data().deepCopy(fields()[1].schema(), other.results); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'endpointResult' field */ - public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets 
the value of the 'endpointResult' field */ - public sparqles.avro.features.FResult.Builder setEndpointResult( - sparqles.avro.EndpointResult value) { - validate(fields()[0], value); - this.endpointResult = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'endpointResult' field has been set */ - public boolean hasEndpointResult() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'endpointResult' field */ - public sparqles.avro.features.FResult.Builder clearEndpointResult() { - endpointResult = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpointResult.customEncode(out); - /** Gets the value of the 'results' field */ - public java.util.Map - getResults() { - return results; - } + long size0 = this.results.size(); + out.writeMapStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (java.util.Map.Entry e0 : + this.results.entrySet()) { + actualSize0++; + out.startItem(); + out.writeString(e0.getKey()); + sparqles.avro.features.FSingleResult v0 = e0.getValue(); + v0.customEncode(out); + } + out.writeMapEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Map-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'results' field */ - public sparqles.avro.features.FResult.Builder setResults( - java.util.Map value) { - validate(fields()[1], value); - this.results = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = 
in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); - /** Checks whether the 'results' field has been set */ - public boolean hasResults() { - return fieldSetFlags()[1]; + long size0 = in.readMapStart(); + java.util.Map m0 = + this.results; // Need fresh name due to limitation of macro system + if (m0 == null) { + m0 = + new java.util.HashMap( + (int) (size0 * 4) / 3 + 1); + this.results = m0; + } else m0.clear(); + for (; 0 < size0; size0 = in.mapNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence k0 = null; + k0 = in.readString(k0 instanceof Utf8 ? (Utf8) k0 : null); + sparqles.avro.features.FSingleResult v0 = null; + if (v0 == null) { + v0 = new sparqles.avro.features.FSingleResult(); + } + v0.customDecode(in); + m0.put(k0, v0); } + } - /** Clears the value of the 'results' field */ - public sparqles.avro.features.FResult.Builder clearResults() { - results = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); + break; - @Override - public FResult build() { - try { - FResult record = new FResult(); - record.endpointResult = - fieldSetFlags()[0] - ? this.endpointResult - : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); - record.results = - fieldSetFlags()[1] - ? 
this.results - : (java.util.Map< - java.lang.CharSequence, - sparqles.avro.features.FSingleResult>) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readMapStart(); + java.util.Map m0 = + this.results; // Need fresh name due to limitation of macro system + if (m0 == null) { + m0 = + new java.util.HashMap< + java.lang.CharSequence, sparqles.avro.features.FSingleResult>( + (int) (size0 * 4) / 3 + 1); + this.results = m0; + } else m0.clear(); + for (; 0 < size0; size0 = in.mapNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence k0 = null; + k0 = in.readString(k0 instanceof Utf8 ? (Utf8) k0 : null); + sparqles.avro.features.FSingleResult v0 = null; + if (v0 == null) { + v0 = new sparqles.avro.features.FSingleResult(); + } + v0.customDecode(in); + m0.put(k0, v0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/features/FSingleResult.java b/backend/src/main/java/sparqles/avro/features/FSingleResult.java index 470c064a..424210ab 100644 --- a/backend/src/main/java/sparqles/avro/features/FSingleResult.java +++ b/backend/src/main/java/sparqles/avro/features/FSingleResult.java @@ -5,215 +5,485 @@ */ package sparqles.avro.features; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class FSingleResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"FSingleResult\",\"namespace\":\"sparqles.avro.features\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"run\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}}]}"); - @Deprecated public java.lang.CharSequence query; - @Deprecated public sparqles.avro.performance.Run run; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -1065450960484486367L; - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public FSingleResult() {} + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"FSingleResult\",\"namespace\":\"sparqles.avro.features\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"run\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}}]}"); - /** All-args constructor. 
*/ - public FSingleResult(java.lang.CharSequence query, sparqles.avro.performance.Run run) { - this.query = query; - this.run = run; - } + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this FSingleResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a FSingleResult from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a FSingleResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static FSingleResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + private java.lang.CharSequence query; + private sparqles.avro.performance.Run run; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public FSingleResult() {} + + /** + * All-args constructor. + * + * @param query The new value for query + * @param run The new value for run + */ + public FSingleResult(java.lang.CharSequence query, sparqles.avro.performance.Run run) { + this.query = query; + this.run = run; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return query; + case 1: + return run; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new FSingleResult RecordBuilder */ - public static sparqles.avro.features.FSingleResult.Builder newBuilder() { - return new sparqles.avro.features.FSingleResult.Builder(); + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + query = (java.lang.CharSequence) value$; + break; + case 1: + run = (sparqles.avro.performance.Run) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'query' field. + * + * @return The value of the 'query' field. + */ + public java.lang.CharSequence getQuery() { + return query; + } + + /** + * Sets the value of the 'query' field. + * + * @param value the value to set. + */ + public void setQuery(java.lang.CharSequence value) { + this.query = value; + } + + /** + * Gets the value of the 'run' field. + * + * @return The value of the 'run' field. + */ + public sparqles.avro.performance.Run getRun() { + return run; + } + + /** + * Sets the value of the 'run' field. + * + * @param value the value to set. + */ + public void setRun(sparqles.avro.performance.Run value) { + this.run = value; + } - /** Creates a new FSingleResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.features.FSingleResult.Builder newBuilder( - sparqles.avro.features.FSingleResult.Builder other) { - return new sparqles.avro.features.FSingleResult.Builder(other); + /** + * Creates a new FSingleResult RecordBuilder. + * + * @return A new FSingleResult RecordBuilder + */ + public static sparqles.avro.features.FSingleResult.Builder newBuilder() { + return new sparqles.avro.features.FSingleResult.Builder(); + } + + /** + * Creates a new FSingleResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new FSingleResult RecordBuilder + */ + public static sparqles.avro.features.FSingleResult.Builder newBuilder( + sparqles.avro.features.FSingleResult.Builder other) { + if (other == null) { + return new sparqles.avro.features.FSingleResult.Builder(); + } else { + return new sparqles.avro.features.FSingleResult.Builder(other); } + } - /** Creates a new FSingleResult RecordBuilder by copying an existing FSingleResult instance */ - public static sparqles.avro.features.FSingleResult.Builder newBuilder( - sparqles.avro.features.FSingleResult other) { - return new sparqles.avro.features.FSingleResult.Builder(other); + /** + * Creates a new FSingleResult RecordBuilder by copying an existing FSingleResult instance. + * + * @param other The existing instance to copy. + * @return A new FSingleResult RecordBuilder + */ + public static sparqles.avro.features.FSingleResult.Builder newBuilder( + sparqles.avro.features.FSingleResult other) { + if (other == null) { + return new sparqles.avro.features.FSingleResult.Builder(); + } else { + return new sparqles.avro.features.FSingleResult.Builder(other); } + } + + /** RecordBuilder for FSingleResult instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence query; + private sparqles.avro.performance.Run run; + private sparqles.avro.performance.Run.Builder runBuilder; - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return query; - case 1: - return run; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. 
+ * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.features.FSingleResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.query)) { + this.query = data().deepCopy(fields()[0].schema(), other.query); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.run)) { + this.run = data().deepCopy(fields()[1].schema(), other.run); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (other.hasRunBuilder()) { + this.runBuilder = sparqles.avro.performance.Run.newBuilder(other.getRunBuilder()); + } } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - query = (java.lang.CharSequence) value$; - break; - case 1: - run = (sparqles.avro.performance.Run) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing FSingleResult instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.features.FSingleResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.query)) { + this.query = data().deepCopy(fields()[0].schema(), other.query); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.run)) { + this.run = data().deepCopy(fields()[1].schema(), other.run); + fieldSetFlags()[1] = true; + } + this.runBuilder = null; } - /** Gets the value of the 'query' field. */ + /** + * Gets the value of the 'query' field. + * + * @return The value. + */ public java.lang.CharSequence getQuery() { - return query; + return query; } /** * Sets the value of the 'query' field. * - * @param value the value to set. + * @param value The value of 'query'. + * @return This builder. 
+ */ + public sparqles.avro.features.FSingleResult.Builder setQuery(java.lang.CharSequence value) { + validate(fields()[0], value); + this.query = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'query' field has been set. + * + * @return True if the 'query' field has been set, false otherwise. + */ + public boolean hasQuery() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'query' field. + * + * @return This builder. */ - public void setQuery(java.lang.CharSequence value) { - this.query = value; + public sparqles.avro.features.FSingleResult.Builder clearQuery() { + query = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'run' field. */ + /** + * Gets the value of the 'run' field. + * + * @return The value. + */ public sparqles.avro.performance.Run getRun() { - return run; + return run; } /** * Sets the value of the 'run' field. * - * @param value the value to set. + * @param value The value of 'run'. + * @return This builder. */ - public void setRun(sparqles.avro.performance.Run value) { - this.run = value; + public sparqles.avro.features.FSingleResult.Builder setRun( + sparqles.avro.performance.Run value) { + validate(fields()[1], value); + this.runBuilder = null; + this.run = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for FSingleResult instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence query; - private sparqles.avro.performance.Run run; + /** + * Checks whether the 'run' field has been set. + * + * @return True if the 'run' field has been set, false otherwise. 
+ */ + public boolean hasRun() { + return fieldSetFlags()[1]; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.features.FSingleResult.SCHEMA$); + /** + * Gets the Builder instance for the 'run' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.performance.Run.Builder getRunBuilder() { + if (runBuilder == null) { + if (hasRun()) { + setRunBuilder(sparqles.avro.performance.Run.newBuilder(run)); + } else { + setRunBuilder(sparqles.avro.performance.Run.newBuilder()); } + } + return runBuilder; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.features.FSingleResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.query)) { - this.query = data().deepCopy(fields()[0].schema(), other.query); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.run)) { - this.run = data().deepCopy(fields()[1].schema(), other.run); - fieldSetFlags()[1] = true; - } - } + /** + * Sets the Builder instance for the 'run' field + * + * @param value The builder instance that must be set. + * @return This builder. 
+ */ + public sparqles.avro.features.FSingleResult.Builder setRunBuilder( + sparqles.avro.performance.Run.Builder value) { + clearRun(); + runBuilder = value; + return this; + } - /** Creates a Builder by copying an existing FSingleResult instance */ - private Builder(sparqles.avro.features.FSingleResult other) { - super(sparqles.avro.features.FSingleResult.SCHEMA$); - if (isValidValue(fields()[0], other.query)) { - this.query = data().deepCopy(fields()[0].schema(), other.query); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.run)) { - this.run = data().deepCopy(fields()[1].schema(), other.run); - fieldSetFlags()[1] = true; - } - } + /** + * Checks whether the 'run' field has an active Builder instance + * + * @return True if the 'run' field has an active Builder instance + */ + public boolean hasRunBuilder() { + return runBuilder != null; + } - /** Gets the value of the 'query' field */ - public java.lang.CharSequence getQuery() { - return query; - } + /** + * Clears the value of the 'run' field. + * + * @return This builder. + */ + public sparqles.avro.features.FSingleResult.Builder clearRun() { + run = null; + runBuilder = null; + fieldSetFlags()[1] = false; + return this; + } - /** Sets the value of the 'query' field */ - public sparqles.avro.features.FSingleResult.Builder setQuery(java.lang.CharSequence value) { - validate(fields()[0], value); - this.query = value; - fieldSetFlags()[0] = true; - return this; + @Override + @SuppressWarnings("unchecked") + public FSingleResult build() { + try { + FSingleResult record = new FSingleResult(); + record.query = + fieldSetFlags()[0] ? this.query : (java.lang.CharSequence) defaultValue(fields()[0]); + if (runBuilder != null) { + try { + record.run = this.runBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("run")); + throw e; + } + } else { + record.run = + fieldSetFlags()[1] + ? 
this.run + : (sparqles.avro.performance.Run) defaultValue(fields()[1]); } + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Checks whether the 'query' field has been set */ - public boolean hasQuery() { - return fieldSetFlags()[0]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Clears the value of the 'query' field */ - public sparqles.avro.features.FSingleResult.Builder clearQuery() { - query = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'run' field */ - public sparqles.avro.performance.Run getRun() { - return run; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'run' field */ - public sparqles.avro.features.FSingleResult.Builder setRun( - sparqles.avro.performance.Run value) { - validate(fields()[1], value); - this.run = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'run' field has been set */ - public boolean hasRun() { - return fieldSetFlags()[1]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'run' field */ - public sparqles.avro.features.FSingleResult.Builder clearRun() { - run = null; - fieldSetFlags()[1] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws 
java.io.IOException { + out.writeString(this.query); - @Override - public FSingleResult build() { - try { - FSingleResult record = new FSingleResult(); - record.query = - fieldSetFlags()[0] - ? this.query - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.run = - fieldSetFlags()[1] - ? this.run - : (sparqles.avro.performance.Run) defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + this.run.customEncode(out); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.query = in.readString(this.query instanceof Utf8 ? (Utf8) this.query : null); + + if (this.run == null) { + this.run = new sparqles.avro.performance.Run(); + } + this.run.customDecode(in); + + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.query = in.readString(this.query instanceof Utf8 ? 
(Utf8) this.query : null); + break; + + case 1: + if (this.run == null) { + this.run = new sparqles.avro.performance.Run(); } + this.run.customDecode(in); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/performance/PResult.java b/backend/src/main/java/sparqles/avro/performance/PResult.java index 66081751..b8cbb7fc 100644 --- a/backend/src/main/java/sparqles/avro/performance/PResult.java +++ b/backend/src/main/java/sparqles/avro/performance/PResult.java @@ -5,232 +5,552 @@ */ package sparqles.avro.performance; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class PResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - 
"{\"type\":\"record\",\"name\":\"PResult\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"results\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"record\",\"name\":\"PSingleResult\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"cold\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}},{\"name\":\"warm\",\"type\":\"Run\"}]}}}]}"); - @Deprecated public sparqles.avro.EndpointResult endpointResult; + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 6056298654927421624L; - @Deprecated - public java.util.Map results; + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + 
"{\"type\":\"record\",\"name\":\"PResult\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"endpointResult\",\"type\":{\"type\":\"record\",\"name\":\"EndpointResult\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"start\",\"type\":\"long\"},{\"name\":\"end\",\"type\":\"long\"}],\"import\":\"Endpoint.avsc\"}},{\"name\":\"results\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"record\",\"name\":\"PSingleResult\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"cold\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}},{\"name\":\"warm\",\"type\":\"Run\"}]}}}],\"import\":\"Run.avsc\"}"); - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public PResult() {} + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } - /** All-args constructor. 
*/ - public PResult( - sparqles.avro.EndpointResult endpointResult, - java.util.Map - results) { - this.endpointResult = endpointResult; - this.results = results; - } + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this PResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a PResult from a ByteBuffer. 
+ * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a PResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static PResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.EndpointResult endpointResult; + private java.util.Map results; - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public PResult() {} + + /** + * All-args constructor. + * + * @param endpointResult The new value for endpointResult + * @param results The new value for results + */ + public PResult( + sparqles.avro.EndpointResult endpointResult, + java.util.Map results) { + this.endpointResult = endpointResult; + this.results = results; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpointResult; + case 1: + return results; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - /** Creates a new PResult RecordBuilder */ - public static sparqles.avro.performance.PResult.Builder newBuilder() { - return new sparqles.avro.performance.PResult.Builder(); + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpointResult = (sparqles.avro.EndpointResult) value$; + break; + case 1: + results = + (java.util.Map) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } + + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value of the 'endpointResult' field. + */ + public sparqles.avro.EndpointResult getEndpointResult() { + return endpointResult; + } + + /** + * Sets the value of the 'endpointResult' field. + * + * @param value the value to set. + */ + public void setEndpointResult(sparqles.avro.EndpointResult value) { + this.endpointResult = value; + } + + /** + * Gets the value of the 'results' field. + * + * @return The value of the 'results' field. + */ + public java.util.Map + getResults() { + return results; + } + + /** + * Sets the value of the 'results' field. + * + * @param value the value to set. + */ + public void setResults( + java.util.Map value) { + this.results = value; + } + + /** + * Creates a new PResult RecordBuilder. + * + * @return A new PResult RecordBuilder + */ + public static sparqles.avro.performance.PResult.Builder newBuilder() { + return new sparqles.avro.performance.PResult.Builder(); + } - /** Creates a new PResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.performance.PResult.Builder newBuilder( - sparqles.avro.performance.PResult.Builder other) { - return new sparqles.avro.performance.PResult.Builder(other); + /** + * Creates a new PResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. 
+ * @return A new PResult RecordBuilder + */ + public static sparqles.avro.performance.PResult.Builder newBuilder( + sparqles.avro.performance.PResult.Builder other) { + if (other == null) { + return new sparqles.avro.performance.PResult.Builder(); + } else { + return new sparqles.avro.performance.PResult.Builder(other); } + } - /** Creates a new PResult RecordBuilder by copying an existing PResult instance */ - public static sparqles.avro.performance.PResult.Builder newBuilder( - sparqles.avro.performance.PResult other) { - return new sparqles.avro.performance.PResult.Builder(other); + /** + * Creates a new PResult RecordBuilder by copying an existing PResult instance. + * + * @param other The existing instance to copy. + * @return A new PResult RecordBuilder + */ + public static sparqles.avro.performance.PResult.Builder newBuilder( + sparqles.avro.performance.PResult other) { + if (other == null) { + return new sparqles.avro.performance.PResult.Builder(); + } else { + return new sparqles.avro.performance.PResult.Builder(other); } + } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** RecordBuilder for PResult instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.EndpointResult endpointResult; + private sparqles.avro.EndpointResult.Builder endpointResultBuilder; + private java.util.Map results; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpointResult; - case 1: - return results; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. 
+ */ + private Builder(sparqles.avro.performance.PResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointResultBuilder()) { + this.endpointResultBuilder = + sparqles.avro.EndpointResult.newBuilder(other.getEndpointResultBuilder()); + } + if (isValidValue(fields()[1], other.results)) { + this.results = data().deepCopy(fields()[1].schema(), other.results); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpointResult = (sparqles.avro.EndpointResult) value$; - break; - case 1: - results = - (java.util.Map< - java.lang.CharSequence, - sparqles.avro.performance.PSingleResult>) - value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing PResult instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.performance.PResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpointResult)) { + this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); + fieldSetFlags()[0] = true; + } + this.endpointResultBuilder = null; + if (isValidValue(fields()[1], other.results)) { + this.results = data().deepCopy(fields()[1].schema(), other.results); + fieldSetFlags()[1] = true; + } } - /** Gets the value of the 'endpointResult' field. */ + /** + * Gets the value of the 'endpointResult' field. + * + * @return The value. + */ public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; + return endpointResult; } /** * Sets the value of the 'endpointResult' field. * - * @param value the value to set. 
+ * @param value The value of 'endpointResult'. + * @return This builder. + */ + public sparqles.avro.performance.PResult.Builder setEndpointResult( + sparqles.avro.EndpointResult value) { + validate(fields()[0], value); + this.endpointResultBuilder = null; + this.endpointResult = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'endpointResult' field has been set. + * + * @return True if the 'endpointResult' field has been set, false otherwise. + */ + public boolean hasEndpointResult() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpointResult' field and creates one if it doesn't exist + * yet. + * + * @return This builder. + */ + public sparqles.avro.EndpointResult.Builder getEndpointResultBuilder() { + if (endpointResultBuilder == null) { + if (hasEndpointResult()) { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder(endpointResult)); + } else { + setEndpointResultBuilder(sparqles.avro.EndpointResult.newBuilder()); + } + } + return endpointResultBuilder; + } + + /** + * Sets the Builder instance for the 'endpointResult' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.performance.PResult.Builder setEndpointResultBuilder( + sparqles.avro.EndpointResult.Builder value) { + clearEndpointResult(); + endpointResultBuilder = value; + return this; + } + + /** + * Checks whether the 'endpointResult' field has an active Builder instance + * + * @return True if the 'endpointResult' field has an active Builder instance */ - public void setEndpointResult(sparqles.avro.EndpointResult value) { - this.endpointResult = value; + public boolean hasEndpointResultBuilder() { + return endpointResultBuilder != null; } - /** Gets the value of the 'results' field. */ + /** + * Clears the value of the 'endpointResult' field. + * + * @return This builder. 
+ */ + public sparqles.avro.performance.PResult.Builder clearEndpointResult() { + endpointResult = null; + endpointResultBuilder = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'results' field. + * + * @return The value. + */ public java.util.Map - getResults() { - return results; + getResults() { + return results; } /** * Sets the value of the 'results' field. * - * @param value the value to set. + * @param value The value of 'results'. + * @return This builder. */ - public void setResults( - java.util.Map value) { - this.results = value; + public sparqles.avro.performance.PResult.Builder setResults( + java.util.Map value) { + validate(fields()[1], value); + this.results = value; + fieldSetFlags()[1] = true; + return this; } - /** RecordBuilder for PResult instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Checks whether the 'results' field has been set. + * + * @return True if the 'results' field has been set, false otherwise. + */ + public boolean hasResults() { + return fieldSetFlags()[1]; + } - private sparqles.avro.EndpointResult endpointResult; - private java.util.Map - results; + /** + * Clears the value of the 'results' field. + * + * @return This builder. 
+ */ + public sparqles.avro.performance.PResult.Builder clearResults() { + results = null; + fieldSetFlags()[1] = false; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.performance.PResult.SCHEMA$); + @Override + @SuppressWarnings("unchecked") + public PResult build() { + try { + PResult record = new PResult(); + if (endpointResultBuilder != null) { + try { + record.endpointResult = this.endpointResultBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpointResult")); + throw e; + } + } else { + record.endpointResult = + fieldSetFlags()[0] + ? this.endpointResult + : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); } + record.results = + fieldSetFlags()[1] + ? this.results + : (java.util.Map) + defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.performance.PResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.results)) { - this.results = data().deepCopy(fields()[1].schema(), other.results); - fieldSetFlags()[1] = true; - } - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Creates a Builder by copying an existing PResult instance */ - private Builder(sparqles.avro.performance.PResult other) { - super(sparqles.avro.performance.PResult.SCHEMA$); - if (isValidValue(fields()[0], other.endpointResult)) { - this.endpointResult = data().deepCopy(fields()[0].schema(), 
other.endpointResult); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.results)) { - this.results = data().deepCopy(fields()[1].schema(), other.results); - fieldSetFlags()[1] = true; - } - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Gets the value of the 'endpointResult' field */ - public sparqles.avro.EndpointResult getEndpointResult() { - return endpointResult; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Sets the value of the 'endpointResult' field */ - public sparqles.avro.performance.PResult.Builder setEndpointResult( - sparqles.avro.EndpointResult value) { - validate(fields()[0], value); - this.endpointResult = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Checks whether the 'endpointResult' field has been set */ - public boolean hasEndpointResult() { - return fieldSetFlags()[0]; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Clears the value of the 'endpointResult' field */ - public sparqles.avro.performance.PResult.Builder clearEndpointResult() { - endpointResult = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpointResult.customEncode(out); - /** Gets the value of the 'results' field */ - public java.util.Map - getResults() { - return results; - } + long size0 = this.results.size(); + out.writeMapStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (java.util.Map.Entry e0 : + this.results.entrySet()) { + actualSize0++; + out.startItem(); + out.writeString(e0.getKey()); 
+ sparqles.avro.performance.PSingleResult v0 = e0.getValue(); + v0.customEncode(out); + } + out.writeMapEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException( + "Map-size written was " + size0 + ", but element count was " + actualSize0 + "."); + } - /** Sets the value of the 'results' field */ - public sparqles.avro.performance.PResult.Builder setResults( - java.util.Map - value) { - validate(fields()[1], value); - this.results = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); - /** Checks whether the 'results' field has been set */ - public boolean hasResults() { - return fieldSetFlags()[1]; + long size0 = in.readMapStart(); + java.util.Map m0 = + this.results; // Need fresh name due to limitation of macro system + if (m0 == null) { + m0 = + new java.util.HashMap( + (int) (size0 * 4) / 3 + 1); + this.results = m0; + } else m0.clear(); + for (; 0 < size0; size0 = in.mapNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence k0 = null; + k0 = in.readString(k0 instanceof Utf8 ? 
(Utf8) k0 : null); + sparqles.avro.performance.PSingleResult v0 = null; + if (v0 == null) { + v0 = new sparqles.avro.performance.PSingleResult(); + } + v0.customDecode(in); + m0.put(k0, v0); } + } - /** Clears the value of the 'results' field */ - public sparqles.avro.performance.PResult.Builder clearResults() { - results = null; - fieldSetFlags()[1] = false; - return this; - } + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpointResult == null) { + this.endpointResult = new sparqles.avro.EndpointResult(); + } + this.endpointResult.customDecode(in); + break; - @Override - public PResult build() { - try { - PResult record = new PResult(); - record.endpointResult = - fieldSetFlags()[0] - ? this.endpointResult - : (sparqles.avro.EndpointResult) defaultValue(fields()[0]); - record.results = - fieldSetFlags()[1] - ? this.results - : (java.util.Map< - java.lang.CharSequence, - sparqles.avro.performance.PSingleResult>) - defaultValue(fields()[1]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 1: + long size0 = in.readMapStart(); + java.util.Map m0 = + this.results; // Need fresh name due to limitation of macro system + if (m0 == null) { + m0 = + new java.util.HashMap< + java.lang.CharSequence, sparqles.avro.performance.PSingleResult>( + (int) (size0 * 4) / 3 + 1); + this.results = m0; + } else m0.clear(); + for (; 0 < size0; size0 = in.mapNext()) { + for (; size0 != 0; size0--) { + java.lang.CharSequence k0 = null; + k0 = in.readString(k0 instanceof Utf8 ? 
(Utf8) k0 : null); + sparqles.avro.performance.PSingleResult v0 = null; + if (v0 == null) { + v0 = new sparqles.avro.performance.PSingleResult(); + } + v0.customDecode(in); + m0.put(k0, v0); + } } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/performance/PSingleResult.java b/backend/src/main/java/sparqles/avro/performance/PSingleResult.java index 3a05d5b9..28ed00b6 100644 --- a/backend/src/main/java/sparqles/avro/performance/PSingleResult.java +++ b/backend/src/main/java/sparqles/avro/performance/PSingleResult.java @@ -5,279 +5,638 @@ */ package sparqles.avro.performance; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class PSingleResult extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"PSingleResult\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"cold\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}},{\"name\":\"warm\",\"type\":\"Run\"}]}"); - @Deprecated public java.lang.CharSequence query; - @Deprecated public sparqles.avro.performance.Run cold; - @Deprecated public sparqles.avro.performance.Run warm; + implements 
org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -6493359156901817207L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"PSingleResult\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"query\",\"type\":\"string\"},{\"name\":\"cold\",\"type\":{\"type\":\"record\",\"name\":\"Run\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}},{\"name\":\"warm\",\"type\":\"Run\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this PSingleResult to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a PSingleResult from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a PSingleResult instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static PSingleResult fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence query; + private sparqles.avro.performance.Run cold; + private sparqles.avro.performance.Run warm; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public PSingleResult() {} + + /** + * All-args constructor. 
+ * + * @param query The new value for query + * @param cold The new value for cold + * @param warm The new value for warm + */ + public PSingleResult( + java.lang.CharSequence query, + sparqles.avro.performance.Run cold, + sparqles.avro.performance.Run warm) { + this.query = query; + this.cold = cold; + this.warm = warm; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return query; + case 1: + return cold; + case 2: + return warm; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + query = (java.lang.CharSequence) value$; + break; + case 1: + cold = (sparqles.avro.performance.Run) value$; + break; + case 2: + warm = (sparqles.avro.performance.Run) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'query' field. + * + * @return The value of the 'query' field. + */ + public java.lang.CharSequence getQuery() { + return query; + } + + /** + * Sets the value of the 'query' field. + * + * @param value the value to set. + */ + public void setQuery(java.lang.CharSequence value) { + this.query = value; + } + + /** + * Gets the value of the 'cold' field. + * + * @return The value of the 'cold' field. + */ + public sparqles.avro.performance.Run getCold() { + return cold; + } + + /** + * Sets the value of the 'cold' field. + * + * @param value the value to set. 
+ */ + public void setCold(sparqles.avro.performance.Run value) { + this.cold = value; + } + + /** + * Gets the value of the 'warm' field. + * + * @return The value of the 'warm' field. + */ + public sparqles.avro.performance.Run getWarm() { + return warm; + } + + /** + * Sets the value of the 'warm' field. + * + * @param value the value to set. + */ + public void setWarm(sparqles.avro.performance.Run value) { + this.warm = value; + } + + /** + * Creates a new PSingleResult RecordBuilder. + * + * @return A new PSingleResult RecordBuilder + */ + public static sparqles.avro.performance.PSingleResult.Builder newBuilder() { + return new sparqles.avro.performance.PSingleResult.Builder(); + } + + /** + * Creates a new PSingleResult RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new PSingleResult RecordBuilder + */ + public static sparqles.avro.performance.PSingleResult.Builder newBuilder( + sparqles.avro.performance.PSingleResult.Builder other) { + if (other == null) { + return new sparqles.avro.performance.PSingleResult.Builder(); + } else { + return new sparqles.avro.performance.PSingleResult.Builder(other); + } + } + + /** + * Creates a new PSingleResult RecordBuilder by copying an existing PSingleResult instance. + * + * @param other The existing instance to copy. + * @return A new PSingleResult RecordBuilder + */ + public static sparqles.avro.performance.PSingleResult.Builder newBuilder( + sparqles.avro.performance.PSingleResult other) { + if (other == null) { + return new sparqles.avro.performance.PSingleResult.Builder(); + } else { + return new sparqles.avro.performance.PSingleResult.Builder(other); + } + } + + /** RecordBuilder for PSingleResult instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder + extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence query; + private sparqles.avro.performance.Run cold; + private sparqles.avro.performance.Run.Builder coldBuilder; + private sparqles.avro.performance.Run warm; + private sparqles.avro.performance.Run.Builder warmBuilder; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public PSingleResult() {} - - /** All-args constructor. */ - public PSingleResult( - java.lang.CharSequence query, - sparqles.avro.performance.Run cold, - sparqles.avro.performance.Run warm) { - this.query = query; - this.cold = cold; - this.warm = warm; + private Builder(sparqles.avro.performance.PSingleResult.Builder other) { + super(other); + if (isValidValue(fields()[0], other.query)) { + this.query = data().deepCopy(fields()[0].schema(), other.query); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.cold)) { + this.cold = data().deepCopy(fields()[1].schema(), other.cold); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (other.hasColdBuilder()) { + this.coldBuilder = sparqles.avro.performance.Run.newBuilder(other.getColdBuilder()); + } + if (isValidValue(fields()[2], other.warm)) { + this.warm = data().deepCopy(fields()[2].schema(), other.warm); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (other.hasWarmBuilder()) { + this.warmBuilder = sparqles.avro.performance.Run.newBuilder(other.getWarmBuilder()); + } } - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; + /** + * 
Creates a Builder by copying an existing PSingleResult instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.performance.PSingleResult other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.query)) { + this.query = data().deepCopy(fields()[0].schema(), other.query); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.cold)) { + this.cold = data().deepCopy(fields()[1].schema(), other.cold); + fieldSetFlags()[1] = true; + } + this.coldBuilder = null; + if (isValidValue(fields()[2], other.warm)) { + this.warm = data().deepCopy(fields()[2].schema(), other.warm); + fieldSetFlags()[2] = true; + } + this.warmBuilder = null; } - /** Creates a new PSingleResult RecordBuilder */ - public static sparqles.avro.performance.PSingleResult.Builder newBuilder() { - return new sparqles.avro.performance.PSingleResult.Builder(); + /** + * Gets the value of the 'query' field. + * + * @return The value. + */ + public java.lang.CharSequence getQuery() { + return query; } - /** Creates a new PSingleResult RecordBuilder by copying an existing Builder */ - public static sparqles.avro.performance.PSingleResult.Builder newBuilder( - sparqles.avro.performance.PSingleResult.Builder other) { - return new sparqles.avro.performance.PSingleResult.Builder(other); + /** + * Sets the value of the 'query' field. + * + * @param value The value of 'query'. + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder setQuery(java.lang.CharSequence value) { + validate(fields()[0], value); + this.query = value; + fieldSetFlags()[0] = true; + return this; } - /** Creates a new PSingleResult RecordBuilder by copying an existing PSingleResult instance */ - public static sparqles.avro.performance.PSingleResult.Builder newBuilder( - sparqles.avro.performance.PSingleResult other) { - return new sparqles.avro.performance.PSingleResult.Builder(other); + /** + * Checks whether the 'query' field has been set. 
+ * + * @return True if the 'query' field has been set, false otherwise. + */ + public boolean hasQuery() { + return fieldSetFlags()[0]; } - public org.apache.avro.Schema getSchema() { - return SCHEMA$; + /** + * Clears the value of the 'query' field. + * + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder clearQuery() { + query = null; + fieldSetFlags()[0] = false; + return this; } - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return query; - case 1: - return cold; - case 2: - return warm; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Gets the value of the 'cold' field. + * + * @return The value. + */ + public sparqles.avro.performance.Run getCold() { + return cold; } - // Used by DatumReader. Applications should not call. - @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - query = (java.lang.CharSequence) value$; - break; - case 1: - cold = (sparqles.avro.performance.Run) value$; - break; - case 2: - warm = (sparqles.avro.performance.Run) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Sets the value of the 'cold' field. + * + * @param value The value of 'cold'. + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder setCold( + sparqles.avro.performance.Run value) { + validate(fields()[1], value); + this.coldBuilder = null; + this.cold = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'query' field. */ - public java.lang.CharSequence getQuery() { - return query; + /** + * Checks whether the 'cold' field has been set. + * + * @return True if the 'cold' field has been set, false otherwise. + */ + public boolean hasCold() { + return fieldSetFlags()[1]; } /** - * Sets the value of the 'query' field. 
+ * Gets the Builder instance for the 'cold' field and creates one if it doesn't exist yet. * - * @param value the value to set. + * @return This builder. */ - public void setQuery(java.lang.CharSequence value) { - this.query = value; + public sparqles.avro.performance.Run.Builder getColdBuilder() { + if (coldBuilder == null) { + if (hasCold()) { + setColdBuilder(sparqles.avro.performance.Run.newBuilder(cold)); + } else { + setColdBuilder(sparqles.avro.performance.Run.newBuilder()); + } + } + return coldBuilder; } - /** Gets the value of the 'cold' field. */ - public sparqles.avro.performance.Run getCold() { - return cold; + /** + * Sets the Builder instance for the 'cold' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder setColdBuilder( + sparqles.avro.performance.Run.Builder value) { + clearCold(); + coldBuilder = value; + return this; } /** - * Sets the value of the 'cold' field. + * Checks whether the 'cold' field has an active Builder instance * - * @param value the value to set. + * @return True if the 'cold' field has an active Builder instance */ - public void setCold(sparqles.avro.performance.Run value) { - this.cold = value; + public boolean hasColdBuilder() { + return coldBuilder != null; } - /** Gets the value of the 'warm' field. */ + /** + * Clears the value of the 'cold' field. + * + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder clearCold() { + cold = null; + coldBuilder = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'warm' field. + * + * @return The value. + */ public sparqles.avro.performance.Run getWarm() { - return warm; + return warm; } /** * Sets the value of the 'warm' field. * - * @param value the value to set. + * @param value The value of 'warm'. + * @return This builder. 
*/ - public void setWarm(sparqles.avro.performance.Run value) { - this.warm = value; + public sparqles.avro.performance.PSingleResult.Builder setWarm( + sparqles.avro.performance.Run value) { + validate(fields()[2], value); + this.warmBuilder = null; + this.warm = value; + fieldSetFlags()[2] = true; + return this; } - /** RecordBuilder for PSingleResult instances. */ - public static class Builder - extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence query; - private sparqles.avro.performance.Run cold; - private sparqles.avro.performance.Run warm; - - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.performance.PSingleResult.SCHEMA$); - } - - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.performance.PSingleResult.Builder other) { - super(other); - if (isValidValue(fields()[0], other.query)) { - this.query = data().deepCopy(fields()[0].schema(), other.query); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.cold)) { - this.cold = data().deepCopy(fields()[1].schema(), other.cold); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.warm)) { - this.warm = data().deepCopy(fields()[2].schema(), other.warm); - fieldSetFlags()[2] = true; - } - } - - /** Creates a Builder by copying an existing PSingleResult instance */ - private Builder(sparqles.avro.performance.PSingleResult other) { - super(sparqles.avro.performance.PSingleResult.SCHEMA$); - if (isValidValue(fields()[0], other.query)) { - this.query = data().deepCopy(fields()[0].schema(), other.query); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.cold)) { - this.cold = data().deepCopy(fields()[1].schema(), other.cold); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.warm)) { - this.warm = data().deepCopy(fields()[2].schema(), other.warm); - fieldSetFlags()[2] = true; - } - } - 
- /** Gets the value of the 'query' field */ - public java.lang.CharSequence getQuery() { - return query; - } - - /** Sets the value of the 'query' field */ - public sparqles.avro.performance.PSingleResult.Builder setQuery( - java.lang.CharSequence value) { - validate(fields()[0], value); - this.query = value; - fieldSetFlags()[0] = true; - return this; - } - - /** Checks whether the 'query' field has been set */ - public boolean hasQuery() { - return fieldSetFlags()[0]; - } - - /** Clears the value of the 'query' field */ - public sparqles.avro.performance.PSingleResult.Builder clearQuery() { - query = null; - fieldSetFlags()[0] = false; - return this; - } - - /** Gets the value of the 'cold' field */ - public sparqles.avro.performance.Run getCold() { - return cold; - } - - /** Sets the value of the 'cold' field */ - public sparqles.avro.performance.PSingleResult.Builder setCold( - sparqles.avro.performance.Run value) { - validate(fields()[1], value); - this.cold = value; - fieldSetFlags()[1] = true; - return this; - } + /** + * Checks whether the 'warm' field has been set. + * + * @return True if the 'warm' field has been set, false otherwise. + */ + public boolean hasWarm() { + return fieldSetFlags()[2]; + } - /** Checks whether the 'cold' field has been set */ - public boolean hasCold() { - return fieldSetFlags()[1]; + /** + * Gets the Builder instance for the 'warm' field and creates one if it doesn't exist yet. + * + * @return This builder. 
+ */ + public sparqles.avro.performance.Run.Builder getWarmBuilder() { + if (warmBuilder == null) { + if (hasWarm()) { + setWarmBuilder(sparqles.avro.performance.Run.newBuilder(warm)); + } else { + setWarmBuilder(sparqles.avro.performance.Run.newBuilder()); } + } + return warmBuilder; + } - /** Clears the value of the 'cold' field */ - public sparqles.avro.performance.PSingleResult.Builder clearCold() { - cold = null; - fieldSetFlags()[1] = false; - return this; - } + /** + * Sets the Builder instance for the 'warm' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder setWarmBuilder( + sparqles.avro.performance.Run.Builder value) { + clearWarm(); + warmBuilder = value; + return this; + } - /** Gets the value of the 'warm' field */ - public sparqles.avro.performance.Run getWarm() { - return warm; - } + /** + * Checks whether the 'warm' field has an active Builder instance + * + * @return True if the 'warm' field has an active Builder instance + */ + public boolean hasWarmBuilder() { + return warmBuilder != null; + } - /** Sets the value of the 'warm' field */ - public sparqles.avro.performance.PSingleResult.Builder setWarm( - sparqles.avro.performance.Run value) { - validate(fields()[2], value); - this.warm = value; - fieldSetFlags()[2] = true; - return this; - } + /** + * Clears the value of the 'warm' field. + * + * @return This builder. + */ + public sparqles.avro.performance.PSingleResult.Builder clearWarm() { + warm = null; + warmBuilder = null; + fieldSetFlags()[2] = false; + return this; + } - /** Checks whether the 'warm' field has been set */ - public boolean hasWarm() { - return fieldSetFlags()[2]; + @Override + @SuppressWarnings("unchecked") + public PSingleResult build() { + try { + PSingleResult record = new PSingleResult(); + record.query = + fieldSetFlags()[0] ? 
this.query : (java.lang.CharSequence) defaultValue(fields()[0]); + if (coldBuilder != null) { + try { + record.cold = this.coldBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("cold")); + throw e; + } + } else { + record.cold = + fieldSetFlags()[1] + ? this.cold + : (sparqles.avro.performance.Run) defaultValue(fields()[1]); } - - /** Clears the value of the 'warm' field */ - public sparqles.avro.performance.PSingleResult.Builder clearWarm() { - warm = null; - fieldSetFlags()[2] = false; - return this; + if (warmBuilder != null) { + try { + record.warm = this.warmBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("warm")); + throw e; + } + } else { + record.warm = + fieldSetFlags()[2] + ? this.warm + : (sparqles.avro.performance.Run) defaultValue(fields()[2]); } + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.query); + + this.cold.customEncode(out); + + 
this.warm.customEncode(out); + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.query = in.readString(this.query instanceof Utf8 ? (Utf8) this.query : null); + + if (this.cold == null) { + this.cold = new sparqles.avro.performance.Run(); + } + this.cold.customDecode(in); + + if (this.warm == null) { + this.warm = new sparqles.avro.performance.Run(); + } + this.warm.customDecode(in); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.query = in.readString(this.query instanceof Utf8 ? (Utf8) this.query : null); + break; + + case 1: + if (this.cold == null) { + this.cold = new sparqles.avro.performance.Run(); + } + this.cold.customDecode(in); + break; - @Override - public PSingleResult build() { - try { - PSingleResult record = new PSingleResult(); - record.query = - fieldSetFlags()[0] - ? this.query - : (java.lang.CharSequence) defaultValue(fields()[0]); - record.cold = - fieldSetFlags()[1] - ? this.cold - : (sparqles.avro.performance.Run) defaultValue(fields()[1]); - record.warm = - fieldSetFlags()[2] - ? 
this.warm - : (sparqles.avro.performance.Run) defaultValue(fields()[2]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 2: + if (this.warm == null) { + this.warm = new sparqles.avro.performance.Run(); } + this.warm.customDecode(in); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/performance/Run.java b/backend/src/main/java/sparqles/avro/performance/Run.java index aef369d0..957699cb 100644 --- a/backend/src/main/java/sparqles/avro/performance/Run.java +++ b/backend/src/main/java/sparqles/avro/performance/Run.java @@ -5,509 +5,877 @@ */ package sparqles.avro.performance; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class Run extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"Run\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}"); - @Deprecated public long frestout; - @Deprecated public int solutions; - @Deprecated public long inittime; - @Deprecated public long exectime; - @Deprecated public long closetime; - @Deprecated public java.lang.CharSequence Exception; - @Deprecated public long exectout; + implements org.apache.avro.specific.SpecificRecord { 
+ private static final long serialVersionUID = 4884504287540360486L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"Run\",\"namespace\":\"sparqles.avro.performance\",\"fields\":[{\"name\":\"frestout\",\"type\":\"long\"},{\"name\":\"solutions\",\"type\":\"int\"},{\"name\":\"inittime\",\"type\":\"long\"},{\"name\":\"exectime\",\"type\":\"long\"},{\"name\":\"closetime\",\"type\":\"long\"},{\"name\":\"Exception\",\"type\":[\"string\",\"null\"]},{\"name\":\"exectout\",\"type\":\"long\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Run to a ByteBuffer. 
+ * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Run from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Run instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static Run fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private long frestout; + private int solutions; + private long inittime; + private long exectime; + private long closetime; + private java.lang.CharSequence Exception; + private long exectout; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. If that is desired then one should use newBuilder(). + */ + public Run() {} + + /** + * All-args constructor. 
+ * + * @param frestout The new value for frestout + * @param solutions The new value for solutions + * @param inittime The new value for inittime + * @param exectime The new value for exectime + * @param closetime The new value for closetime + * @param Exception The new value for Exception + * @param exectout The new value for exectout + */ + public Run( + java.lang.Long frestout, + java.lang.Integer solutions, + java.lang.Long inittime, + java.lang.Long exectime, + java.lang.Long closetime, + java.lang.CharSequence Exception, + java.lang.Long exectout) { + this.frestout = frestout; + this.solutions = solutions; + this.inittime = inittime; + this.exectime = exectime; + this.closetime = closetime; + this.Exception = Exception; + this.exectout = exectout; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return frestout; + case 1: + return solutions; + case 2: + return inittime; + case 3: + return exectime; + case 4: + return closetime; + case 5: + return Exception; + case 6: + return exectout; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + frestout = (java.lang.Long) value$; + break; + case 1: + solutions = (java.lang.Integer) value$; + break; + case 2: + inittime = (java.lang.Long) value$; + break; + case 3: + exectime = (java.lang.Long) value$; + break; + case 4: + closetime = (java.lang.Long) value$; + break; + case 5: + Exception = (java.lang.CharSequence) value$; + break; + case 6: + exectout = (java.lang.Long) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'frestout' field. + * + * @return The value of the 'frestout' field. + */ + public long getFrestout() { + return frestout; + } + + /** + * Sets the value of the 'frestout' field. + * + * @param value the value to set. + */ + public void setFrestout(long value) { + this.frestout = value; + } + + /** + * Gets the value of the 'solutions' field. + * + * @return The value of the 'solutions' field. + */ + public int getSolutions() { + return solutions; + } + + /** + * Sets the value of the 'solutions' field. + * + * @param value the value to set. + */ + public void setSolutions(int value) { + this.solutions = value; + } + + /** + * Gets the value of the 'inittime' field. + * + * @return The value of the 'inittime' field. + */ + public long getInittime() { + return inittime; + } + + /** + * Sets the value of the 'inittime' field. + * + * @param value the value to set. + */ + public void setInittime(long value) { + this.inittime = value; + } + + /** + * Gets the value of the 'exectime' field. + * + * @return The value of the 'exectime' field. + */ + public long getExectime() { + return exectime; + } + + /** + * Sets the value of the 'exectime' field. + * + * @param value the value to set. + */ + public void setExectime(long value) { + this.exectime = value; + } + + /** + * Gets the value of the 'closetime' field. 
+ * + * @return The value of the 'closetime' field. + */ + public long getClosetime() { + return closetime; + } + + /** + * Sets the value of the 'closetime' field. + * + * @param value the value to set. + */ + public void setClosetime(long value) { + this.closetime = value; + } + + /** + * Gets the value of the 'Exception' field. + * + * @return The value of the 'Exception' field. + */ + public java.lang.CharSequence getException() { + return Exception; + } + + /** + * Sets the value of the 'Exception' field. + * + * @param value the value to set. + */ + public void setException(java.lang.CharSequence value) { + this.Exception = value; + } + + /** + * Gets the value of the 'exectout' field. + * + * @return The value of the 'exectout' field. + */ + public long getExectout() { + return exectout; + } + + /** + * Sets the value of the 'exectout' field. + * + * @param value the value to set. + */ + public void setExectout(long value) { + this.exectout = value; + } + + /** + * Creates a new Run RecordBuilder. + * + * @return A new Run RecordBuilder + */ + public static sparqles.avro.performance.Run.Builder newBuilder() { + return new sparqles.avro.performance.Run.Builder(); + } + + /** + * Creates a new Run RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new Run RecordBuilder + */ + public static sparqles.avro.performance.Run.Builder newBuilder( + sparqles.avro.performance.Run.Builder other) { + if (other == null) { + return new sparqles.avro.performance.Run.Builder(); + } else { + return new sparqles.avro.performance.Run.Builder(other); + } + } + + /** + * Creates a new Run RecordBuilder by copying an existing Run instance. + * + * @param other The existing instance to copy. 
+ * @return A new Run RecordBuilder + */ + public static sparqles.avro.performance.Run.Builder newBuilder( + sparqles.avro.performance.Run other) { + if (other == null) { + return new sparqles.avro.performance.Run.Builder(); + } else { + return new sparqles.avro.performance.Run.Builder(other); + } + } + + /** RecordBuilder for Run instances. */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private long frestout; + private int solutions; + private long inittime; + private long exectime; + private long closetime; + private java.lang.CharSequence Exception; + private long exectout; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. */ - public Run() {} - - /** All-args constructor. 
*/ - public Run( - java.lang.Long frestout, - java.lang.Integer solutions, - java.lang.Long inittime, - java.lang.Long exectime, - java.lang.Long closetime, - java.lang.CharSequence Exception, - java.lang.Long exectout) { - this.frestout = frestout; - this.solutions = solutions; - this.inittime = inittime; - this.exectime = exectime; - this.closetime = closetime; - this.Exception = Exception; - this.exectout = exectout; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new Run RecordBuilder */ - public static sparqles.avro.performance.Run.Builder newBuilder() { - return new sparqles.avro.performance.Run.Builder(); - } - - /** Creates a new Run RecordBuilder by copying an existing Builder */ - public static sparqles.avro.performance.Run.Builder newBuilder( - sparqles.avro.performance.Run.Builder other) { - return new sparqles.avro.performance.Run.Builder(other); - } - - /** Creates a new Run RecordBuilder by copying an existing Run instance */ - public static sparqles.avro.performance.Run.Builder newBuilder( - sparqles.avro.performance.Run other) { - return new sparqles.avro.performance.Run.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return frestout; - case 1: - return solutions; - case 2: - return inittime; - case 3: - return exectime; - case 4: - return closetime; - case 5: - return Exception; - case 6: - return exectout; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + private Builder(sparqles.avro.performance.Run.Builder other) { + super(other); + if (isValidValue(fields()[0], other.frestout)) { + this.frestout = data().deepCopy(fields()[0].schema(), other.frestout); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.solutions)) { + this.solutions = data().deepCopy(fields()[1].schema(), other.solutions); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.inittime)) { + this.inittime = data().deepCopy(fields()[2].schema(), other.inittime); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.exectime)) { + this.exectime = data().deepCopy(fields()[3].schema(), other.exectime); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.closetime)) { + this.closetime = data().deepCopy(fields()[4].schema(), other.closetime); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.Exception)) { + this.Exception = data().deepCopy(fields()[5].schema(), other.Exception); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + if (isValidValue(fields()[6], other.exectout)) { + this.exectout = data().deepCopy(fields()[6].schema(), other.exectout); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - frestout = (java.lang.Long) value$; - break; - case 1: - solutions = (java.lang.Integer) value$; - break; - case 2: - inittime = (java.lang.Long) value$; - break; - case 3: - exectime = (java.lang.Long) value$; - break; - case 4: - closetime = (java.lang.Long) value$; - break; - case 5: - Exception = (java.lang.CharSequence) value$; - break; - case 6: - exectout = (java.lang.Long) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + /** + * Creates a Builder by copying an existing Run instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.performance.Run other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.frestout)) { + this.frestout = data().deepCopy(fields()[0].schema(), other.frestout); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.solutions)) { + this.solutions = data().deepCopy(fields()[1].schema(), other.solutions); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.inittime)) { + this.inittime = data().deepCopy(fields()[2].schema(), other.inittime); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.exectime)) { + this.exectime = data().deepCopy(fields()[3].schema(), other.exectime); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.closetime)) { + this.closetime = data().deepCopy(fields()[4].schema(), other.closetime); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.Exception)) { + this.Exception = data().deepCopy(fields()[5].schema(), other.Exception); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.exectout)) { + this.exectout = data().deepCopy(fields()[6].schema(), other.exectout); + fieldSetFlags()[6] = true; + } } - /** Gets the value of the 'frestout' field. 
*/ - public java.lang.Long getFrestout() { - return frestout; + /** + * Gets the value of the 'frestout' field. + * + * @return The value. + */ + public long getFrestout() { + return frestout; } /** * Sets the value of the 'frestout' field. * - * @param value the value to set. + * @param value The value of 'frestout'. + * @return This builder. */ - public void setFrestout(java.lang.Long value) { - this.frestout = value; + public sparqles.avro.performance.Run.Builder setFrestout(long value) { + validate(fields()[0], value); + this.frestout = value; + fieldSetFlags()[0] = true; + return this; } - /** Gets the value of the 'solutions' field. */ - public java.lang.Integer getSolutions() { - return solutions; + /** + * Checks whether the 'frestout' field has been set. + * + * @return True if the 'frestout' field has been set, false otherwise. + */ + public boolean hasFrestout() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'frestout' field. + * + * @return This builder. + */ + public sparqles.avro.performance.Run.Builder clearFrestout() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'solutions' field. + * + * @return The value. + */ + public int getSolutions() { + return solutions; } /** * Sets the value of the 'solutions' field. * - * @param value the value to set. + * @param value The value of 'solutions'. + * @return This builder. */ - public void setSolutions(java.lang.Integer value) { - this.solutions = value; + public sparqles.avro.performance.Run.Builder setSolutions(int value) { + validate(fields()[1], value); + this.solutions = value; + fieldSetFlags()[1] = true; + return this; } - /** Gets the value of the 'inittime' field. */ - public java.lang.Long getInittime() { - return inittime; + /** + * Checks whether the 'solutions' field has been set. + * + * @return True if the 'solutions' field has been set, false otherwise. 
+ */ + public boolean hasSolutions() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'solutions' field. + * + * @return This builder. + */ + public sparqles.avro.performance.Run.Builder clearSolutions() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'inittime' field. + * + * @return The value. + */ + public long getInittime() { + return inittime; } /** * Sets the value of the 'inittime' field. * - * @param value the value to set. + * @param value The value of 'inittime'. + * @return This builder. */ - public void setInittime(java.lang.Long value) { - this.inittime = value; + public sparqles.avro.performance.Run.Builder setInittime(long value) { + validate(fields()[2], value); + this.inittime = value; + fieldSetFlags()[2] = true; + return this; } - /** Gets the value of the 'exectime' field. */ - public java.lang.Long getExectime() { - return exectime; + /** + * Checks whether the 'inittime' field has been set. + * + * @return True if the 'inittime' field has been set, false otherwise. + */ + public boolean hasInittime() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'inittime' field. + * + * @return This builder. + */ + public sparqles.avro.performance.Run.Builder clearInittime() { + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'exectime' field. + * + * @return The value. + */ + public long getExectime() { + return exectime; } /** * Sets the value of the 'exectime' field. * - * @param value the value to set. + * @param value The value of 'exectime'. + * @return This builder. */ - public void setExectime(java.lang.Long value) { - this.exectime = value; + public sparqles.avro.performance.Run.Builder setExectime(long value) { + validate(fields()[3], value); + this.exectime = value; + fieldSetFlags()[3] = true; + return this; } - /** Gets the value of the 'closetime' field. 
*/ - public java.lang.Long getClosetime() { - return closetime; + /** + * Checks whether the 'exectime' field has been set. + * + * @return True if the 'exectime' field has been set, false otherwise. + */ + public boolean hasExectime() { + return fieldSetFlags()[3]; } /** - * Sets the value of the 'closetime' field. + * Clears the value of the 'exectime' field. * - * @param value the value to set. + * @return This builder. */ - public void setClosetime(java.lang.Long value) { - this.closetime = value; + public sparqles.avro.performance.Run.Builder clearExectime() { + fieldSetFlags()[3] = false; + return this; } - /** Gets the value of the 'Exception' field. */ - public java.lang.CharSequence getException() { - return Exception; + /** + * Gets the value of the 'closetime' field. + * + * @return The value. + */ + public long getClosetime() { + return closetime; } /** - * Sets the value of the 'Exception' field. + * Sets the value of the 'closetime' field. * - * @param value the value to set. + * @param value The value of 'closetime'. + * @return This builder. */ - public void setException(java.lang.CharSequence value) { - this.Exception = value; + public sparqles.avro.performance.Run.Builder setClosetime(long value) { + validate(fields()[4], value); + this.closetime = value; + fieldSetFlags()[4] = true; + return this; } - /** Gets the value of the 'exectout' field. */ - public java.lang.Long getExectout() { - return exectout; + /** + * Checks whether the 'closetime' field has been set. + * + * @return True if the 'closetime' field has been set, false otherwise. + */ + public boolean hasClosetime() { + return fieldSetFlags()[4]; } /** - * Sets the value of the 'exectout' field. + * Clears the value of the 'closetime' field. * - * @param value the value to set. + * @return This builder. 
*/ - public void setExectout(java.lang.Long value) { - this.exectout = value; + public sparqles.avro.performance.Run.Builder clearClosetime() { + fieldSetFlags()[4] = false; + return this; } - /** RecordBuilder for Run instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Gets the value of the 'Exception' field. + * + * @return The value. + */ + public java.lang.CharSequence getException() { + return Exception; + } - private long frestout; - private int solutions; - private long inittime; - private long exectime; - private long closetime; - private java.lang.CharSequence Exception; - private long exectout; + /** + * Sets the value of the 'Exception' field. + * + * @param value The value of 'Exception'. + * @return This builder. + */ + public sparqles.avro.performance.Run.Builder setException(java.lang.CharSequence value) { + validate(fields()[5], value); + this.Exception = value; + fieldSetFlags()[5] = true; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.performance.Run.SCHEMA$); - } + /** + * Checks whether the 'Exception' field has been set. + * + * @return True if the 'Exception' field has been set, false otherwise. 
+ */ + public boolean hasException() { + return fieldSetFlags()[5]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.performance.Run.Builder other) { - super(other); - if (isValidValue(fields()[0], other.frestout)) { - this.frestout = data().deepCopy(fields()[0].schema(), other.frestout); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.solutions)) { - this.solutions = data().deepCopy(fields()[1].schema(), other.solutions); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.inittime)) { - this.inittime = data().deepCopy(fields()[2].schema(), other.inittime); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.exectime)) { - this.exectime = data().deepCopy(fields()[3].schema(), other.exectime); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.closetime)) { - this.closetime = data().deepCopy(fields()[4].schema(), other.closetime); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.Exception)) { - this.Exception = data().deepCopy(fields()[5].schema(), other.Exception); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.exectout)) { - this.exectout = data().deepCopy(fields()[6].schema(), other.exectout); - fieldSetFlags()[6] = true; - } - } + /** + * Clears the value of the 'Exception' field. + * + * @return This builder. 
+ */ + public sparqles.avro.performance.Run.Builder clearException() { + Exception = null; + fieldSetFlags()[5] = false; + return this; + } - /** Creates a Builder by copying an existing Run instance */ - private Builder(sparqles.avro.performance.Run other) { - super(sparqles.avro.performance.Run.SCHEMA$); - if (isValidValue(fields()[0], other.frestout)) { - this.frestout = data().deepCopy(fields()[0].schema(), other.frestout); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.solutions)) { - this.solutions = data().deepCopy(fields()[1].schema(), other.solutions); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.inittime)) { - this.inittime = data().deepCopy(fields()[2].schema(), other.inittime); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.exectime)) { - this.exectime = data().deepCopy(fields()[3].schema(), other.exectime); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.closetime)) { - this.closetime = data().deepCopy(fields()[4].schema(), other.closetime); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.Exception)) { - this.Exception = data().deepCopy(fields()[5].schema(), other.Exception); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.exectout)) { - this.exectout = data().deepCopy(fields()[6].schema(), other.exectout); - fieldSetFlags()[6] = true; - } - } + /** + * Gets the value of the 'exectout' field. + * + * @return The value. + */ + public long getExectout() { + return exectout; + } - /** Gets the value of the 'frestout' field */ - public java.lang.Long getFrestout() { - return frestout; - } + /** + * Sets the value of the 'exectout' field. + * + * @param value The value of 'exectout'. + * @return This builder. 
+ */ + public sparqles.avro.performance.Run.Builder setExectout(long value) { + validate(fields()[6], value); + this.exectout = value; + fieldSetFlags()[6] = true; + return this; + } - /** Sets the value of the 'frestout' field */ - public sparqles.avro.performance.Run.Builder setFrestout(long value) { - validate(fields()[0], value); - this.frestout = value; - fieldSetFlags()[0] = true; - return this; - } + /** + * Checks whether the 'exectout' field has been set. + * + * @return True if the 'exectout' field has been set, false otherwise. + */ + public boolean hasExectout() { + return fieldSetFlags()[6]; + } - /** Checks whether the 'frestout' field has been set */ - public boolean hasFrestout() { - return fieldSetFlags()[0]; - } + /** + * Clears the value of the 'exectout' field. + * + * @return This builder. + */ + public sparqles.avro.performance.Run.Builder clearExectout() { + fieldSetFlags()[6] = false; + return this; + } - /** Clears the value of the 'frestout' field */ - public sparqles.avro.performance.Run.Builder clearFrestout() { - fieldSetFlags()[0] = false; - return this; - } + @Override + @SuppressWarnings("unchecked") + public Run build() { + try { + Run record = new Run(); + record.frestout = + fieldSetFlags()[0] ? this.frestout : (java.lang.Long) defaultValue(fields()[0]); + record.solutions = + fieldSetFlags()[1] ? this.solutions : (java.lang.Integer) defaultValue(fields()[1]); + record.inittime = + fieldSetFlags()[2] ? this.inittime : (java.lang.Long) defaultValue(fields()[2]); + record.exectime = + fieldSetFlags()[3] ? this.exectime : (java.lang.Long) defaultValue(fields()[3]); + record.closetime = + fieldSetFlags()[4] ? this.closetime : (java.lang.Long) defaultValue(fields()[4]); + record.Exception = + fieldSetFlags()[5] + ? this.Exception + : (java.lang.CharSequence) defaultValue(fields()[5]); + record.exectout = + fieldSetFlags()[6] ? 
this.exectout : (java.lang.Long) defaultValue(fields()[6]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Gets the value of the 'solutions' field */ - public java.lang.Integer getSolutions() { - return solutions; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Sets the value of the 'solutions' field */ - public sparqles.avro.performance.Run.Builder setSolutions(int value) { - validate(fields()[1], value); - this.solutions = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Checks whether the 'solutions' field has been set */ - public boolean hasSolutions() { - return fieldSetFlags()[1]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Clears the value of the 'solutions' field */ - public sparqles.avro.performance.Run.Builder clearSolutions() { - fieldSetFlags()[1] = false; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Gets the value of the 'inittime' field */ - public java.lang.Long getInittime() { - return inittime; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Sets the value of the 'inittime' field */ - public sparqles.avro.performance.Run.Builder setInittime(long value) { - validate(fields()[2], value); - this.inittime = value; - fieldSetFlags()[2] = true; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) 
throws java.io.IOException { + out.writeLong(this.frestout); - /** Checks whether the 'inittime' field has been set */ - public boolean hasInittime() { - return fieldSetFlags()[2]; - } + out.writeInt(this.solutions); - /** Clears the value of the 'inittime' field */ - public sparqles.avro.performance.Run.Builder clearInittime() { - fieldSetFlags()[2] = false; - return this; - } + out.writeLong(this.inittime); - /** Gets the value of the 'exectime' field */ - public java.lang.Long getExectime() { - return exectime; - } + out.writeLong(this.exectime); - /** Sets the value of the 'exectime' field */ - public sparqles.avro.performance.Run.Builder setExectime(long value) { - validate(fields()[3], value); - this.exectime = value; - fieldSetFlags()[3] = true; - return this; - } + out.writeLong(this.closetime); - /** Checks whether the 'exectime' field has been set */ - public boolean hasExectime() { - return fieldSetFlags()[3]; - } + if (this.Exception == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.Exception); + } - /** Clears the value of the 'exectime' field */ - public sparqles.avro.performance.Run.Builder clearExectime() { - fieldSetFlags()[3] = false; - return this; - } + out.writeLong(this.exectout); + } - /** Gets the value of the 'closetime' field */ - public java.lang.Long getClosetime() { - return closetime; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.frestout = in.readLong(); - /** Sets the value of the 'closetime' field */ - public sparqles.avro.performance.Run.Builder setClosetime(long value) { - validate(fields()[4], value); - this.closetime = value; - fieldSetFlags()[4] = true; - return this; - } + this.solutions = in.readInt(); - /** Checks whether the 'closetime' field has been set */ - public boolean hasClosetime() { - 
return fieldSetFlags()[4]; - } + this.inittime = in.readLong(); - /** Clears the value of the 'closetime' field */ - public sparqles.avro.performance.Run.Builder clearClosetime() { - fieldSetFlags()[4] = false; - return this; - } + this.exectime = in.readLong(); - /** Gets the value of the 'Exception' field */ - public java.lang.CharSequence getException() { - return Exception; - } + this.closetime = in.readLong(); - /** Sets the value of the 'Exception' field */ - public sparqles.avro.performance.Run.Builder setException(java.lang.CharSequence value) { - validate(fields()[5], value); - this.Exception = value; - fieldSetFlags()[5] = true; - return this; - } + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? (Utf8) this.Exception : null); + } - /** Checks whether the 'Exception' field has been set */ - public boolean hasException() { - return fieldSetFlags()[5]; - } + this.exectout = in.readLong(); - /** Clears the value of the 'Exception' field */ - public sparqles.avro.performance.Run.Builder clearException() { - Exception = null; - fieldSetFlags()[5] = false; - return this; - } + } else { + for (int i = 0; i < 7; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.frestout = in.readLong(); + break; - /** Gets the value of the 'exectout' field */ - public java.lang.Long getExectout() { - return exectout; - } + case 1: + this.solutions = in.readInt(); + break; - /** Sets the value of the 'exectout' field */ - public sparqles.avro.performance.Run.Builder setExectout(long value) { - validate(fields()[6], value); - this.exectout = value; - fieldSetFlags()[6] = true; - return this; - } + case 2: + this.inittime = in.readLong(); + break; - /** Checks whether the 'exectout' field has been set */ - public boolean hasExectout() { - return fieldSetFlags()[6]; - } + case 3: + this.exectime = in.readLong(); + break; - /** Clears the value of the 'exectout' field */ - public 
sparqles.avro.performance.Run.Builder clearExectout() { - fieldSetFlags()[6] = false; - return this; - } + case 4: + this.closetime = in.readLong(); + break; - @Override - public Run build() { - try { - Run record = new Run(); - record.frestout = - fieldSetFlags()[0] - ? this.frestout - : (java.lang.Long) defaultValue(fields()[0]); - record.solutions = - fieldSetFlags()[1] - ? this.solutions - : (java.lang.Integer) defaultValue(fields()[1]); - record.inittime = - fieldSetFlags()[2] - ? this.inittime - : (java.lang.Long) defaultValue(fields()[2]); - record.exectime = - fieldSetFlags()[3] - ? this.exectime - : (java.lang.Long) defaultValue(fields()[3]); - record.closetime = - fieldSetFlags()[4] - ? this.closetime - : (java.lang.Long) defaultValue(fields()[4]); - record.Exception = - fieldSetFlags()[5] - ? this.Exception - : (java.lang.CharSequence) defaultValue(fields()[5]); - record.exectout = - fieldSetFlags()[6] - ? this.exectout - : (java.lang.Long) defaultValue(fields()[6]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 5: + if (in.readIndex() != 0) { + in.readNull(); + this.Exception = null; + } else { + this.Exception = + in.readString(this.Exception instanceof Utf8 ? 
(Utf8) this.Exception : null); } + break; + + case 6: + this.exectout = in.readLong(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/avro/schedule/Schedule.java b/backend/src/main/java/sparqles/avro/schedule/Schedule.java index 5474a33d..b29fef8f 100644 --- a/backend/src/main/java/sparqles/avro/schedule/Schedule.java +++ b/backend/src/main/java/sparqles/avro/schedule/Schedule.java @@ -5,515 +5,1125 @@ */ package sparqles.avro.schedule; -@SuppressWarnings("all") +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + @org.apache.avro.specific.AvroGenerated public class Schedule extends org.apache.avro.specific.SpecificRecordBase - implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = - new org.apache.avro.Schema.Parser() - .parse( - "{\"type\":\"record\",\"name\":\"Schedule\",\"namespace\":\"sparqles.avro.schedule\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"ATask\",\"type\":[\"string\",\"null\"]},{\"name\":\"FTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"PTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"DTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"ITask\",\"type\":[\"string\",\"null\"]},{\"name\":\"ETask\",\"type\":[\"string\",\"null\"]}]}"); - @Deprecated public sparqles.avro.Endpoint endpoint; - @Deprecated public java.lang.CharSequence ATask; - @Deprecated public java.lang.CharSequence 
FTask; - @Deprecated public java.lang.CharSequence PTask; - @Deprecated public java.lang.CharSequence DTask; - @Deprecated public java.lang.CharSequence ITask; - @Deprecated public java.lang.CharSequence ETask; - - /** - * Default constructor. Note that this does not initialize fields to their default values from - * the schema. If that is desired then one should use {@link \#newBuilder()}. - */ - public Schedule() {} - - /** All-args constructor. */ - public Schedule( - sparqles.avro.Endpoint endpoint, - java.lang.CharSequence ATask, - java.lang.CharSequence FTask, - java.lang.CharSequence PTask, - java.lang.CharSequence DTask, - java.lang.CharSequence ITask, - java.lang.CharSequence ETask) { - this.endpoint = endpoint; - this.ATask = ATask; - this.FTask = FTask; - this.PTask = PTask; - this.DTask = DTask; - this.ITask = ITask; - this.ETask = ETask; - } - - public static org.apache.avro.Schema getClassSchema() { - return SCHEMA$; - } - - /** Creates a new Schedule RecordBuilder */ - public static sparqles.avro.schedule.Schedule.Builder newBuilder() { - return new sparqles.avro.schedule.Schedule.Builder(); - } - - /** Creates a new Schedule RecordBuilder by copying an existing Builder */ - public static sparqles.avro.schedule.Schedule.Builder newBuilder( - sparqles.avro.schedule.Schedule.Builder other) { - return new sparqles.avro.schedule.Schedule.Builder(other); - } - - /** Creates a new Schedule RecordBuilder by copying an existing Schedule instance */ - public static sparqles.avro.schedule.Schedule.Builder newBuilder( - sparqles.avro.schedule.Schedule other) { - return new sparqles.avro.schedule.Schedule.Builder(other); - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. 
- public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return endpoint; - case 1: - return ATask; - case 2: - return FTask; - case 3: - return PTask; - case 4: - return DTask; - case 5: - return ITask; - case 6: - return ETask; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 1371232787442374287L; + + public static final org.apache.avro.Schema SCHEMA$ = + new org.apache.avro.Schema.Parser() + .parse( + "{\"type\":\"record\",\"name\":\"Schedule\",\"namespace\":\"sparqles.avro.schedule\",\"fields\":[{\"name\":\"endpoint\",\"type\":{\"type\":\"record\",\"name\":\"Endpoint\",\"namespace\":\"sparqles.avro\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"datasets\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Dataset\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"}]}}}]}},{\"name\":\"ATask\",\"type\":[\"string\",\"null\"]},{\"name\":\"FTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"PTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"DTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"CTask\",\"type\":[\"string\",\"null\"]},{\"name\":\"ITask\",\"type\":[\"string\",\"null\"]},{\"name\":\"ETask\",\"type\":[\"string\",\"null\"]}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link + * SchemaStore}. + * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Schedule to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Schedule from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Schedule instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of + * this class + */ + public static Schedule fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private sparqles.avro.Endpoint endpoint; + private java.lang.CharSequence ATask; + private java.lang.CharSequence FTask; + private java.lang.CharSequence PTask; + private java.lang.CharSequence DTask; + private java.lang.CharSequence CTask; + private java.lang.CharSequence ITask; + private java.lang.CharSequence ETask; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the + * schema. 
If that is desired then one should use newBuilder(). + */ + public Schedule() {} + + /** + * All-args constructor. + * + * @param endpoint The new value for endpoint + * @param ATask The new value for ATask + * @param FTask The new value for FTask + * @param PTask The new value for PTask + * @param DTask The new value for DTask + * @param CTask The new value for CTask + * @param ITask The new value for ITask + * @param ETask The new value for ETask + */ + public Schedule( + sparqles.avro.Endpoint endpoint, + java.lang.CharSequence ATask, + java.lang.CharSequence FTask, + java.lang.CharSequence PTask, + java.lang.CharSequence DTask, + java.lang.CharSequence CTask, + java.lang.CharSequence ITask, + java.lang.CharSequence ETask) { + this.endpoint = endpoint; + this.ATask = ATask; + this.FTask = FTask; + this.PTask = PTask; + this.DTask = DTask; + this.CTask = CTask; + this.ITask = ITask; + this.ETask = ETask; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return endpoint; + case 1: + return ATask; + case 2: + return FTask; + case 3: + return PTask; + case 4: + return DTask; + case 5: + return CTask; + case 6: + return ITask; + case 7: + return ETask; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); } + } - // Used by DatumReader. Applications should not call. 
- @SuppressWarnings(value = "unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - endpoint = (sparqles.avro.Endpoint) value$; - break; - case 1: - ATask = (java.lang.CharSequence) value$; - break; - case 2: - FTask = (java.lang.CharSequence) value$; - break; - case 3: - PTask = (java.lang.CharSequence) value$; - break; - case 4: - DTask = (java.lang.CharSequence) value$; - break; - case 5: - ITask = (java.lang.CharSequence) value$; - break; - case 6: - ETask = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + endpoint = (sparqles.avro.Endpoint) value$; + break; + case 1: + ATask = (java.lang.CharSequence) value$; + break; + case 2: + FTask = (java.lang.CharSequence) value$; + break; + case 3: + PTask = (java.lang.CharSequence) value$; + break; + case 4: + DTask = (java.lang.CharSequence) value$; + break; + case 5: + CTask = (java.lang.CharSequence) value$; + break; + case 6: + ITask = (java.lang.CharSequence) value$; + break; + case 7: + ETask = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value of the 'endpoint' field. + */ + public sparqles.avro.Endpoint getEndpoint() { + return endpoint; + } + + /** + * Sets the value of the 'endpoint' field. + * + * @param value the value to set. + */ + public void setEndpoint(sparqles.avro.Endpoint value) { + this.endpoint = value; + } + + /** + * Gets the value of the 'ATask' field. + * + * @return The value of the 'ATask' field. + */ + public java.lang.CharSequence getATask() { + return ATask; + } + + /** + * Sets the value of the 'ATask' field. 
+ * + * @param value the value to set. + */ + public void setATask(java.lang.CharSequence value) { + this.ATask = value; + } + + /** + * Gets the value of the 'FTask' field. + * + * @return The value of the 'FTask' field. + */ + public java.lang.CharSequence getFTask() { + return FTask; + } + + /** + * Sets the value of the 'FTask' field. + * + * @param value the value to set. + */ + public void setFTask(java.lang.CharSequence value) { + this.FTask = value; + } + + /** + * Gets the value of the 'PTask' field. + * + * @return The value of the 'PTask' field. + */ + public java.lang.CharSequence getPTask() { + return PTask; + } + + /** + * Sets the value of the 'PTask' field. + * + * @param value the value to set. + */ + public void setPTask(java.lang.CharSequence value) { + this.PTask = value; + } + + /** + * Gets the value of the 'DTask' field. + * + * @return The value of the 'DTask' field. + */ + public java.lang.CharSequence getDTask() { + return DTask; + } + + /** + * Sets the value of the 'DTask' field. + * + * @param value the value to set. + */ + public void setDTask(java.lang.CharSequence value) { + this.DTask = value; + } + + /** + * Gets the value of the 'CTask' field. + * + * @return The value of the 'CTask' field. + */ + public java.lang.CharSequence getCTask() { + return CTask; + } + + /** + * Sets the value of the 'CTask' field. + * + * @param value the value to set. + */ + public void setCTask(java.lang.CharSequence value) { + this.CTask = value; + } + + /** + * Gets the value of the 'ITask' field. + * + * @return The value of the 'ITask' field. + */ + public java.lang.CharSequence getITask() { + return ITask; + } + + /** + * Sets the value of the 'ITask' field. + * + * @param value the value to set. + */ + public void setITask(java.lang.CharSequence value) { + this.ITask = value; + } + + /** + * Gets the value of the 'ETask' field. + * + * @return The value of the 'ETask' field. 
+ */ + public java.lang.CharSequence getETask() { + return ETask; + } + + /** + * Sets the value of the 'ETask' field. + * + * @param value the value to set. + */ + public void setETask(java.lang.CharSequence value) { + this.ETask = value; + } + + /** + * Creates a new Schedule RecordBuilder. + * + * @return A new Schedule RecordBuilder + */ + public static sparqles.avro.schedule.Schedule.Builder newBuilder() { + return new sparqles.avro.schedule.Schedule.Builder(); + } + + /** + * Creates a new Schedule RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new Schedule RecordBuilder + */ + public static sparqles.avro.schedule.Schedule.Builder newBuilder( + sparqles.avro.schedule.Schedule.Builder other) { + if (other == null) { + return new sparqles.avro.schedule.Schedule.Builder(); + } else { + return new sparqles.avro.schedule.Schedule.Builder(other); + } + } + + /** + * Creates a new Schedule RecordBuilder by copying an existing Schedule instance. + * + * @param other The existing instance to copy. + * @return A new Schedule RecordBuilder + */ + public static sparqles.avro.schedule.Schedule.Builder newBuilder( + sparqles.avro.schedule.Schedule other) { + if (other == null) { + return new sparqles.avro.schedule.Schedule.Builder(); + } else { + return new sparqles.avro.schedule.Schedule.Builder(other); + } + } + + /** RecordBuilder for Schedule instances. 
*/ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private sparqles.avro.Endpoint endpoint; + private sparqles.avro.Endpoint.Builder endpointBuilder; + private java.lang.CharSequence ATask; + private java.lang.CharSequence FTask; + private java.lang.CharSequence PTask; + private java.lang.CharSequence DTask; + private java.lang.CharSequence CTask; + private java.lang.CharSequence ITask; + private java.lang.CharSequence ETask; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(sparqles.avro.schedule.Schedule.Builder other) { + super(other); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (other.hasEndpointBuilder()) { + this.endpointBuilder = sparqles.avro.Endpoint.newBuilder(other.getEndpointBuilder()); + } + if (isValidValue(fields()[1], other.ATask)) { + this.ATask = data().deepCopy(fields()[1].schema(), other.ATask); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.FTask)) { + this.FTask = data().deepCopy(fields()[2].schema(), other.FTask); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.PTask)) { + this.PTask = data().deepCopy(fields()[3].schema(), other.PTask); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.DTask)) { + this.DTask = data().deepCopy(fields()[4].schema(), other.DTask); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + if (isValidValue(fields()[5], other.CTask)) { + this.CTask = data().deepCopy(fields()[5].schema(), other.CTask); + fieldSetFlags()[5] = other.fieldSetFlags()[5]; + } + 
if (isValidValue(fields()[6], other.ITask)) { + this.ITask = data().deepCopy(fields()[6].schema(), other.ITask); + fieldSetFlags()[6] = other.fieldSetFlags()[6]; + } + if (isValidValue(fields()[7], other.ETask)) { + this.ETask = data().deepCopy(fields()[7].schema(), other.ETask); + fieldSetFlags()[7] = other.fieldSetFlags()[7]; + } } - /** Gets the value of the 'endpoint' field. */ + /** + * Creates a Builder by copying an existing Schedule instance + * + * @param other The existing instance to copy. + */ + private Builder(sparqles.avro.schedule.Schedule other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.endpoint)) { + this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); + fieldSetFlags()[0] = true; + } + this.endpointBuilder = null; + if (isValidValue(fields()[1], other.ATask)) { + this.ATask = data().deepCopy(fields()[1].schema(), other.ATask); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.FTask)) { + this.FTask = data().deepCopy(fields()[2].schema(), other.FTask); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.PTask)) { + this.PTask = data().deepCopy(fields()[3].schema(), other.PTask); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.DTask)) { + this.DTask = data().deepCopy(fields()[4].schema(), other.DTask); + fieldSetFlags()[4] = true; + } + if (isValidValue(fields()[5], other.CTask)) { + this.CTask = data().deepCopy(fields()[5].schema(), other.CTask); + fieldSetFlags()[5] = true; + } + if (isValidValue(fields()[6], other.ITask)) { + this.ITask = data().deepCopy(fields()[6].schema(), other.ITask); + fieldSetFlags()[6] = true; + } + if (isValidValue(fields()[7], other.ETask)) { + this.ETask = data().deepCopy(fields()[7].schema(), other.ETask); + fieldSetFlags()[7] = true; + } + } + + /** + * Gets the value of the 'endpoint' field. + * + * @return The value. 
+ */ public sparqles.avro.Endpoint getEndpoint() { - return endpoint; + return endpoint; } /** * Sets the value of the 'endpoint' field. * - * @param value the value to set. + * @param value The value of 'endpoint'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setEndpoint(sparqles.avro.Endpoint value) { + validate(fields()[0], value); + this.endpointBuilder = null; + this.endpoint = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'endpoint' field has been set. + * + * @return True if the 'endpoint' field has been set, false otherwise. + */ + public boolean hasEndpoint() { + return fieldSetFlags()[0]; + } + + /** + * Gets the Builder instance for the 'endpoint' field and creates one if it doesn't exist yet. + * + * @return This builder. + */ + public sparqles.avro.Endpoint.Builder getEndpointBuilder() { + if (endpointBuilder == null) { + if (hasEndpoint()) { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder(endpoint)); + } else { + setEndpointBuilder(sparqles.avro.Endpoint.newBuilder()); + } + } + return endpointBuilder; + } + + /** + * Sets the Builder instance for the 'endpoint' field + * + * @param value The builder instance that must be set. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setEndpointBuilder( + sparqles.avro.Endpoint.Builder value) { + clearEndpoint(); + endpointBuilder = value; + return this; + } + + /** + * Checks whether the 'endpoint' field has an active Builder instance + * + * @return True if the 'endpoint' field has an active Builder instance + */ + public boolean hasEndpointBuilder() { + return endpointBuilder != null; + } + + /** + * Clears the value of the 'endpoint' field. + * + * @return This builder. 
*/ - public void setEndpoint(sparqles.avro.Endpoint value) { - this.endpoint = value; + public sparqles.avro.schedule.Schedule.Builder clearEndpoint() { + endpoint = null; + endpointBuilder = null; + fieldSetFlags()[0] = false; + return this; } - /** Gets the value of the 'ATask' field. */ + /** + * Gets the value of the 'ATask' field. + * + * @return The value. + */ public java.lang.CharSequence getATask() { - return ATask; + return ATask; } /** * Sets the value of the 'ATask' field. * - * @param value the value to set. + * @param value The value of 'ATask'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setATask(java.lang.CharSequence value) { + validate(fields()[1], value); + this.ATask = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'ATask' field has been set. + * + * @return True if the 'ATask' field has been set, false otherwise. + */ + public boolean hasATask() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'ATask' field. + * + * @return This builder. */ - public void setATask(java.lang.CharSequence value) { - this.ATask = value; + public sparqles.avro.schedule.Schedule.Builder clearATask() { + ATask = null; + fieldSetFlags()[1] = false; + return this; } - /** Gets the value of the 'FTask' field. */ + /** + * Gets the value of the 'FTask' field. + * + * @return The value. + */ public java.lang.CharSequence getFTask() { - return FTask; + return FTask; } /** * Sets the value of the 'FTask' field. * - * @param value the value to set. + * @param value The value of 'FTask'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setFTask(java.lang.CharSequence value) { + validate(fields()[2], value); + this.FTask = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'FTask' field has been set. + * + * @return True if the 'FTask' field has been set, false otherwise. 
+ */ + public boolean hasFTask() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'FTask' field. + * + * @return This builder. */ - public void setFTask(java.lang.CharSequence value) { - this.FTask = value; + public sparqles.avro.schedule.Schedule.Builder clearFTask() { + FTask = null; + fieldSetFlags()[2] = false; + return this; } - /** Gets the value of the 'PTask' field. */ + /** + * Gets the value of the 'PTask' field. + * + * @return The value. + */ public java.lang.CharSequence getPTask() { - return PTask; + return PTask; } /** * Sets the value of the 'PTask' field. * - * @param value the value to set. + * @param value The value of 'PTask'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setPTask(java.lang.CharSequence value) { + validate(fields()[3], value); + this.PTask = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'PTask' field has been set. + * + * @return True if the 'PTask' field has been set, false otherwise. */ - public void setPTask(java.lang.CharSequence value) { - this.PTask = value; + public boolean hasPTask() { + return fieldSetFlags()[3]; } - /** Gets the value of the 'DTask' field. */ + /** + * Clears the value of the 'PTask' field. + * + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder clearPTask() { + PTask = null; + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'DTask' field. + * + * @return The value. + */ public java.lang.CharSequence getDTask() { - return DTask; + return DTask; } /** * Sets the value of the 'DTask' field. * - * @param value the value to set. + * @param value The value of 'DTask'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setDTask(java.lang.CharSequence value) { + validate(fields()[4], value); + this.DTask = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'DTask' field has been set. 
+ * + * @return True if the 'DTask' field has been set, false otherwise. + */ + public boolean hasDTask() { + return fieldSetFlags()[4]; + } + + /** + * Clears the value of the 'DTask' field. + * + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder clearDTask() { + DTask = null; + fieldSetFlags()[4] = false; + return this; + } + + /** + * Gets the value of the 'CTask' field. + * + * @return The value. + */ + public java.lang.CharSequence getCTask() { + return CTask; + } + + /** + * Sets the value of the 'CTask' field. + * + * @param value The value of 'CTask'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setCTask(java.lang.CharSequence value) { + validate(fields()[5], value); + this.CTask = value; + fieldSetFlags()[5] = true; + return this; + } + + /** + * Checks whether the 'CTask' field has been set. + * + * @return True if the 'CTask' field has been set, false otherwise. + */ + public boolean hasCTask() { + return fieldSetFlags()[5]; + } + + /** + * Clears the value of the 'CTask' field. + * + * @return This builder. */ - public void setDTask(java.lang.CharSequence value) { - this.DTask = value; + public sparqles.avro.schedule.Schedule.Builder clearCTask() { + CTask = null; + fieldSetFlags()[5] = false; + return this; } - /** Gets the value of the 'ITask' field. */ + /** + * Gets the value of the 'ITask' field. + * + * @return The value. + */ public java.lang.CharSequence getITask() { - return ITask; + return ITask; } /** * Sets the value of the 'ITask' field. * - * @param value the value to set. + * @param value The value of 'ITask'. + * @return This builder. */ - public void setITask(java.lang.CharSequence value) { - this.ITask = value; + public sparqles.avro.schedule.Schedule.Builder setITask(java.lang.CharSequence value) { + validate(fields()[6], value); + this.ITask = value; + fieldSetFlags()[6] = true; + return this; } - /** Gets the value of the 'ETask' field. 
*/ - public java.lang.CharSequence getETask() { - return ETask; + /** + * Checks whether the 'ITask' field has been set. + * + * @return True if the 'ITask' field has been set, false otherwise. + */ + public boolean hasITask() { + return fieldSetFlags()[6]; } /** - * Sets the value of the 'ETask' field. + * Clears the value of the 'ITask' field. * - * @param value the value to set. + * @return This builder. */ - public void setETask(java.lang.CharSequence value) { - this.ETask = value; + public sparqles.avro.schedule.Schedule.Builder clearITask() { + ITask = null; + fieldSetFlags()[6] = false; + return this; } - /** RecordBuilder for Schedule instances. */ - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + /** + * Gets the value of the 'ETask' field. + * + * @return The value. + */ + public java.lang.CharSequence getETask() { + return ETask; + } - private sparqles.avro.Endpoint endpoint; - private java.lang.CharSequence ATask; - private java.lang.CharSequence FTask; - private java.lang.CharSequence PTask; - private java.lang.CharSequence DTask; - private java.lang.CharSequence ITask; - private java.lang.CharSequence ETask; + /** + * Sets the value of the 'ETask' field. + * + * @param value The value of 'ETask'. + * @return This builder. + */ + public sparqles.avro.schedule.Schedule.Builder setETask(java.lang.CharSequence value) { + validate(fields()[7], value); + this.ETask = value; + fieldSetFlags()[7] = true; + return this; + } - /** Creates a new Builder */ - private Builder() { - super(sparqles.avro.schedule.Schedule.SCHEMA$); - } + /** + * Checks whether the 'ETask' field has been set. + * + * @return True if the 'ETask' field has been set, false otherwise. 
+ */ + public boolean hasETask() { + return fieldSetFlags()[7]; + } - /** Creates a Builder by copying an existing Builder */ - private Builder(sparqles.avro.schedule.Schedule.Builder other) { - super(other); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.ATask)) { - this.ATask = data().deepCopy(fields()[1].schema(), other.ATask); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.FTask)) { - this.FTask = data().deepCopy(fields()[2].schema(), other.FTask); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.PTask)) { - this.PTask = data().deepCopy(fields()[3].schema(), other.PTask); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.DTask)) { - this.DTask = data().deepCopy(fields()[4].schema(), other.DTask); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.ITask)) { - this.ITask = data().deepCopy(fields()[5].schema(), other.ITask); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.ETask)) { - this.ETask = data().deepCopy(fields()[6].schema(), other.ETask); - fieldSetFlags()[6] = true; - } - } + /** + * Clears the value of the 'ETask' field. + * + * @return This builder. 
+ */ + public sparqles.avro.schedule.Schedule.Builder clearETask() { + ETask = null; + fieldSetFlags()[7] = false; + return this; + } - /** Creates a Builder by copying an existing Schedule instance */ - private Builder(sparqles.avro.schedule.Schedule other) { - super(sparqles.avro.schedule.Schedule.SCHEMA$); - if (isValidValue(fields()[0], other.endpoint)) { - this.endpoint = data().deepCopy(fields()[0].schema(), other.endpoint); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.ATask)) { - this.ATask = data().deepCopy(fields()[1].schema(), other.ATask); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.FTask)) { - this.FTask = data().deepCopy(fields()[2].schema(), other.FTask); - fieldSetFlags()[2] = true; - } - if (isValidValue(fields()[3], other.PTask)) { - this.PTask = data().deepCopy(fields()[3].schema(), other.PTask); - fieldSetFlags()[3] = true; - } - if (isValidValue(fields()[4], other.DTask)) { - this.DTask = data().deepCopy(fields()[4].schema(), other.DTask); - fieldSetFlags()[4] = true; - } - if (isValidValue(fields()[5], other.ITask)) { - this.ITask = data().deepCopy(fields()[5].schema(), other.ITask); - fieldSetFlags()[5] = true; - } - if (isValidValue(fields()[6], other.ETask)) { - this.ETask = data().deepCopy(fields()[6].schema(), other.ETask); - fieldSetFlags()[6] = true; - } + @Override + @SuppressWarnings("unchecked") + public Schedule build() { + try { + Schedule record = new Schedule(); + if (endpointBuilder != null) { + try { + record.endpoint = this.endpointBuilder.build(); + } catch (org.apache.avro.AvroMissingFieldException e) { + e.addParentField(record.getSchema().getField("endpoint")); + throw e; + } + } else { + record.endpoint = + fieldSetFlags()[0] + ? this.endpoint + : (sparqles.avro.Endpoint) defaultValue(fields()[0]); } + record.ATask = + fieldSetFlags()[1] ? this.ATask : (java.lang.CharSequence) defaultValue(fields()[1]); + record.FTask = + fieldSetFlags()[2] ? 
this.FTask : (java.lang.CharSequence) defaultValue(fields()[2]); + record.PTask = + fieldSetFlags()[3] ? this.PTask : (java.lang.CharSequence) defaultValue(fields()[3]); + record.DTask = + fieldSetFlags()[4] ? this.DTask : (java.lang.CharSequence) defaultValue(fields()[4]); + record.CTask = + fieldSetFlags()[5] ? this.CTask : (java.lang.CharSequence) defaultValue(fields()[5]); + record.ITask = + fieldSetFlags()[6] ? this.ITask : (java.lang.CharSequence) defaultValue(fields()[6]); + record.ETask = + fieldSetFlags()[7] ? this.ETask : (java.lang.CharSequence) defaultValue(fields()[7]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } - /** Gets the value of the 'endpoint' field */ - public sparqles.avro.Endpoint getEndpoint() { - return endpoint; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = + (org.apache.avro.io.DatumWriter) MODEL$.createDatumWriter(SCHEMA$); - /** Sets the value of the 'endpoint' field */ - public sparqles.avro.schedule.Schedule.Builder setEndpoint(sparqles.avro.Endpoint value) { - validate(fields()[0], value); - this.endpoint = value; - fieldSetFlags()[0] = true; - return this; - } + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } - /** Checks whether the 'endpoint' field has been set */ - public boolean hasEndpoint() { - return fieldSetFlags()[0]; - } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = + (org.apache.avro.io.DatumReader) MODEL$.createDatumReader(SCHEMA$); - /** Clears the value of the 'endpoint' field */ - public sparqles.avro.schedule.Schedule.Builder clearEndpoint() { - endpoint = null; - fieldSetFlags()[0] = false; - return this; - } + @Override + public void readExternal(java.io.ObjectInput in) 
throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } - /** Gets the value of the 'ATask' field */ - public java.lang.CharSequence getATask() { - return ATask; - } + @Override + protected boolean hasCustomCoders() { + return true; + } - /** Sets the value of the 'ATask' field */ - public sparqles.avro.schedule.Schedule.Builder setATask(java.lang.CharSequence value) { - validate(fields()[1], value); - this.ATask = value; - fieldSetFlags()[1] = true; - return this; - } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + this.endpoint.customEncode(out); - /** Checks whether the 'ATask' field has been set */ - public boolean hasATask() { - return fieldSetFlags()[1]; - } + if (this.ATask == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.ATask); + } - /** Clears the value of the 'ATask' field */ - public sparqles.avro.schedule.Schedule.Builder clearATask() { - ATask = null; - fieldSetFlags()[1] = false; - return this; - } + if (this.FTask == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.FTask); + } - /** Gets the value of the 'FTask' field */ - public java.lang.CharSequence getFTask() { - return FTask; - } + if (this.PTask == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.PTask); + } - /** Sets the value of the 'FTask' field */ - public sparqles.avro.schedule.Schedule.Builder setFTask(java.lang.CharSequence value) { - validate(fields()[2], value); - this.FTask = value; - fieldSetFlags()[2] = true; - return this; - } + if (this.DTask == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.DTask); + } - /** Checks whether the 'FTask' field has been set */ - public boolean hasFTask() { - return fieldSetFlags()[2]; - } + if (this.CTask == null) { + out.writeIndex(1); + 
out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.CTask); + } - /** Clears the value of the 'FTask' field */ - public sparqles.avro.schedule.Schedule.Builder clearFTask() { - FTask = null; - fieldSetFlags()[2] = false; - return this; - } + if (this.ITask == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.ITask); + } - /** Gets the value of the 'PTask' field */ - public java.lang.CharSequence getPTask() { - return PTask; - } + if (this.ETask == null) { + out.writeIndex(1); + out.writeNull(); + } else { + out.writeIndex(0); + out.writeString(this.ETask); + } + } - /** Sets the value of the 'PTask' field */ - public sparqles.avro.schedule.Schedule.Builder setPTask(java.lang.CharSequence value) { - validate(fields()[3], value); - this.PTask = value; - fieldSetFlags()[3] = true; - return this; - } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); - /** Checks whether the 'PTask' field has been set */ - public boolean hasPTask() { - return fieldSetFlags()[3]; - } + if (in.readIndex() != 0) { + in.readNull(); + this.ATask = null; + } else { + this.ATask = in.readString(this.ATask instanceof Utf8 ? (Utf8) this.ATask : null); + } - /** Clears the value of the 'PTask' field */ - public sparqles.avro.schedule.Schedule.Builder clearPTask() { - PTask = null; - fieldSetFlags()[3] = false; - return this; - } + if (in.readIndex() != 0) { + in.readNull(); + this.FTask = null; + } else { + this.FTask = in.readString(this.FTask instanceof Utf8 ? 
(Utf8) this.FTask : null); + } - /** Gets the value of the 'DTask' field */ - public java.lang.CharSequence getDTask() { - return DTask; - } + if (in.readIndex() != 0) { + in.readNull(); + this.PTask = null; + } else { + this.PTask = in.readString(this.PTask instanceof Utf8 ? (Utf8) this.PTask : null); + } - /** Sets the value of the 'DTask' field */ - public sparqles.avro.schedule.Schedule.Builder setDTask(java.lang.CharSequence value) { - validate(fields()[4], value); - this.DTask = value; - fieldSetFlags()[4] = true; - return this; - } + if (in.readIndex() != 0) { + in.readNull(); + this.DTask = null; + } else { + this.DTask = in.readString(this.DTask instanceof Utf8 ? (Utf8) this.DTask : null); + } - /** Checks whether the 'DTask' field has been set */ - public boolean hasDTask() { - return fieldSetFlags()[4]; - } + if (in.readIndex() != 0) { + in.readNull(); + this.CTask = null; + } else { + this.CTask = in.readString(this.CTask instanceof Utf8 ? (Utf8) this.CTask : null); + } - /** Clears the value of the 'DTask' field */ - public sparqles.avro.schedule.Schedule.Builder clearDTask() { - DTask = null; - fieldSetFlags()[4] = false; - return this; - } + if (in.readIndex() != 0) { + in.readNull(); + this.ITask = null; + } else { + this.ITask = in.readString(this.ITask instanceof Utf8 ? (Utf8) this.ITask : null); + } - /** Gets the value of the 'ITask' field */ - public java.lang.CharSequence getITask() { - return ITask; - } + if (in.readIndex() != 0) { + in.readNull(); + this.ETask = null; + } else { + this.ETask = in.readString(this.ETask instanceof Utf8 ? 
(Utf8) this.ETask : null); + } - /** Sets the value of the 'ITask' field */ - public sparqles.avro.schedule.Schedule.Builder setITask(java.lang.CharSequence value) { - validate(fields()[5], value); - this.ITask = value; - fieldSetFlags()[5] = true; - return this; - } + } else { + for (int i = 0; i < 8; i++) { + switch (fieldOrder[i].pos()) { + case 0: + if (this.endpoint == null) { + this.endpoint = new sparqles.avro.Endpoint(); + } + this.endpoint.customDecode(in); + break; - /** Checks whether the 'ITask' field has been set */ - public boolean hasITask() { - return fieldSetFlags()[5]; - } + case 1: + if (in.readIndex() != 0) { + in.readNull(); + this.ATask = null; + } else { + this.ATask = in.readString(this.ATask instanceof Utf8 ? (Utf8) this.ATask : null); + } + break; - /** Clears the value of the 'ITask' field */ - public sparqles.avro.schedule.Schedule.Builder clearITask() { - ITask = null; - fieldSetFlags()[5] = false; - return this; - } + case 2: + if (in.readIndex() != 0) { + in.readNull(); + this.FTask = null; + } else { + this.FTask = in.readString(this.FTask instanceof Utf8 ? (Utf8) this.FTask : null); + } + break; - /** Gets the value of the 'ETask' field */ - public java.lang.CharSequence getETask() { - return ETask; - } + case 3: + if (in.readIndex() != 0) { + in.readNull(); + this.PTask = null; + } else { + this.PTask = in.readString(this.PTask instanceof Utf8 ? (Utf8) this.PTask : null); + } + break; - /** Sets the value of the 'ETask' field */ - public sparqles.avro.schedule.Schedule.Builder setETask(java.lang.CharSequence value) { - validate(fields()[6], value); - this.ETask = value; - fieldSetFlags()[6] = true; - return this; - } + case 4: + if (in.readIndex() != 0) { + in.readNull(); + this.DTask = null; + } else { + this.DTask = in.readString(this.DTask instanceof Utf8 ? 
(Utf8) this.DTask : null); + } + break; - /** Checks whether the 'ETask' field has been set */ - public boolean hasETask() { - return fieldSetFlags()[6]; - } + case 5: + if (in.readIndex() != 0) { + in.readNull(); + this.CTask = null; + } else { + this.CTask = in.readString(this.CTask instanceof Utf8 ? (Utf8) this.CTask : null); + } + break; - /** Clears the value of the 'ETask' field */ - public sparqles.avro.schedule.Schedule.Builder clearETask() { - ETask = null; - fieldSetFlags()[6] = false; - return this; - } + case 6: + if (in.readIndex() != 0) { + in.readNull(); + this.ITask = null; + } else { + this.ITask = in.readString(this.ITask instanceof Utf8 ? (Utf8) this.ITask : null); + } + break; - @Override - public Schedule build() { - try { - Schedule record = new Schedule(); - record.endpoint = - fieldSetFlags()[0] - ? this.endpoint - : (sparqles.avro.Endpoint) defaultValue(fields()[0]); - record.ATask = - fieldSetFlags()[1] - ? this.ATask - : (java.lang.CharSequence) defaultValue(fields()[1]); - record.FTask = - fieldSetFlags()[2] - ? this.FTask - : (java.lang.CharSequence) defaultValue(fields()[2]); - record.PTask = - fieldSetFlags()[3] - ? this.PTask - : (java.lang.CharSequence) defaultValue(fields()[3]); - record.DTask = - fieldSetFlags()[4] - ? this.DTask - : (java.lang.CharSequence) defaultValue(fields()[4]); - record.ITask = - fieldSetFlags()[5] - ? this.ITask - : (java.lang.CharSequence) defaultValue(fields()[5]); - record.ETask = - fieldSetFlags()[6] - ? this.ETask - : (java.lang.CharSequence) defaultValue(fields()[6]); - return record; - } catch (Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); + case 7: + if (in.readIndex() != 0) { + in.readNull(); + this.ETask = null; + } else { + this.ETask = in.readString(this.ETask instanceof Utf8 ? 
(Utf8) this.ETask : null); } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); } + } } + } } diff --git a/backend/src/main/java/sparqles/core/CONSTANTS.java b/backend/src/main/java/sparqles/core/CONSTANTS.java index 38d989c2..24ece29c 100644 --- a/backend/src/main/java/sparqles/core/CONSTANTS.java +++ b/backend/src/main/java/sparqles/core/CONSTANTS.java @@ -1,32 +1,43 @@ package sparqles.core; -import java.net.URISyntaxException; -import sparqles.avro.Endpoint; - public class CONSTANTS { + /** Default length at which most strings will be cut off (in logs, message strings etc.) */ + public static final int STRING_LEN = 1024; + + private static final String SPARQLES_VERSION = "0.3.0"; + public static final String DEFAULT_HOST = "http://sparqles.okfn.org/"; + + /** Availability task */ + public static final String ATASK = "ATask"; + + /** Performance task */ + public static final String PTASK = "PTask"; + + /** Discoverability task */ + public static final String DTASK = "DTask"; + + /** Interoperability task */ + public static final String FTASK = "FTask"; + + /** Index view refresh task */ + public static final String ITASK = "ITask"; + + /** Endpoint list refresh task (from Datahub) */ + public static final String ETASK = "ETask"; + + /** Coherence task (proposed measure by the 3DFed project) */ + public static final String CTASK = "CTask"; + + /** Used only for robots.txt */ + public static final String USER_AGENT_STRING_RAW = "SPARQLESbot"; + + public static final String USER_AGENT_STRING = + "Mozilla/5.0 (compatible; SPARQLESbot/" + SPARQLES_VERSION + "; +%s)"; + + @Deprecated public static final int SOCKET_TIMEOUT = 16 * 1000; + @Deprecated public static final int CONNECTION_TIMEOUT = 16 * 1000; - public static final String ATASK = "ATask"; - public static final String PTASK = "PTask"; - public static final String DTASK = "DTask"; - public static final String FTASK = "FTask"; - public static final String ITASK = "ITask"; - 
public static final String ETASK = "ETask"; - - public static final String USER_AGENT = - "SPARQLES client using HTTPClient/4.2.3 (https://github.com/pyvandenbussche/sparqles)"; - public static final String ARQ_USER_AGENT = - "SPARQLES client using Apache-Jena-ARQ/2.11.1" - + " (https://github.com/pyvandenbussche/sparqles)"; - public static final int SOCKET_TIMEOUT = 16 * 1000; - public static final int CONNECTION_TIMEOUT = 16 * 1000; - - public static Endpoint SPARQLES = null; - - static { - try { - SPARQLES = EndpointFactory.newEndpoint("http://sparqles.okfn.org/"); - } catch (URISyntaxException e) { - e.printStackTrace(); - } - } + public static final String ANY_RDF_MIME_ACCEPT = + "application/rdf+xml, application/x-turtle, application/rdf+n3, application/xml, text/turtle," + + " text/rdf, text/plain;q=0.1"; } diff --git a/backend/src/main/java/sparqles/core/EndpointFactory.java b/backend/src/main/java/sparqles/core/EndpointFactory.java index 7831b3cc..e59b5da1 100644 --- a/backend/src/main/java/sparqles/core/EndpointFactory.java +++ b/backend/src/main/java/sparqles/core/EndpointFactory.java @@ -8,14 +8,14 @@ public class EndpointFactory { - public static Endpoint newEndpoint(URI uri) { - Endpoint ep = new Endpoint(); - ep.setUri(uri.toString()); - ep.setDatasets(new ArrayList()); - return ep; - } + public static Endpoint newEndpoint(URI uri) { + Endpoint ep = new Endpoint(); + ep.setUri(uri.toString()); + ep.setDatasets(new ArrayList()); + return ep; + } - public static Endpoint newEndpoint(String epURI) throws URISyntaxException { - return newEndpoint(new URI(epURI)); - } + public static Endpoint newEndpoint(String epURI) throws URISyntaxException { + return newEndpoint(new URI(epURI)); + } } diff --git a/backend/src/main/java/sparqles/core/EndpointTask.java b/backend/src/main/java/sparqles/core/EndpointTask.java index 62bfdd6f..f80650eb 100644 --- a/backend/src/main/java/sparqles/core/EndpointTask.java +++ 
b/backend/src/main/java/sparqles/core/EndpointTask.java @@ -17,96 +17,96 @@ * @author UmbrichJ */ public abstract class EndpointTask implements Task { - private static final Logger log = LoggerFactory.getLogger(EndpointTask.class); - protected final String _task; - private final String _id; - protected MongoDBManager _dbm; - protected FileManager _fm; - protected String _epURI; - protected Endpoint _ep; - private Analytics _analytics; - - public EndpointTask(Endpoint ep) { - _epURI = ep.getUri().toString(); - setEndpoint(ep); - - _task = this.getClass().getSimpleName(); - _id = _task + "(" + _epURI + ")"; - } - - public Endpoint getEndpoint() { - return _ep; - } - - public void setEndpoint(Endpoint ep) { - if (!_epURI.equals(ep.getUri().toString())) - log.error("Endpoint URIs do not match (was:{} is:{}", _epURI, ep.getUri()); - _ep = ep; - } - - public void setDBManager(MongoDBManager dbm) { - _dbm = dbm; - } - - public void setFileManager(FileManager fm) { - _fm = fm; - } - - @Override - public V call() throws Exception { - log.info("EXECUTE {}", this); - - long start = System.currentTimeMillis(); - EndpointResult epr = new EndpointResult(); - epr.setEndpoint(_ep); - epr.setStart(start); - boolean i_succ = true, a_succ = true, f_succ = true; - V v = null; - try { - - v = process(epr); - long end = System.currentTimeMillis(); - epr.setEnd(end); - - // insert into database - if (_dbm != null) i_succ = _dbm.insert(v); - - // write to file - if (_fm != null) f_succ = _fm.writeResult(v); - - // analyse the results - if (_analytics != null) a_succ = _analytics.analyse(v); - - log.info( - "EXECUTED {} in {} ms (idx:{}, disk:{}, analysed:{})", - _id, - end - start, - i_succ, - f_succ, - a_succ); - } catch (Exception e) { - log.error( - "FAILED {} (idx:{}, disk:{}, analysed:{}) {}", - this, - i_succ, - f_succ, - a_succ, - ExceptionHandler.logAndtoString(e, true)); - } - return v; - } - - public abstract V process(EndpointResult epr); - - @Override - public String 
toString() { - return _id; - } - - /** - * @param a - the analytics program for this task - */ - public void setAnalytics(Analytics a) { - _analytics = a; + private static final Logger log = LoggerFactory.getLogger(EndpointTask.class); + protected final String _task; + private final String _id; + protected MongoDBManager _dbm; + protected FileManager _fm; + protected String _epURI; + protected Endpoint _ep; + private Analytics _analytics; + + public EndpointTask(Endpoint ep) { + _epURI = ep.getUri().toString(); + setEndpoint(ep); + + _task = this.getClass().getSimpleName(); + _id = _task + "(" + _epURI + ")"; + } + + public Endpoint getEndpoint() { + return _ep; + } + + public void setEndpoint(Endpoint ep) { + if (!_epURI.equals(ep.getUri().toString())) + log.error("Endpoint URIs do not match (was:{} is:{}", _epURI, ep.getUri()); + _ep = ep; + } + + public void setDBManager(MongoDBManager dbm) { + _dbm = dbm; + } + + public void setFileManager(FileManager fm) { + _fm = fm; + } + + @Override + public V call() throws Exception { + log.info("EXECUTE {}", this); + + long start = System.currentTimeMillis(); + EndpointResult epr = new EndpointResult(); + epr.setEndpoint(_ep); + epr.setStart(start); + boolean i_succ = true, a_succ = true, f_succ = true; + V v = null; + try { + + v = process(epr); + long end = System.currentTimeMillis(); + epr.setEnd(end); + + // insert into database + if (_dbm != null) i_succ = _dbm.insert(v); + + // write to file + if (_fm != null) f_succ = _fm.writeResult(v); + + // analyse the results + if (_analytics != null) a_succ = _analytics.analyse(v); + + log.info( + "EXECUTED {} in {} ms (idx:{}, disk:{}, analysed:{})", + _id, + end - start, + i_succ, + f_succ, + a_succ); + } catch (Exception e) { + log.error( + "FAILED {} (idx:{}, disk:{}, analysed:{}) {}", + this, + i_succ, + f_succ, + a_succ, + ExceptionHandler.logAndtoString(e, true)); } + return v; + } + + public abstract V process(EndpointResult epr); + + @Override + public String 
toString() { + return _id; + } + + /** + * @param a - the analytics program for this task + */ + public void setAnalytics(Analytics a) { + _analytics = a; + } } diff --git a/backend/src/main/java/sparqles/core/Main.java b/backend/src/main/java/sparqles/core/Main.java index ef745958..c8e3dc49 100644 --- a/backend/src/main/java/sparqles/core/Main.java +++ b/backend/src/main/java/sparqles/core/Main.java @@ -24,124 +24,114 @@ */ public class Main { - private static final Logger log = LoggerFactory.getLogger(Main.class); + private static final Logger log = LoggerFactory.getLogger(Main.class); - private static final String PACKAGE_PREFIX = "sparqles.utils.cli"; - private static final String PATH_PREFIX = "/sparqles/utils/cli"; + private static final String PACKAGE_PREFIX = "sparqles.utils.cli"; + private static final String PATH_PREFIX = "/sparqles/utils/cli"; - public static void main(String[] args) throws ClassNotFoundException, IOException { - try { - if (args.length < 1) { - StringBuffer sb = new StringBuffer(); - sb.append("where one of"); + public static void main(String[] args) throws ClassNotFoundException, IOException { + try { + if (args.length < 1) { + StringBuffer sb = new StringBuffer(); + sb.append("where one of"); - Class[] classes = getClasses(PACKAGE_PREFIX); + Class[] classes = getClasses(PACKAGE_PREFIX); - for (Class c : classes) { - log.info("try to load {}", c.getName()); - // PACKAGE_PREFIX+"."+c.getSimpleName() - Class cls = Main.class.getClassLoader().loadClass(c.getName()); - if (CLIObject.class.isAssignableFrom(cls) - && !cls.getSimpleName().equals("CLIObject")) { - if (cls.getName() - .replaceAll(PACKAGE_PREFIX, "") - .equals("." 
+ cls.getSimpleName())) { - CLIObject o = (CLIObject) cls.newInstance(); - sb.append("\n\t") - .append(o.getCommand()) - .append(" -- ") - .append(o.getDescription()); - } - } - } - usage(sb.toString()); + for (Class c : classes) { + log.info("try to load {}", c.getName()); + // PACKAGE_PREFIX+"."+c.getSimpleName() + Class cls = Main.class.getClassLoader().loadClass(c.getName()); + if (CLIObject.class.isAssignableFrom(cls) && !cls.getSimpleName().equals("CLIObject")) { + if (cls.getName().replaceAll(PACKAGE_PREFIX, "").equals("." + cls.getSimpleName())) { + CLIObject o = (CLIObject) cls.newInstance(); + sb.append("\n\t").append(o.getCommand()).append(" -- ").append(o.getDescription()); } - CLIObject cli = (CLIObject) Class.forName(PACKAGE_PREFIX + "." + args[0]).newInstance(); - cli.run(Arrays.copyOfRange(args, 1, args.length)); - System.exit(0); - } catch (Throwable e) { - e.printStackTrace(); - System.exit(-1); + } } + usage(sb.toString()); + } + CLIObject cli = (CLIObject) Class.forName(PACKAGE_PREFIX + "." 
+ args[0]).newInstance(); + cli.run(Arrays.copyOfRange(args, 1, args.length)); + System.exit(0); + } catch (Throwable e) { + e.printStackTrace(); + System.exit(-1); } + } - private static void usage(String msg) { - System.err.println(msg); - System.exit(-1); - } + private static void usage(String msg) { + System.err.println(msg); + System.exit(-1); + } - private static Class[] getClasses(String packageName) - throws ClassNotFoundException, IOException { - String uri; - ArrayList classes = new ArrayList(); - try { - System.out.println(Main.class.getResource(PATH_PREFIX)); - uri = Main.class.getResource(PATH_PREFIX).toURI().toASCIIString(); - if (uri.startsWith("jar:file:")) { - classes = classesFromJar(uri); - } else { - ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); - String path = packageName.replace('.', '/'); - Enumeration resources = classLoader.getResources(path); - List dirs = new ArrayList(); - while (resources.hasMoreElements()) { - URL resource = resources.nextElement(); - dirs.add(new File(resource.getFile())); - } - for (File directory : dirs) { - classes.addAll(findClasses(directory, packageName)); - } - } - } catch (URISyntaxException e) { - e.printStackTrace(); + private static Class[] getClasses(String packageName) throws ClassNotFoundException, IOException { + String uri; + ArrayList classes = new ArrayList(); + try { + System.out.println(Main.class.getResource(PATH_PREFIX)); + uri = Main.class.getResource(PATH_PREFIX).toURI().toASCIIString(); + if (uri.startsWith("jar:file:")) { + classes = classesFromJar(uri); + } else { + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + String path = packageName.replace('.', '/'); + Enumeration resources = classLoader.getResources(path); + List dirs = new ArrayList(); + while (resources.hasMoreElements()) { + URL resource = resources.nextElement(); + dirs.add(new File(resource.getFile())); + } + for (File directory : dirs) { + 
classes.addAll(findClasses(directory, packageName)); } - // - // - return classes.toArray(new Class[classes.size()]); + } + } catch (URISyntaxException e) { + e.printStackTrace(); } + // + // + return classes.toArray(new Class[classes.size()]); + } - private static ArrayList classesFromJar(String uri) - throws FileNotFoundException, IOException, ClassNotFoundException { - ArrayList classes = new ArrayList(); - String jarURI = uri.substring("jar:file:".length(), uri.lastIndexOf("!")); - JarInputStream jarFile = new JarInputStream(new FileInputStream(jarURI)); - JarEntry jarEntry; - while (true) { - jarEntry = jarFile.getNextJarEntry(); - // System.out.println(jarEntry); - if (jarEntry == null) { - break; - } - if ((jarEntry.getName().startsWith(PACKAGE_PREFIX.replace(".", "/"))) - && (jarEntry.getName().endsWith(".class"))) { - String classEntry = jarEntry.getName().replaceAll("/", "\\."); - classes.add(Class.forName(classEntry.substring(0, classEntry.indexOf(".class")))); - } - } - return classes; + private static ArrayList classesFromJar(String uri) + throws FileNotFoundException, IOException, ClassNotFoundException { + ArrayList classes = new ArrayList(); + String jarURI = uri.substring("jar:file:".length(), uri.lastIndexOf("!")); + JarInputStream jarFile = new JarInputStream(new FileInputStream(jarURI)); + JarEntry jarEntry; + while (true) { + jarEntry = jarFile.getNextJarEntry(); + // System.out.println(jarEntry); + if (jarEntry == null) { + break; + } + if ((jarEntry.getName().startsWith(PACKAGE_PREFIX.replace(".", "/"))) + && (jarEntry.getName().endsWith(".class"))) { + String classEntry = jarEntry.getName().replaceAll("/", "\\."); + classes.add(Class.forName(classEntry.substring(0, classEntry.indexOf(".class")))); + } } + return classes; + } - private static List findClasses(File directory, String packageName) - throws ClassNotFoundException { - List classes = new ArrayList(); - if (!directory.exists()) { - return classes; - } - File[] files = 
directory.listFiles(); - for (File file : files) { - if (file.isDirectory()) { - assert !file.getName().contains("."); - classes.addAll(findClasses(file, packageName + "." + file.getName())); - } else if (file.getName().endsWith(".class")) { - if (!file.getName().equals("CLIObject.class")) - classes.add( - Class.forName( - packageName - + '.' - + file.getName() - .substring(0, file.getName().length() - 6))); - } - } - return classes; + private static List findClasses(File directory, String packageName) + throws ClassNotFoundException { + List classes = new ArrayList(); + if (!directory.exists()) { + return classes; + } + File[] files = directory.listFiles(); + for (File file : files) { + if (file.isDirectory()) { + assert !file.getName().contains("."); + classes.addAll(findClasses(file, packageName + "." + file.getName())); + } else if (file.getName().endsWith(".class")) { + if (!file.getName().equals("CLIObject.class")) + classes.add( + Class.forName( + packageName + '.' + file.getName().substring(0, file.getName().length() - 6))); + } } + return classes; + } } diff --git a/backend/src/main/java/sparqles/core/SPARQLESProperties.java b/backend/src/main/java/sparqles/core/SPARQLESProperties.java index 8a705f30..13887f89 100644 --- a/backend/src/main/java/sparqles/core/SPARQLESProperties.java +++ b/backend/src/main/java/sparqles/core/SPARQLESProperties.java @@ -3,107 +3,128 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.net.URISyntaxException; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import sparqles.avro.Endpoint; public class SPARQLESProperties { - private static final Logger log = LoggerFactory.getLogger(SPARQLESProperties.class); - - private static String DATA_DIR = "./data"; - - private static String SCHEDULE_CRON; - private static String PTASK_QUERIES; - private static Integer SPARQL_WAITTIME = 5000; - private static Integer PTASK_WAITTIME = SPARQL_WAITTIME; - private 
static Integer FTASK_WAITTIME = SPARQL_WAITTIME; - private static String FTASK_QUERIES; - private static Integer TASK_THREADS = 10; - private static String ENDPOINT_LIST; - private static String DB_HOST = "localhost"; - private static int DB_PORT = 27017; - private static String DB_NAME = "sparqles"; - - public static String getSCHEDULE_CRON() { - return SCHEDULE_CRON; + private static final Logger log = LoggerFactory.getLogger(SPARQLESProperties.class); + private static Endpoint SPARQLES; + + private static String SPARQLES_HOST = CONSTANTS.DEFAULT_HOST; + + private static String DATA_DIR = "./data"; + + private static String SCHEDULE_CRON; + private static String PTASK_QUERIES; + private static Integer SPARQL_WAITTIME = 5000; + private static Integer PTASK_WAITTIME = SPARQL_WAITTIME; + private static Integer FTASK_WAITTIME = SPARQL_WAITTIME; + private static String FTASK_QUERIES; + private static Integer TASK_THREADS = 10; + private static String ENDPOINT_LIST; + private static String DB_HOST = "localhost"; + private static int DB_PORT = 27017; + private static String DB_NAME = "sparqles"; + + public static String getSCHEDULE_CRON() { + return SCHEDULE_CRON; + } + + public static String getDATA_DIR() { + return DATA_DIR; + } + + public static String getDB_HOST() { + return DB_HOST; + } + + public static String getDB_NAME() { + return DB_NAME; + } + + public static int getDB_PORT() { + return DB_PORT; + } + + public static String getPTASK_QUERIES() { + return PTASK_QUERIES; + } + + public static Integer getSPARQL_WAITTIME() { + return SPARQL_WAITTIME; + } + + public static Integer getPTASK_WAITTIME() { + return PTASK_WAITTIME; + } + + public static Integer getFTASK_WAITTIME() { + return FTASK_WAITTIME; + } + + public static String getFTASK_QUERIES() { + return FTASK_QUERIES; + } + + public static Integer getTASK_THREADS() { + return TASK_THREADS; + } + + public static Endpoint getSparqlesEndpoint() { + return SPARQLES; + } + + public static String getUserAgent() { 
+ return String.format(CONSTANTS.USER_AGENT_STRING, SPARQLES_HOST); + } + + public static String getENDPOINT_LIST() { + return ENDPOINT_LIST; + } + + public static void init(File propFile) { + Properties props = new Properties(); + try { + props.load(new FileInputStream(propFile)); + init(props); + } catch (IOException e) { + log.error("Could not load properties from file: {}", propFile); } + } - public static String getDATA_DIR() { - return DATA_DIR; - } - - public static String getDB_HOST() { - return DB_HOST; - } + public static void init(Properties props) { + SPARQLES_HOST = props.getProperty("host", SPARQLES_HOST); - public static String getDB_NAME() { - return DB_NAME; - } - - public static int getDB_PORT() { - return DB_PORT; - } + DATA_DIR = props.getProperty("data.dir", DATA_DIR); - public static String getPTASK_QUERIES() { - return PTASK_QUERIES; - } + DB_HOST = props.getProperty("db.host", DB_HOST); + DB_NAME = props.getProperty("db.name", DB_NAME); + DB_PORT = Integer.valueOf(props.getProperty("db.port", "" + DB_PORT)); - public static Integer getSPARQL_WAITTIME() { - return SPARQL_WAITTIME; - } + FTASK_QUERIES = (props.getProperty("ftask.queries")); + PTASK_QUERIES = (props.getProperty("ptask.queries")); - public static Integer getPTASK_WAITTIME() { - return PTASK_WAITTIME; - } + TASK_THREADS = Integer.valueOf(props.getProperty("task.threads", "" + TASK_THREADS)); - public static Integer getFTASK_WAITTIME() { - return FTASK_WAITTIME; - } + ENDPOINT_LIST = (props.getProperty("endpoint.list")); - public static String getFTASK_QUERIES() { - return FTASK_QUERIES; - } + SPARQL_WAITTIME = Integer.valueOf(props.getProperty("waittime", "" + SPARQL_WAITTIME)); + PTASK_WAITTIME = Integer.valueOf(props.getProperty("ptask.waittime", "" + SPARQL_WAITTIME)); + FTASK_WAITTIME = Integer.valueOf(props.getProperty("ftask.waittime", "" + SPARQL_WAITTIME)); - public static Integer getTASK_THREADS() { - return TASK_THREADS; - } + SCHEDULE_CRON = 
props.getProperty("schedule.cron"); - public static String getENDPOINT_LIST() { - return ENDPOINT_LIST; + try { + SPARQLES = EndpointFactory.newEndpoint(SPARQLES_HOST); + } catch (URISyntaxException e) { + log.error("Bad URI for the SPARQLES instance: {} (uri={})", e.getMessage(), SPARQLES_HOST); + throw new RuntimeException(e); } - public static void init(File propFile) { - Properties props = new Properties(); - try { - props.load(new FileInputStream(propFile)); - init(props); - } catch (IOException e) { - log.error("Could not load properties from file: {}", propFile); - } - } - - public static void init(Properties props) { - - DATA_DIR = props.getProperty("data.dir", DATA_DIR); - - DB_HOST = props.getProperty("db.host", DB_HOST); - DB_NAME = props.getProperty("db.name", DB_NAME); - DB_PORT = Integer.valueOf(props.getProperty("db.port", "" + DB_PORT)); - - FTASK_QUERIES = (props.getProperty("ftask.queries")); - PTASK_QUERIES = (props.getProperty("ptask.queries")); - - TASK_THREADS = Integer.valueOf(props.getProperty("task.threads", "" + TASK_THREADS)); - - ENDPOINT_LIST = (props.getProperty("endpoint.list")); - - SPARQL_WAITTIME = Integer.valueOf(props.getProperty("waittime", "" + SPARQL_WAITTIME)); - PTASK_WAITTIME = Integer.valueOf(props.getProperty("ptask.waittime", "" + SPARQL_WAITTIME)); - FTASK_WAITTIME = Integer.valueOf(props.getProperty("ftask.waittime", "" + SPARQL_WAITTIME)); - - SCHEDULE_CRON = props.getProperty("schedule.cron"); - - Object[] t = {DATA_DIR, DB_HOST, DB_PORT}; - log.debug("[LOAD] properties: {}", props); - } + Object[] t = {DATA_DIR, DB_HOST, DB_PORT}; + log.debug("[LOAD] properties: {}", props); + } } diff --git a/backend/src/main/java/sparqles/core/Task.java b/backend/src/main/java/sparqles/core/Task.java index 50242bf4..2a35ad41 100644 --- a/backend/src/main/java/sparqles/core/Task.java +++ b/backend/src/main/java/sparqles/core/Task.java @@ -7,17 +7,17 @@ /** * A Task is a {@link Callable} connected to the database. 
* - * @param - return type restricted to AVRO objects * @author umbrichj + * @param - return type restricted to AVRO objects */ public interface Task extends Callable { - // public void execute(); + // public void execute(); - /** - * Set the MongoDBManager. - * - * @param dbm - */ - void setDBManager(MongoDBManager dbm); + /** + * Set the MongoDBManager. + * + * @param dbm + */ + void setDBManager(MongoDBManager dbm); } diff --git a/backend/src/main/java/sparqles/core/TaskFactory.java b/backend/src/main/java/sparqles/core/TaskFactory.java index 86e0ab40..e1751952 100644 --- a/backend/src/main/java/sparqles/core/TaskFactory.java +++ b/backend/src/main/java/sparqles/core/TaskFactory.java @@ -7,6 +7,7 @@ import org.slf4j.LoggerFactory; import sparqles.analytics.AAnalyser; import sparqles.analytics.Analytics; +import sparqles.analytics.CAnalyser; import sparqles.analytics.DAnalyser; import sparqles.analytics.FAnalyser; import sparqles.analytics.IndexViewAnalytics; @@ -14,6 +15,7 @@ import sparqles.analytics.RefreshDataHubTask; import sparqles.avro.Endpoint; import sparqles.core.availability.ATask; +import sparqles.core.calculation.CTask; import sparqles.core.discovery.DTask; import sparqles.core.interoperability.FTask; import sparqles.core.interoperability.SpecificFTask; @@ -23,43 +25,45 @@ import sparqles.utils.MongoDBManager; public class TaskFactory { - private static final Logger log = LoggerFactory.getLogger(TaskFactory.class); + private static final Logger log = LoggerFactory.getLogger(TaskFactory.class); - public static Task create(String task, String endpoint, MongoDBManager dbm, FileManager fm) - throws URISyntaxException { - Endpoint ep = EndpointFactory.newEndpoint(endpoint); - return create(task, ep, dbm, fm); - } - - public static Task create(String task, Endpoint ep, MongoDBManager dbm, FileManager fm) { - Task t = null; - Analytics a = null; - if (task.equalsIgnoreCase(PTASK)) { - t = new PTask(ep, SpecificPTask.values()); - a = new PAnalyser(dbm); - } 
else if (task.equalsIgnoreCase(ATASK)) { - t = new ATask(ep); - a = new AAnalyser(dbm); - } else if (task.equalsIgnoreCase(FTASK)) { - t = new FTask(ep, SpecificFTask.values()); - a = new FAnalyser(dbm); - } else if (task.equalsIgnoreCase(DTASK)) { - t = new DTask(ep); - a = new DAnalyser(dbm); - } else if (task.equalsIgnoreCase(ITASK)) { - t = new IndexViewAnalytics(); - } else if (task.equalsIgnoreCase(ETASK)) { - t = new RefreshDataHubTask(); - } else { - log.warn("Task {} not supported or known", task); - t = null; - } - if (dbm != null && t != null) t.setDBManager(dbm); - if (fm != null && t != null && t instanceof EndpointTask) - ((EndpointTask) t).setFileManager(fm); - if (t instanceof EndpointTask) ((EndpointTask) t).setAnalytics(a); + public static Task create(String task, String endpoint, MongoDBManager dbm, FileManager fm) + throws URISyntaxException { + Endpoint ep = EndpointFactory.newEndpoint(endpoint); + return create(task, ep, dbm, fm); + } - if (t != null) log.trace("Successfully create {} task for {}", task, ep.getUri()); - return t; + public static Task create(String task, Endpoint ep, MongoDBManager dbm, FileManager fm) { + Task t = null; + Analytics a = null; + if (task.equalsIgnoreCase(PTASK)) { + t = new PTask(ep, SpecificPTask.values()); + a = new PAnalyser(dbm); + } else if (task.equalsIgnoreCase(ATASK)) { + t = new ATask(ep); + a = new AAnalyser(dbm); + } else if (task.equalsIgnoreCase(FTASK)) { + t = new FTask(ep, SpecificFTask.values()); + a = new FAnalyser(dbm); + } else if (task.equalsIgnoreCase(DTASK)) { + t = new DTask(ep); + a = new DAnalyser(dbm); + } else if (task.equalsIgnoreCase(CTASK)) { + t = new CTask(ep); + a = new CAnalyser(dbm); + } else if (task.equalsIgnoreCase(ITASK)) { + t = new IndexViewAnalytics(); + } else if (task.equalsIgnoreCase(ETASK)) { + t = new RefreshDataHubTask(); + } else { + log.warn("Task {} not supported or known", task); + t = null; } + if (dbm != null && t != null) t.setDBManager(dbm); + if (fm != 
null && t != null && t instanceof EndpointTask) ((EndpointTask) t).setFileManager(fm); + if (t instanceof EndpointTask) ((EndpointTask) t).setAnalytics(a); + + if (t != null) log.trace("Successfully create {} task for {}", task, ep.getUri()); + return t; + } } diff --git a/backend/src/main/java/sparqles/core/availability/ATask.java b/backend/src/main/java/sparqles/core/availability/ATask.java index d10c7b34..b8457e59 100644 --- a/backend/src/main/java/sparqles/core/availability/ATask.java +++ b/backend/src/main/java/sparqles/core/availability/ATask.java @@ -1,13 +1,12 @@ package sparqles.core.availability; -import java.net.ConnectException; -import java.net.UnknownHostException; -import java.net.http.HttpConnectTimeoutException; -import javax.net.ssl.SSLHandshakeException; -import org.apache.http.HttpException; -import org.apache.http.conn.ConnectTimeoutException; +import static java.time.temporal.ChronoUnit.MILLIS; + +import java.time.Duration; +import java.util.Objects; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.jena.query.ARQ; import org.apache.jena.query.QueryExecution; -import org.apache.jena.sparql.engine.http.QueryExceptionHTTP; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import sparqles.avro.Endpoint; @@ -15,8 +14,7 @@ import sparqles.avro.availability.AResult; import sparqles.core.EndpointTask; import sparqles.core.interoperability.TaskRun; -import sparqles.utils.ExceptionHandler; -import sparqles.utils.QueryManager; +import sparqles.utils.*; /** * This class performs the required task to study the availability of an endpoint. 
@@ -28,167 +26,126 @@ */ public class ATask extends EndpointTask { - /** static class logger */ - private static final Logger log = LoggerFactory.getLogger(ATask.class); + /** static class logger */ + private static final Logger log = LoggerFactory.getLogger(ATask.class); - private static final String ASKQUERY = "ASK WHERE{?s ?p ?o}"; - private static final String SELECTQUERY = "SELECT ?s WHERE{?s ?p ?o} LIMIT 1"; + private static final String ASKQUERY = "ASK WHERE{?s ?p ?o}"; + private static final String SELECTQUERY = "SELECT ?s WHERE{?s ?p ?o} LIMIT 1"; - public ATask(Endpoint ep) { - super(ep); - } + public ATask(Endpoint ep) { + super(ep); + } - @Override - public AResult process(EndpointResult epr) { - AResult result = new AResult(); - result.setEndpointResult(epr); - result.setExplanation("Endpoint is operating normally"); + @Override + public AResult process(EndpointResult epr) { + AResult result = new AResult(); + result.setEndpointResult(epr); + result.setExplanation("✅ Endpoint is operating normally"); - long start = System.currentTimeMillis(); - try { - QueryExecution qe = QueryManager.getExecution(epr.getEndpoint(), ASKQUERY); - // FIXME: find a new way - // qe.setTimeout(TaskRun.A_FIRST_RESULT_TIMEOUT, - // TaskRun.A_FIRST_RESULT_TIMEOUT); - boolean response = qe.execAsk(); - if (response) { - result.setResponseTime((System.currentTimeMillis() - start)); - if ((System.currentTimeMillis() - start) > 20000) { - result.setIsAvailable(false); - result.setExplanation("SPARQL Endpoint is timeout"); - } else { - result.setIsAvailable(response); - result.setExplanation("Endpoint is operating normally"); - } - log.debug("executed ask {}", epr.getEndpoint().getUri().toString()); - return result; - } else { - return testSelect(epr); - } - } catch (InterruptedException e) { - String ex = ExceptionHandler.logAndtoString(e); - result.setException(ex); - result.setExplanation(ex); + long start = System.currentTimeMillis(); + try { + QueryExecution qe = + 
QueryManager.getExecution( + epr.getEndpoint(), ASKQUERY, Duration.of(TaskRun.A_FIRST_RESULT_TIMEOUT, MILLIS)); + qe.getContext().set(ARQ.httpQueryTimeout, TaskRun.A_FIRST_RESULT_TIMEOUT); - log.warn( - "failed ASK query for {}, {}", - _epURI, - ExceptionHandler.logAndtoString(e, true)); - return result; - } catch (Exception e) { - return testSelect(epr); + boolean response = qe.execAsk(); + if (response) { + result.setResponseTime((System.currentTimeMillis() - start)); + if ((System.currentTimeMillis() - start) > 20000) { + result.setIsAvailable(false); + result.setExplanation("SPARQL Endpoint is timeout"); + } else { + result.setIsAvailable(response); + result.setExplanation("Endpoint is operating normally"); + } + log.debug("executed ask {}", epr.getEndpoint().getUri().toString()); + } else { + result = testSelect(epr); + } + } catch (Exception e) { + var faultKind = FaultDiagnostic.faultKindForJenaQuery(e); + if (faultKind == FaultKind.UNKNOWN) { + result.setIsAvailable(false); + String ex = ExceptionHandler.logAndtoString(e); + result.setException(StringUtils.trunc(ex)); + result.setExplanation("Unknown error encountered while attempting an ASK query"); + log.warn( + "Unknown error encountered while attempting an ASK query" + " (type={})", + e.getClass().getName()); + log.debug("Stacktrace", e); + log.debug("Full stacktrace:\n{}", ExceptionHandler.toFullString(e)); + log.debug("Full cause:\n{}", ExceptionHandler.toFullCause(e)); + log.debug("Root cause:\n{}", ExceptionUtils.getRootCauseMessage(e)); + } else { + if (faultKind == FaultKind.BAD_REQUEST + || faultKind == FaultKind.BAD_RESPONSE + || faultKind == FaultKind.DOWN_TIMEOUT + || faultKind == FaultKind.BAD_SERVER_ERROR) { + result = testSelect(epr); + } else { + updateAResultFromFault(faultKind, result); } + } } + log.info( + "{} availability: {} ({} ms)", _epURI, result.getExplanation(), result.getResponseTime()); + return result; + } - private AResult testSelect(EndpointResult epr) { - AResult result = 
new AResult(); - result.setEndpointResult(epr); - result.setExplanation("Endpoint is operating normally"); - long start = System.currentTimeMillis(); - try { - QueryExecution qe = QueryManager.getExecution(epr.getEndpoint(), SELECTQUERY); - // FIXME - // qe.setTimeout(TaskRun.A_FIRST_RESULT_TIMEOUT, - // TaskRun.A_FIRST_RESULT_TIMEOUT); - boolean response = qe.execSelect().hasNext(); + private AResult testSelect(EndpointResult epr) { + AResult result = new AResult(); + result.setEndpointResult(epr); + result.setExplanation("Endpoint is operating normally"); + long start = System.currentTimeMillis(); + try { + QueryExecution qe = + QueryManager.getExecution( + epr.getEndpoint(), SELECTQUERY, Duration.of(TaskRun.A_FIRST_RESULT_TIMEOUT, MILLIS)); + boolean response = qe.execSelect().hasNext(); - if (response) { - result.setResponseTime((System.currentTimeMillis() - start)); - if ((System.currentTimeMillis() - start) > TaskRun.A_FIRST_RESULT_TIMEOUT) { - result.setIsAvailable(false); - result.setExplanation("SPARQL Endpoint is timeout"); - } else { - result.setIsAvailable(response); - result.setExplanation("Endpoint is operating normally"); - } - log.debug("executed select {}", epr.getEndpoint().getUri().toString()); - return result; - } else { - result.setIsAvailable(response); - log.debug("executed no response {}", epr.getEndpoint().getUri().toString()); - return result; - } - } catch (ConnectTimeoutException | ConnectException e) { - result.setIsAvailable(false); - String msg = "🐌 connection timeout while connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } catch (UnknownHostException e) { - result.setIsAvailable(false); - String msg = "🕳️ host not found while connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } catch (QueryExceptionHTTP | HttpException e) { - if (e.getCause() instanceof UnknownHostException) { - result.setIsAvailable(false); - String msg = "🕳️ host not found while 
connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } - if (e.getCause() instanceof ConnectTimeoutException - || e.getCause() instanceof ConnectException - || e.getCause() instanceof HttpConnectTimeoutException) { - result.setIsAvailable(false); - String msg = "🐌 connection timeout while connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } - if (e.getCause() instanceof SSLHandshakeException) { - result.setIsAvailable(false); - String msg = "🏰 failed to establish a TLS connection to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } - if (e.getMessage().contains("400")) { - result.setIsAvailable(false); - String msg = - "👾 host did not like our request (400); while connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } else if (e.getMessage().contains("401")) { - result.setIsAvailable(false); - String msg = "✋ host requires authn (401); while connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - result.setIsPrivate(true); - return result; - } else if (e.getMessage().contains("502")) { - result.setIsAvailable(false); - String msg = - "🕳 server is likely down behind reverse proxy (502); while connecting to " - + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } else if (e.getMessage().contains("503")) { - result.setIsAvailable(false); - String msg = - "🕳 endpoint is overloaded or gone (503); while connecting to " + _epURI; - log.info(msg); - result.setExplanation(msg); - return result; - } - } catch (Exception e1) { - result.setIsAvailable(false); - String ex = ExceptionHandler.logAndtoString(e1); - result.setException(ex); - result.setExplanation(ex); - if (e1.getMessage() != null) - if (e1.getMessage().contains("401 Authorization Required")) - result.setIsPrivate(true); - - log.warn( - "failed SELECT query for {}, {} (type {})", - _epURI, - 
ExceptionHandler.logAndtoString(e1, true), - e1.getClass().getName()); + if (response) { + result.setResponseTime((System.currentTimeMillis() - start)); + if ((System.currentTimeMillis() - start) > TaskRun.A_FIRST_RESULT_TIMEOUT) { + result.setIsAvailable(false); + result.setExplanation("SPARQL Endpoint is timeout"); + } else { + result.setIsAvailable(response); + result.setExplanation("Endpoint is operating normally"); } - return result; + log.debug("executed select {}", epr.getEndpoint().getUri().toString()); + } else { + result.setIsAvailable(response); + log.debug("executed no response {}", epr.getEndpoint().getUri().toString()); + } + } catch (Exception e) { + var faultKind = FaultDiagnostic.faultKindForJenaQuery(e); + if (faultKind == FaultKind.UNKNOWN) { + result.setIsAvailable(false); + String ex = ExceptionHandler.logAndtoString(e); + result.setException(StringUtils.trunc(ex)); + result.setExplanation( + "Unknown error encountered while attempting an ASK query fallback (SELECT LIMIT 1)"); + log.warn( + "Unknown error encountered while attempting an ASK query fallback (SELECT LIMIT 1)" + + " (type={})", + e.getClass().getName()); + log.debug("Stacktrace", e); + } else { + updateAResultFromFault(faultKind, result); + } + } + log.info( + "{} availability: {} ({} ms)", _epURI, result.getExplanation(), result.getResponseTime()); + return result; + } + + public static void updateAResultFromFault(FaultKind faultKind, AResult result) { + result.setIsAvailable(false); + if (Objects.requireNonNull(faultKind) == FaultKind.UNKNOWN) { + throw new IllegalArgumentException(); + } else { + result.setExplanation(FaultDiagnostic.interpretFault(faultKind)); } + } } diff --git a/backend/src/main/java/sparqles/core/calculation/CTask.java b/backend/src/main/java/sparqles/core/calculation/CTask.java new file mode 100644 index 00000000..7368a61a --- /dev/null +++ b/backend/src/main/java/sparqles/core/calculation/CTask.java @@ -0,0 +1,571 @@ +package sparqles.core.calculation; + 
+import static java.time.temporal.ChronoUnit.*; + +import java.time.Duration; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.HashSet; +import java.util.Set; +import org.apache.commons.math3.stat.descriptive.moment.Kurtosis; +import org.apache.jena.query.*; +import org.apache.jena.rdf.model.*; +import org.apache.jena.vocabulary.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import sparqles.avro.Endpoint; +import sparqles.avro.EndpointResult; +import sparqles.avro.calculation.CResult; +import sparqles.core.EndpointTask; +import sparqles.utils.QueryManager; + +public class CTask extends EndpointTask { + private static final Logger log = LoggerFactory.getLogger(CTask.class); + + private static final String sparqDescNS = "http://www.w3.org/ns/sparql-service-description#"; + private static final String voidNS = "http://rdfs.org/ns/void#"; + private static final String dctermsNS = "http://purl.org/dc/terms/"; + private static final String foafNS = "http://xmlns.com/foaf/0.1/"; + + private static final String queryPingEndpoint = "" + "ASK { ?s ?p ?o }"; + + private static final String queryNumberOfTriples = + "" + "SELECT (COUNT (*) as ?value)\n" + "WHERE { ?s ?p ?o }"; + + private static final String queryNumberOfEntities = + "" + + "PREFIX rdf: \n" + + "SELECT (COUNT (DISTINCT ?entity) as ?value)\n" + + "WHERE { ?entity rdf:type ?class }"; + + private static final String queryNumberOfClasses = + "" + + "PREFIX rdf: \n" + + "SELECT (COUNT (DISTINCT ?class) as ?value)\n" + + "WHERE { ?entity rdf:type ?class }"; + + private static final String queryNumberOfProperties = + "" + "SELECT (COUNT (DISTINCT ?p) as ?value)\n" + "WHERE { ?s ?p ?o }"; + + private static final String queryNumberOfSubjects = + "" + "SELECT (COUNT (DISTINCT ?s) as ?value)\n" + "WHERE { ?s ?p ?o }"; + + private static final String queryNumberOfObjects = + "" + "SELECT (COUNT (DISTINCT ?o) as ?value)\n" + "WHERE { ?s ?p ?o }"; + + private static 
final String queryExampleResource = + "" + + "PREFIX rdf: \n" + + "SELECT ?value\n" + + "WHERE { ?value rdf:type ?class }\n" + + "LIMIT 3"; + + public CTask(Endpoint ep) { + super(ep); + } + + @Override + public CResult process(EndpointResult epr) { + CResult result = new CResult(); + result.setEndpointResult(epr); + // if (!_epURI.equals("http://en.openei.org/sparql")) + // return result; + log.info("### execute {}", _epURI); + + // Code for generating a VoID and SPARQL Service Description profile for the endpoint. + // author: Milos Jovanovik (@mjovanovik) + + long triples = -1; + long entities = -1; + long classes = -1; + long properties = -1; + long distinctSubjects = -1; + long distinctObjects = -1; + java.util.List exampleResourceList = new java.util.ArrayList<>(); + String VoID = ""; + boolean VoIDPart = false; + String SD = ""; + boolean SDPart = false; + double coherence = -1.0; + double relationshipSpecialty = -1.0; + + // Check if the endpoint is accessible or not. + // If not, there's no need to try and generate a VoID profile for it. + Boolean ping = false; + // Query query1 = QueryFactory.create(queryPingEndpoint); + try { + var qexec1 = QueryManager.getExecution(_epURI, queryPingEndpoint); + ping = qexec1.execAsk(); + } catch (Exception e) { + log.info("[Error executing SPARQL query for {}]", _epURI); + log.info("[Error details: {}]", e.toString()); + } + + // If the endpoint is accessible, try to gather VoID statistics and generate the profile. 
+ if (ping) { + log.info("[GENERATION of VoiD] {}", _epURI); + triples = executeLongQuery(_epURI, queryNumberOfTriples); + if (triples == -1) { + VoIDPart = true; + SDPart = true; + } + log.info("Number of triples in {}: {}", _epURI, triples); + // if (!_epURI.equals("http://sparql.uniprot.org")) // TODO: fix this hack + entities = executeLongQuery(_epURI, queryNumberOfEntities); + if (entities == -1) VoIDPart = true; + log.info("Number of entities in {}: {}", _epURI, entities); + classes = executeLongQuery(_epURI, queryNumberOfClasses); + if (classes == -1) VoIDPart = true; + log.info("Number of classes in {}: {}", _epURI, classes); + properties = executeLongQuery(_epURI, queryNumberOfProperties); + if (properties == -1) VoIDPart = true; + log.info("Number of properties in {}: {}", _epURI, properties); + // if (!_epURI.equals("http://fr.dbpedia.org/sparql") && + // !_epURI.equals("http://sparql.uniprot.org")) // TODO: fix this hack + distinctSubjects = executeLongQuery(_epURI, queryNumberOfSubjects); + if (distinctSubjects == -1) VoIDPart = true; + log.info("Number of distinct subjects in {}: {}", _epURI, distinctSubjects); + // if (!_epURI.equals("http://fr.dbpedia.org/sparql") && + // !_epURI.equals("http://sparql.uniprot.org")) // TODO: fix this hack + distinctObjects = executeLongQuery(_epURI, queryNumberOfObjects); + if (distinctObjects == -1) VoIDPart = true; + log.info("Number of distinct objects in {}: {}", _epURI, distinctObjects); + exampleResourceList = executeQuery(_epURI, queryExampleResource); + if (exampleResourceList.size() == 0) VoIDPart = true; + log.info("Number of example resources in {}: {}", _epURI, exampleResourceList.size()); + + try { + log.info("Coherence calculation for {} ...", _epURI); + coherence = calculateCoherence(_epURI); + } catch (Exception e) { + log.warn("[Error details: {}]", e.toString()); + } + try { + log.info("Relationship Specialty calculation {} ...", _epURI); + if (triples != -1 && distinctSubjects != -1) + 
relationshipSpecialty = calculateRelationshipSpecialty(_epURI, triples, distinctSubjects); + } catch (Exception e) { + log.warn("[Error details: {}]", e.toString()); + } + + // Separate model for the SPARQL Service Description + Model modelSD = ModelFactory.createDefaultModel(); + + // Separate model for the VoID Profile + Model modelVoID = ModelFactory.createDefaultModel(); + + // Resources for the SPARQL Service Description + Resource endpointEntitySD = modelSD.createResource(_epURI); + Resource sdService = modelSD.createResource(sparqDescNS + "Service"); + Resource sdDataset = modelSD.createResource(sparqDescNS + "Dataset"); + Resource sdGraph = modelSD.createResource(sparqDescNS + "Graph"); + + // Resources for the VoID Profile + Resource endpointEntityVoiD = modelVoID.createResource(_epURI); + Resource endpointEntityVoiDDescription = modelVoID.createResource(_epURI + "/profile"); + Resource voidDatasetDescription = modelVoID.createResource(voidNS + "DatasetDescription"); + Resource voidDataset = modelVoID.createResource(voidNS + "Dataset"); + Resource sparqlesEntity = + modelVoID.createResource( + "https://sparqles.demo.openlinksw.com"); // TODO: This is hardcoded for + // now, needs to + // be dynamic + + // Properties for the SPARQL Service Description + Property sdendpoint = modelSD.createProperty(sparqDescNS + "endpoint"); + Property sddefaultDataset = modelSD.createProperty(sparqDescNS + "defaultDataset"); + Property sddefaultGraph = modelSD.createProperty(sparqDescNS + "defaultGraph"); + Property voidtriplesSD = modelVoID.createProperty(voidNS + "triples"); + + // Properties for the VoID Profile + Property dctermsTitle = modelVoID.createProperty(dctermsNS + "title"); + Property dctermsCreator = modelVoID.createProperty(dctermsNS + "creator"); + Property dctermsDate = modelVoID.createProperty(dctermsNS + "date"); + Property foafprimaryTopic = modelVoID.createProperty(foafNS + "primaryTopic"); + Property voidtriples = modelVoID.createProperty(voidNS + 
"triples"); + Property voidentities = modelVoID.createProperty(voidNS + "entities"); + Property voidclasses = modelVoID.createProperty(voidNS + "classes"); + Property voidproperties = modelVoID.createProperty(voidNS + "properties"); + Property voiddistinctSubjects = modelVoID.createProperty(voidNS + "distinctSubjects"); + Property voiddistinctObjects = modelVoID.createProperty(voidNS + "distinctObjects"); + Property voidsparqlEndpoint = modelVoID.createProperty(voidNS + "sparqlEndpoint"); + Property voidexampleResource = modelVoID.createProperty(voidNS + "exampleResource"); + Property coherenceValue = + modelVoID.createProperty("https://www.3dfed.com/ontology/coherence"); + Property relationshipSpecialtyValue = + modelVoID.createProperty("https://www.3dfed.com/ontology/relationshipSpecialty"); + + // get current date + LocalDate currentDate = LocalDate.now(); + String currentDateString = currentDate.format(DateTimeFormatter.ISO_DATE); + // Literal currentDateLiteral = model.createTypedLiteral(currentDateString, XSD.date); + Literal currentDateLiteral = modelVoID.createLiteral(currentDateString); + + // construct the SPARQL Service Description in RDF + endpointEntitySD.addProperty(RDF.type, sdService); + endpointEntitySD.addProperty(sdendpoint, endpointEntitySD); + endpointEntitySD.addProperty( + sddefaultDataset, + modelSD + .createResource() + .addProperty(RDF.type, sdDataset) + .addProperty( + sddefaultGraph, + modelSD + .createResource() + .addProperty(RDF.type, sdGraph) + .addProperty(voidtriplesSD, Long.toString(triples)))); + + // construct the VoID Profile in RDF + endpointEntityVoiDDescription.addProperty(RDF.type, voidDatasetDescription); + endpointEntityVoiDDescription.addProperty( + dctermsTitle, "Automatically constructed VoID description for a SPARQL Endpoint"); + endpointEntityVoiDDescription.addProperty(dctermsCreator, sparqlesEntity); + endpointEntityVoiDDescription.addProperty(dctermsDate, currentDateLiteral); + 
endpointEntityVoiDDescription.addProperty(foafprimaryTopic, endpointEntityVoiD); + + endpointEntityVoiD.addProperty(RDF.type, voidDataset); + endpointEntityVoiD.addProperty(voidsparqlEndpoint, endpointEntityVoiD); + for (int i = 0; i < exampleResourceList.size(); i++) + endpointEntityVoiD.addProperty( + voidexampleResource, modelVoID.createResource(exampleResourceList.get(i).toString())); + endpointEntityVoiD.addProperty(voidtriples, Long.toString(triples)); + endpointEntityVoiD.addProperty(voidentities, Long.toString(entities)); + endpointEntityVoiD.addProperty(voidclasses, Long.toString(classes)); + endpointEntityVoiD.addProperty(voidproperties, Long.toString(properties)); + endpointEntityVoiD.addProperty(voiddistinctSubjects, Long.toString(distinctSubjects)); + endpointEntityVoiD.addProperty(voiddistinctObjects, Long.toString(distinctObjects)); + + // add the Coherence and Relationship Specialty values for the endpoint + endpointEntityVoiD.addProperty(coherenceValue, Double.toString(coherence)); + endpointEntityVoiD.addProperty( + relationshipSpecialtyValue, Double.toString(relationshipSpecialty)); + + // the SD and VoID profiles have been generated, now we persist it + java.io.StringWriter stringModelVoID = new java.io.StringWriter(); + modelVoID.write(stringModelVoID, "TURTLE"); + VoID = stringModelVoID.toString(); + + java.io.StringWriter stringModelSD = new java.io.StringWriter(); + modelSD.write(stringModelSD, "TURTLE"); + SD = stringModelSD.toString(); + } + + result.setTriples(triples); + result.setEntities(entities); + result.setClasses(classes); + result.setProperties(properties); + result.setDistinctSubjects(distinctSubjects); + result.setDistinctObjects(distinctObjects); + result.setExampleResources(exampleResourceList); + result.setVoID(VoID); + result.setVoIDPart(VoIDPart); + result.setSD(SD); + result.setSDPart(SDPart); + result.setCoherence(coherence); + result.setRS(relationshipSpecialty); + + log.info("$$$ executed {}", this); + + return result; 
+ } + + public long executeLongQuery(String endpointURL, String queryText) { + long result = -1; + try { + ResultSet results; + try (QueryExecution qexec = + QueryManager.getExecution(endpointURL, queryText, Duration.of(10, MINUTES))) { + results = qexec.execSelect(); + if (results.hasNext()) { + QuerySolution thisRow = results.next(); + result = ((Literal) thisRow.get("value")).getLong(); + } + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpointURL); + log.warn("[SPARQL query: {}]", queryText); + log.warn("[Error details: {}]", e.toString()); + } + return result; + } + + public java.util.List executeQuery(String endpointURL, String queryText) { + java.util.List list = new java.util.ArrayList<>(); + try { + // Query query = QueryFactory.create(queryText); + ResultSet results; + try (QueryExecution qexec = QueryManager.getExecution(endpointURL, queryText)) { + // qexec.setTimeout(10, TimeUnit.MINUTES); + results = qexec.execSelect(); + if (results != null) { + while (results.hasNext()) { + QuerySolution thisRow = results.next(); + list.add(((Resource) thisRow.get("value")).toString()); + } + } + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpointURL); + log.warn("[SPARQL query: {}]", queryText); + log.warn("[Error details: {}]", e.toString()); + } + return list; + } + + public double calculateCoherence(String endpointUrl) { + Set types = getRDFTypes(endpointUrl); + int typesSize = types.size(); + log.info("Number of types in {}: {}", endpointUrl, typesSize); + // if(types.size()==0) return 0; // the SPARQL query has failed, so we cannot calculate the + // coherence + double weightedDenomSum = getTypesWeightedDenomSum(types, endpointUrl); + log.info("Weighted denom sum in {}: {}", endpointUrl, weightedDenomSum); + // if(weightedDenomSum==0) return 0; // the SPARQL query has failed, so we cannot calculate + // the + // coherence + double structuredness = 0; + int i = 1; + for (String type : 
types) { + log.info("Processing type {}/{} in coherence of {}", i, typesSize, endpointUrl); + long occurenceSum = 0; + Set typePredicates = getTypePredicates(type, endpointUrl); + long typeInstancesSize = getTypeInstancesSize(type, endpointUrl); + // if(typeInstancesSize==0) return 0; // the SPARQL query has failed, so we cannot + // calculate + // the coherence + for (String predicate : typePredicates) { + long predicateOccurences = getOccurences(predicate, type, endpointUrl); + occurenceSum = (occurenceSum + predicateOccurences); + } + double denom = typePredicates.size() * typeInstancesSize; + if (typePredicates.size() == 0) denom = 1; + double coverage = occurenceSum / denom; + double weightedCoverage = (typePredicates.size() + typeInstancesSize) / weightedDenomSum; + structuredness = (structuredness + (coverage * weightedCoverage)); + i++; + } + return structuredness; + } + + public static Set getRDFTypes(String endpoint) { + Set types = new HashSet(); + String queryString = "" + "SELECT DISTINCT ?type\n" + "WHERE { ?s a ?type }"; + try { + // Query query = QueryFactory.create(queryString); + ResultSet res; + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + res = qExec.execSelect(); + while (res.hasNext()) { + types.add(res.next().get("type").toString()); + } + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return types; + } + + public static double getTypesWeightedDenomSum(Set types, String endpoint) { + double sum = 0; + int typesSize = types.size(); + int i = 1; + for (String type : types) { + log.info("Processing type {}/{} in coherence of {}", i, typesSize, endpoint); + long typeInstancesSize = getTypeInstancesSize(type, endpoint); + long typePredicatesSize = getTypePredicates(type, endpoint).size(); + sum = sum + typeInstancesSize + 
typePredicatesSize; + i++; + } + return sum; + } + + public static long getTypeInstancesSize(String type, String endpoint) { + long typeInstancesSize = 0; + String queryString = + "" + + "SELECT (COUNT (DISTINCT ?s) as ?cnt ) \n" + + "WHERE {\n" + + " ?s a <" + + type.replaceAll("\\s", "") + + "> . " + + " ?s ?p ?o" + + "}"; + try { + // Query query = QueryFactory.create(queryString); + ResultSet res; + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + + res = qExec.execSelect(); + while (res.hasNext()) { + typeInstancesSize = Long.parseLong(res.next().get("cnt").asLiteral().getString()); + } + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return typeInstancesSize; + } + + public static Set getTypePredicates(String type, String endpoint) { + Set typePredicates = new HashSet(); + String queryString = + "" + + "SELECT DISTINCT ?typePred \n" + + "WHERE { \n" + + " ?s a <" + + type.replaceAll("\\s", "") + + "> . 
" + + " ?s ?typePred ?o" + + "}"; + try { + // Query query = QueryFactory.create(queryString); + ResultSet res; + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + + res = qExec.execSelect(); + while (res.hasNext()) { + String predicate = res.next().get("typePred").toString(); + if (!predicate.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) + typePredicates.add(predicate); + } + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return typePredicates; + } + + public static long getOccurences(String predicate, String type, String endpoint) { + long predicateOccurences = 0; + String queryString = + "" + + "SELECT (COUNT (DISTINCT ?s) as ?occurences) \n" + + "WHERE { \n" + + " ?s a <" + + type.replaceAll("\\s", "") + + "> . " + + " ?s <" + + predicate + + "> ?o" + + "}"; + try { + // Query query = QueryFactory.create(queryString); + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + + ResultSet res = qExec.execSelect(); + while (res.hasNext()) + predicateOccurences = + Long.parseLong(res.next().get("occurences").asLiteral().getString()); + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return predicateOccurences; + } + + public double calculateRelationshipSpecialty( + String endpoint, long numOfTriples, long numOfSubjects) { + Set predicates = getRelationshipPredicates(endpoint); + int predicatesSize = predicates.size(); + log.info("Number of predicates in {}: {}", endpoint, predicatesSize); + long datasetSize = numOfTriples; + long subjects = numOfSubjects; + Kurtosis kurt = new Kurtosis(); + double relationshipSpecialty = 0; + int i = 1; + for 
(String predicate : predicates) { + log.info("Processing predicate {}/{} in RS of {}", i, predicatesSize, endpoint); + double[] occurences = getOccurences(predicate, endpoint, subjects); + double kurtosis = kurt.evaluate(occurences); + // long tpSize = getPredicateSize(predicate, endpoint, namedGraph); + long tpSize = getPredicateSize(predicate, endpoint); + relationshipSpecialty = relationshipSpecialty + (tpSize * kurtosis / datasetSize); + i++; + } + return relationshipSpecialty; + } + + public static Set getRelationshipPredicates(String endpoint) { + Set predicates = new HashSet(); + String queryString; + queryString = "SELECT DISTINCT ?p WHERE {?s ?p ?o . FILTER isIRI(?o) } "; + try { + // Query query = QueryFactory.create(queryString); + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + ResultSet res = qExec.execSelect(); + while (res.hasNext()) predicates.add(res.next().get("p").toString()); + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return predicates; + } + + public static double[] getOccurences(String predicate, String endpoint, long subjects) { + double[] occurences = new double[(int) subjects + 1]; + String queryString; + queryString = + "SELECT (count(?o) as ?occ) WHERE { ?res <" + predicate + "> ?o . 
} Group by ?res"; + try { + // Query query = QueryFactory.create(queryString); + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + ResultSet res = qExec.execSelect(); + int i = 0; + while (res.hasNext()) { + occurences[i] = res.next().get("occ").asLiteral().getDouble(); + i++; + } + if (i == 0) occurences[0] = 1; + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return occurences; + } + + public static long getPredicateSize(String predicate, String endpoint) { + long count = 0; + String queryString = ""; + queryString = + "" + + "SELECT (COUNT (*) as ?total) \n" + + "WHERE { \n" + + " ?s <" + + predicate + + "> ?o" + + "}"; + try { + // Query query = QueryFactory.create(queryString); + try (QueryExecution qExec = QueryManager.getExecution(endpoint, queryString)) { + // qExec.setTimeout(10, TimeUnit.MINUTES); + ResultSet res = qExec.execSelect(); + while (res.hasNext()) + count = Long.parseLong(res.next().get("total").asLiteral().getString()); + } + } catch (Exception e) { + log.warn("[Error executing SPARQL query for {}]", endpoint); + log.warn("[SPARQL query: {}]", queryString); + throw new IllegalStateException(e); + } + return count; + } +} diff --git a/backend/src/main/java/sparqles/core/discovery/DTask.java b/backend/src/main/java/sparqles/core/discovery/DTask.java index 63efd400..d3b2a663 100644 --- a/backend/src/main/java/sparqles/core/discovery/DTask.java +++ b/backend/src/main/java/sparqles/core/discovery/DTask.java @@ -41,6 +41,7 @@ import sparqles.avro.discovery.RobotsTXT; import sparqles.core.CONSTANTS; import sparqles.core.EndpointTask; +import sparqles.core.SPARQLESProperties; import sparqles.utils.ConnectionManager; import sparqles.utils.ExceptionHandler; import sparqles.utils.QueryManager; @@ -55,418 +56,407 @@ * @author UmbrichJ */ public 
class DTask extends EndpointTask { - public static final String header = - "application/rdf+xml, text/rdf, text/rdf+xml, application/rdf"; - public static final String EPURL = "EPURL"; - private static final Logger log = LoggerFactory.getLogger(DTask.class); - private static final ConnectionManager cm = new ConnectionManager(null, 0, null, null, 50); - private static final String sparqDescNS = "http://www.w3.org/ns/sparql-service-description#"; - private static final String voidNS = "http://rdfs.org/ns/void#"; - private static final String query = - "" - + "PREFIX rdf: \n" - + "PREFIX void: \n" - + "SELECT DISTINCT * \n" - + "WHERE {\n" - + "?ds a void:Dataset .\n" - + "?ds void:sparqlEndpoint %%s .\n" - + "?ds ?p ?o .\n" - + "}"; - - public DTask(Endpoint ep) { - super(ep); + public static final String header = + "application/rdf+xml, text/rdf, text/rdf+xml, application/rdf"; + public static final String EPURL = "EPURL"; + private static final Logger log = LoggerFactory.getLogger(DTask.class); + private static final ConnectionManager cm = new ConnectionManager(null, 0, null, null, 50); + private static final String sparqDescNS = "http://www.w3.org/ns/sparql-service-description#"; + private static final String voidNS = "http://rdfs.org/ns/void#"; + private static final String query = + "" + + "PREFIX rdf: \n" + + "PREFIX void: \n" + + "SELECT DISTINCT * \n" + + "WHERE {\n" + + "?ds a void:Dataset .\n" + + "?ds void:sparqlEndpoint %%s .\n" + + "?ds ?p ?o .\n" + + "}"; + + public DTask(Endpoint ep) { + super(ep); + } + + @Override + public DResult process(EndpointResult epr) { + DResult result = new DResult(); + result.setEndpointResult(epr); + log.debug("execute {}", _epURI); + + result.setDescriptionFiles((List) new ArrayList()); + + int failures = 0; + + // RobotsTXT run + log.debug("execute {} {}", "robots", _epURI); + RobotsTXT rtxt = new RobotsTXT(false, false, false, false, false, false, ""); + result.setRobotsTXT(rtxt); + + // check everytime for updated 
robots.txts + Robots rob = fetchRobotsTXT(); + + // get list of existing robots.txt + List r = new ArrayList(); + if (_dbm != null) { + r = _dbm.getResults(_ep, Robots.class, Robots.SCHEMA$); } - - @Override - public DResult process(EndpointResult epr) { - DResult result = new DResult(); - result.setEndpointResult(epr); - log.debug("execute {}", _epURI); - - result.setDescriptionFiles((List) new ArrayList()); - - int failures = 0; - - // RobotsTXT run - log.debug("execute {} {}", "robots", _epURI); - RobotsTXT rtxt = new RobotsTXT(false, false, false, false, false, false, ""); - result.setRobotsTXT(rtxt); - - // check everytime for updated robots.txts - Robots rob = fetchRobotsTXT(); - - // get list of existing robots.txt - List r = new ArrayList(); - if (_dbm != null) { - r = _dbm.getResults(_ep, Robots.class, Robots.SCHEMA$); - } - if (r.size() == 0 && _dbm != null) { - // first robots.txt test, insert into DB - _dbm.insert(rob); - } else if (_dbm != null) { - if (rob.getRespCode().toString().startsWith("5")) { - // there was a server error, try to get the last stored robots.txt - if (r.size() == 1) { - rob = r.get(0); - } - } else { - // update robots txt - _dbm.update(rob); - } - } - if (rob.getRespCode() == 200) rtxt.setHasRobotsTXT(true); - - boolean isRobotsAllowed = checkRobotsTxt(rob, rob.getEndpoint().getUri().toString()); - rtxt.setAllowedByRobotsTXT(isRobotsAllowed); - - log.debug("execute {} {}", "sitemap", _epURI); - // discovery void and sparqles via semantic sitemap.xml - // http://vocab.deri.ie/void/guide#sec_5_2_Discovery_via_sitemaps - parseSitemapXML(rob, rtxt, result); - - // inspect HTTP Get - // ok we checked the robots.txt, now we do a http get on the sparql URL - log.debug("execute {} {}", "httpget", _epURI); - try { - URI epURL = new URI(_ep.getUri().toString()); - DGETInfo info = checkForVoid(epURL.toString(), EPURL, rob); - result.getDescriptionFiles().add(info); - } catch (Exception e) { - log.debug("[EXC] HTTP GET " + _epURI, 
ExceptionHandler.logAndtoString(e, true)); - } - log.debug("execute {} {}", "well-known", _epURI); - try { - // well-known location - URI epURL = new URI(_ep.getUri().toString()); - URL wellknown = - new URI( - epURL.getScheme(), - epURL.getAuthority(), - "/.well-known/void", - null, - null) - .toURL(); - DGETInfo info = checkForVoid(wellknown.toString(), "wellknown", rob); - result.getDescriptionFiles().add(info); - } catch (Exception e) { - log.debug("[EXC] HTTP well known " + _epURI, e); + if (r.size() == 0 && _dbm != null) { + // first robots.txt test, insert into DB + _dbm.insert(rob); + } else if (_dbm != null) { + if (String.valueOf(rob.getRespCode()).startsWith("5")) { + // there was a server error, try to get the last stored robots.txt + if (r.size() == 1) { + rob = r.get(0); } + } else { + // update robots txt + _dbm.update(rob); + } + } + if (rob.getRespCode() == 200) rtxt.setHasRobotsTXT(true); + + boolean isRobotsAllowed = checkRobotsTxt(rob, rob.getEndpoint().getUri().toString()); + rtxt.setAllowedByRobotsTXT(isRobotsAllowed); + + log.debug("execute {} {}", "sitemap", _epURI); + // discovery void and sparqles via semantic sitemap.xml + // http://vocab.deri.ie/void/guide#sec_5_2_Discovery_via_sitemaps + parseSitemapXML(rob, rtxt, result); + + // inspect HTTP Get + // ok we checked the robots.txt, now we do a http get on the sparql URL + log.debug("execute {} {}", "httpget", _epURI); + try { + URI epURL = new URI(_ep.getUri().toString()); + DGETInfo info = checkForVoid(epURL.toString(), EPURL, rob); + result.getDescriptionFiles().add(info); + } catch (Exception e) { + log.debug("[EXC] HTTP GET " + _epURI, ExceptionHandler.logAndtoString(e, true)); + } + log.debug("execute {} {}", "well-known", _epURI); + try { + // well-known location + URI epURL = new URI(_ep.getUri().toString()); + URL wellknown = + new URI(epURL.getScheme(), epURL.getAuthority(), "/.well-known/void", null, null).toURL(); + DGETInfo info = checkForVoid(wellknown.toString(), 
"wellknown", rob); + result.getDescriptionFiles().add(info); + } catch (Exception e) { + log.debug("[EXC] HTTP well known " + _epURI, e); + } - List queryInfos = new ArrayList(); - result.setQueryInfo(queryInfos); + List queryInfos = new ArrayList(); + result.setQueryInfo(queryInfos); - log.debug("execute {} {}", "query-self", _epURI); - // maybe the endpoint has data about itself - queryInfos.add(query(_ep.getUri().toString(), "query-self")); + log.debug("execute {} {}", "query-self", _epURI); + // maybe the endpoint has data about itself + queryInfos.add(query(_ep.getUri().toString(), "query-self")); - log.info("executed {}", this); + log.info("executed {}", this); - return result; - } + return result; + } - private QueryInfo query(String epURL, String operation) { - QueryInfo info = new QueryInfo(); - info.setURL(epURL); - info.setOperation(operation); + private QueryInfo query(String epURL, String operation) { + QueryInfo info = new QueryInfo(); + info.setURL(epURL); + info.setOperation(operation); - String queryString = query.replaceAll("%%s", "<" + _ep.getUri() + ">"); + String queryString = query.replaceAll("%%s", "<" + _ep.getUri() + ">"); - HashSet voidAset = new HashSet(); + HashSet voidAset = new HashSet(); - ArrayList voidA = new ArrayList(); - info.setResults(voidA); + ArrayList voidA = new ArrayList(); + info.setResults(voidA); - // initializing queryExecution factory with remote service. - QueryExecution qexec = null; - try { - qexec = QueryManager.getExecution(epURL, queryString); + // initializing queryExecution factory with remote service. 
+ QueryExecution qexec = null; + try { + qexec = QueryManager.getExecution(epURL, queryString); - boolean results = false; + boolean results = false; - ResultSet resSet = qexec.execSelect(); - ResultSetRewindable reswind = ResultSetFactory.makeRewindable(resSet); + ResultSet resSet = qexec.execSelect(); + ResultSetRewindable reswind = ResultSetFactory.makeRewindable(resSet); - while (reswind.hasNext()) { - RDFNode dataset = reswind.next().get("ds"); - voidAset.add(dataset.toString()); - } + while (reswind.hasNext()) { + RDFNode dataset = reswind.next().get("ds"); + voidAset.add(dataset.toString()); + } - voidA.addAll(voidAset); - log.info("Found {} results", reswind.getRowNumber()); - } catch (Exception e1) { - info.setException(ExceptionHandler.logAndtoString(e1)); - log.debug("[EXEC] SPARQL query to " + epURL + " for " + _epURI, e1); - } finally { - if (qexec != null) qexec.close(); - } - return info; + voidA.addAll(voidAset); + log.info("Found {} results", reswind.getRowNumber()); + } catch (Exception e1) { + info.setException(ExceptionHandler.logAndtoString(e1)); + log.debug("[EXEC] SPARQL query to " + epURL + " for " + _epURI, e1); + } finally { + if (qexec != null) qexec.close(); } - - // for void - // sc:sparqlEndpointLocation - - /** - * Find information about sitemap.xml in robots.txt by parsing the robots.txt content for the - * "Sitemap:" value. 
Next, retrieve the sitemap.xml and look for sc:sparqlEndpointLocation - * - * @param rob - * @param rtxt - * @param result - */ - private void parseSitemapXML(Robots rob, RobotsTXT rtxt, DResult result) { - String robotsContent = rob.getContent().toString(); - - URL sitemapURL = null; - if (robotsContent != null) { - BufferedReader bufReader = new BufferedReader(new StringReader(robotsContent)); - String line = null; - try { - while ((line = bufReader.readLine()) != null) { - if (line.trim().startsWith("Sitemap")) { - - sitemapURL = new URL(line.substring(line.indexOf(":") + 1).trim()); - break; - } - } - } catch (IOException e) { - e.printStackTrace(); - } - } - if (sitemapURL != null) { - rtxt.setSitemapXML(true); - HttpGet get = null; - try { - - get = new HttpGet(sitemapURL.toURI()); - HttpResponse resp = cm.connect(get); - log.debug("parseSitemapXML: Connected to {}", get); - String conent = EntityUtils.toString(resp.getEntity()); - log.debug("parseSitemapXML: received content {}", conent.length()); - - SAXParserFactory factory = SAXParserFactory.newInstance(); - SAXParser saxParser = factory.newSAXParser(); - saxParser.getXMLReader().setFeature("http://xml.org/sax/features/namespaces", true); - InputStream is = new ByteArrayInputStream(conent.getBytes()); - SitemapHandler handler = new SitemapHandler(rtxt, result, _ep.getUri().toString()); - saxParser.parse(is, handler); - // - // // System.out.println(conent); - // DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - // dbFactory.setNamespaceAware(true); - // DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - // Document doc = dBuilder.parse(new ByteArrayInputStream(conent.getBytes())); - // doc.getDocumentElement().normalize(); - // - // NodeList nodeList = - // doc.getElementsByTagNameNS("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd", - // "sparqlEndpointLocation"); - // rtxt.setSitemapXMLSPARQL(nodeList.getLength()!=0); - // for (int temp = 0; temp < 
nodeList.getLength(); temp++) { - // Node nNode = nodeList.item(temp); - //// System.out.println(nNode.getTextContent()); - // if(_ep.getUri().toString().equalsIgnoreCase(nNode.getTextContent())){ - // rtxt.setSitemapXMLSPARQLMatch(true); - // } - // } - // nodeList = - // doc.getElementsByTagNameNS("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd", - // "datasetURI"); - // for (int temp = 0; temp < nodeList.getLength(); temp++) { - // Node nNode = nodeList.item(temp); - // DGETInfo info = checkForVoid(nNode.getTextContent(), "sitemap.xml_link",rob); - // result.getDescriptionFiles().add(info); - // } - } catch (Exception e) { - log.debug("[EXEC] Sitemap for " + _epURI, e); - rtxt.setException(ExceptionHandler.logAndtoString(e)); - } finally { - if (get != null) { - get.releaseConnection(); - } - } + return info; + } + + // for void + // sc:sparqlEndpointLocation + + /** + * Find information about sitemap.xml in robots.txt by parsing the robots.txt content for the + * "Sitemap:" value. 
Next, retrieve the sitemap.xml and look for sc:sparqlEndpointLocation + * + * @param rob + * @param rtxt + * @param result + */ + private void parseSitemapXML(Robots rob, RobotsTXT rtxt, DResult result) { + String robotsContent = rob.getContent().toString(); + + URL sitemapURL = null; + if (robotsContent != null) { + BufferedReader bufReader = new BufferedReader(new StringReader(robotsContent)); + String line = null; + try { + while ((line = bufReader.readLine()) != null) { + if (line.trim().startsWith("Sitemap")) { + + sitemapURL = new URL(line.substring(line.indexOf(":") + 1).trim()); + break; + } } + } catch (IOException e) { + e.printStackTrace(); + } } - - private DGETInfo checkForVoid(String url, String operation, Robots rob) { - DGETInfo info = new DGETInfo(); - info.setOperation(operation); - info.setURL(url); - info.setResponseServer("missing"); - - boolean isRobotsAllowed = checkRobotsTxt(rob, url); - info.setAllowedByRobotsTXT(isRobotsAllowed); - - HashMap voidPred = new HashMap(); - HashMap spdsPred = new HashMap(); - info.setSPARQLDESCpreds(spdsPred); - info.setVoiDpreds(voidPred); - - HttpGet request = new HttpGet(url); - request.addHeader( - "accept", - "application/rdf+xml, application/x-turtle, application/rdf+n3, application/xml," - + " text/turtle, text/rdf, text/plain;q=0.1"); - request.addHeader("User-Agent", CONSTANTS.USER_AGENT); - log.info("GET {}", request); - HttpResponse resp; - try { - resp = cm.connect(request); - log.debug("checkForVoid: Connected to {}", request); - String type = getType(resp); - - String status = "" + resp.getStatusLine().getStatusCode(); - info.setResponseCode(status); - - Header[] header = resp.getAllHeaders(); - - // 1) CHeck the header for information - parseHeaders(info, header); - if (status.startsWith("2")) { - String content = EntityUtils.toString(resp.getEntity()); - content = content.replaceAll("\t", " "); - info.setContent(content); - - var tripleIter = AsyncParser.asyncParseTriples(url); - - 
tripleIter.forEachRemaining( - t -> { - String pred = t.getPredicate().toString(); - if (pred.startsWith(sparqDescNS)) { - update(pred.replace(sparqDescNS, ""), spdsPred); - } else if (pred.startsWith(voidNS)) { - update(pred.replace(voidNS, ""), voidPred); - } - }); - } - } catch (Exception e) { - log.warn( - "failed checking for VOID " + url + " for " + _epURI, - ExceptionHandler.logAndtoString(e, true)); - info.setException(ExceptionHandler.logAndtoString(e)); - } finally { - request.releaseConnection(); + if (sitemapURL != null) { + rtxt.setSitemapXML(true); + HttpGet get = null; + try { + + get = new HttpGet(sitemapURL.toURI()); + HttpResponse resp = cm.connect(get); + log.debug("parseSitemapXML: Connected to {}", get); + String conent = EntityUtils.toString(resp.getEntity()); + log.debug("parseSitemapXML: received content {}", conent.length()); + + SAXParserFactory factory = SAXParserFactory.newInstance(); + SAXParser saxParser = factory.newSAXParser(); + saxParser.getXMLReader().setFeature("http://xml.org/sax/features/namespaces", true); + InputStream is = new ByteArrayInputStream(conent.getBytes()); + SitemapHandler handler = new SitemapHandler(rtxt, result, _ep.getUri().toString()); + saxParser.parse(is, handler); + // + // // System.out.println(conent); + // DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + // dbFactory.setNamespaceAware(true); + // DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); + // Document doc = dBuilder.parse(new ByteArrayInputStream(conent.getBytes())); + // doc.getDocumentElement().normalize(); + // + // NodeList nodeList = + // doc.getElementsByTagNameNS("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd", + // "sparqlEndpointLocation"); + // rtxt.setSitemapXMLSPARQL(nodeList.getLength()!=0); + // for (int temp = 0; temp < nodeList.getLength(); temp++) { + // Node nNode = nodeList.item(temp); + //// System.out.println(nNode.getTextContent()); + // 
if(_ep.getUri().toString().equalsIgnoreCase(nNode.getTextContent())){ + // rtxt.setSitemapXMLSPARQLMatch(true); + // } + // } + // nodeList = + // doc.getElementsByTagNameNS("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd", + // "datasetURI"); + // for (int temp = 0; temp < nodeList.getLength(); temp++) { + // Node nNode = nodeList.item(temp); + // DGETInfo info = checkForVoid(nNode.getTextContent(), "sitemap.xml_link",rob); + // result.getDescriptionFiles().add(info); + // } + } catch (Exception e) { + log.debug("[EXEC] Sitemap for " + _epURI, e); + rtxt.setException(ExceptionHandler.logAndtoString(e)); + } finally { + if (get != null) { + get.releaseConnection(); } - return info; + } + } + } + + private DGETInfo checkForVoid(String url, String operation, Robots rob) { + DGETInfo info = new DGETInfo(); + info.setOperation(operation); + info.setURL(url); + info.setResponseServer("missing"); + + boolean isRobotsAllowed = checkRobotsTxt(rob, url); + info.setAllowedByRobotsTXT(isRobotsAllowed); + + HashMap voidPred = new HashMap(); + HashMap spdsPred = new HashMap(); + info.setSPARQLDESCpreds(spdsPred); + info.setVoiDpreds(voidPred); + + HttpGet request = new HttpGet(url); + request.addHeader("accept", CONSTANTS.ANY_RDF_MIME_ACCEPT); + request.addHeader("User-Agent", SPARQLESProperties.getUserAgent()); + log.info("GET {}", request); + HttpResponse resp; + try { + resp = cm.connect(request); + log.debug("checkForVoid: Connected to {}", request); + String type = getType(resp); + + String status = "" + resp.getStatusLine().getStatusCode(); + info.setResponseCode(status); + + Header[] header = resp.getAllHeaders(); + + // 1) CHeck the header for information + parseHeaders(info, header); + if (status.startsWith("2")) { + String content = EntityUtils.toString(resp.getEntity()); + content = content.replaceAll("\t", " "); + info.setContent(content); + + var tripleIter = AsyncParser.asyncParseTriples(url); + + tripleIter.forEachRemaining( + t -> { + String pred = 
t.getPredicate().toString(); + if (pred.startsWith(sparqDescNS)) { + update(pred.replace(sparqDescNS, ""), spdsPred); + } else if (pred.startsWith(voidNS)) { + update(pred.replace(voidNS, ""), voidPred); + } + }); + } + } catch (Exception e) { + log.warn( + "failed checking for VOID " + url + " for " + _epURI, + ExceptionHandler.logAndtoString(e, true)); + info.setException(ExceptionHandler.logAndtoString(e)); + } finally { + request.releaseConnection(); + } + return info; + } + + private Robots fetchRobotsTXT() { + Robots rob = new Robots(); + rob.setEndpoint(_ep); + rob.setRespCode(-1); + rob.setContent(""); + URI robotsOnHost; + URI host = null; + try { + host = new URI(_ep.getUri().toString()); + robotsOnHost = new URI(host.getScheme(), host.getAuthority(), "/robots.txt", null, null); + } catch (URISyntaxException e) { + log.debug("[EXEC] ROBOTS for " + _epURI, e); + rob.setException(ExceptionHandler.logAndtoString(e)); + return rob; } - private Robots fetchRobotsTXT() { - Robots rob = new Robots(); - rob.setEndpoint(_ep); - rob.setRespCode(-1); - rob.setContent(""); - URI robotsOnHost; - URI host = null; - try { - host = new URI(_ep.getUri().toString()); - robotsOnHost = - new URI(host.getScheme(), host.getAuthority(), "/robots.txt", null, null); - } catch (URISyntaxException e) { - log.debug("[EXEC] ROBOTS for " + _epURI, e); - rob.setException(ExceptionHandler.logAndtoString(e)); - return rob; - } + HttpGet hget = new HttpGet(robotsOnHost); + HttpResponse hres; + try { + hres = cm.connect(hget); + HttpEntity hen = hres.getEntity(); - HttpGet hget = new HttpGet(robotsOnHost); - HttpResponse hres; - try { - hres = cm.connect(hget); - HttpEntity hen = hres.getEntity(); - - int status = hres.getStatusLine().getStatusCode(); - rob.setRespCode(status); - if (status == 200) { - if (hen != null) { - - String content = EntityUtils.toString(hen); - rob.setContent(content); - } - } - hget.abort(); - } catch (Exception e1) { - log.debug("[EXEC] ROBOTS for " + _epURI, 
e1); - rob.setException(ExceptionHandler.logAndtoString(e1)); - } finally { - hget.releaseConnection(); - } - return rob; - } + int status = hres.getStatusLine().getStatusCode(); + rob.setRespCode(status); + if (status == 200) { + if (hen != null) { - private boolean checkRobotsTxt(Robots rob, String uri) { - NoRobotClient _nrc = new NoRobotClient(CONSTANTS.USER_AGENT); - - try { - URI host = new URI(uri); - try { - if (!((host.getPath() == null || host.getPath().equals("")) - && host.getQuery() == null - && host.getFragment() == null)) - // If the URI host comes for whatever reason with - // path, query, or fragment, strip it. - _nrc.parse( - rob.getContent().toString(), - (new URI(host.getScheme(), host.getAuthority(), null, null, null)) - .toURL()); - else _nrc.parse(rob.getContent().toString(), host.toURL()); - - } catch (NoRobotException e) { - log.debug("no robots.txt for " + host); - return true; - } - return _nrc.isUrlAllowed(host.toURL()); - - } catch (Exception e1) { - log.warn( - "failed checking for ROBOTS PARSE for " + _epURI, - ExceptionHandler.logAndtoString(e1, true)); + String content = EntityUtils.toString(hen); + rob.setContent(content); } - return true; + } + hget.abort(); + } catch (Exception e1) { + log.debug("[EXEC] ROBOTS for " + _epURI, e1); + rob.setException(ExceptionHandler.logAndtoString(e1)); + } finally { + hget.releaseConnection(); } + return rob; + } + + private boolean checkRobotsTxt(Robots rob, String uri) { + NoRobotClient _nrc = new NoRobotClient(CONSTANTS.USER_AGENT_STRING_RAW); + + try { + URI host = new URI(uri); + try { + if (!((host.getPath() == null || host.getPath().equals("")) + && host.getQuery() == null + && host.getFragment() == null)) + // If the URI host comes for whatever reason with + // path, query, or fragment, strip it. 
+ _nrc.parse( + rob.getContent().toString(), + (new URI(host.getScheme(), host.getAuthority(), null, null, null)).toURL()); + else _nrc.parse(rob.getContent().toString(), host.toURL()); + + } catch (NoRobotException e) { + log.debug("no robots.txt for " + host); + return true; + } + return _nrc.isUrlAllowed(host.toURL()); - private void parseHeaders(DGETInfo info, Header[] header) { - for (int i = 0; i < header.length; i++) { - String name = header[i].getName(); - if (name.equals("Content-Type")) { - info.setResponseType(new Utf8(header[i].getValue())); - } - if (name.equals("Server")) { - info.setResponseServer(parseServer(header[i].getValue())); - } - if (name.equals("Link")) { - info.setResponseLink(parseServer(header[i].getValue())); - } - } + } catch (Exception e1) { + log.warn( + "failed checking for ROBOTS PARSE for " + _epURI, + ExceptionHandler.logAndtoString(e1, true)); } + return true; + } + + private void parseHeaders(DGETInfo info, Header[] header) { + for (int i = 0; i < header.length; i++) { + String name = header[i].getName(); + if (name.equals("Content-Type")) { + info.setResponseType(new Utf8(header[i].getValue())); + } + if (name.equals("Server")) { + info.setResponseServer(parseServer(header[i].getValue())); + } + if (name.equals("Link")) { + info.setResponseLink(parseServer(header[i].getValue())); + } + } + } - private Lang getLangFromType(String type) { - if (type.contains("application/x-turtle") || type.contains("text/turtle")) return Lang.TTL; - if (type.contains("application/rdf+xml") || type.contains("application/xml")) - return Lang.RDFXML; - if (type.contains("text/plain")) return Lang.RDFXML; - if (type.contains("text/rdf+n3")) return Lang.N3; + private Lang getLangFromType(String type) { + if (type.contains("application/x-turtle") || type.contains("text/turtle")) return Lang.TTL; + if (type.contains("application/rdf+xml") || type.contains("application/xml")) + return Lang.RDFXML; + if (type.contains("text/plain")) return Lang.RDFXML; + 
if (type.contains("text/rdf+n3")) return Lang.N3; - return Lang.RDFXML; - } + return Lang.RDFXML; + } - private CharSequence parseServer(String value) { - String server = value.trim(); - if (server.contains("/")) server = server.substring(0, server.indexOf("/")); + private CharSequence parseServer(String value) { + String server = value.trim(); + if (server.contains("/")) server = server.substring(0, server.indexOf("/")); - if (server.contains("(")) server = server.substring(0, server.indexOf("(")); + if (server.contains("(")) server = server.substring(0, server.indexOf("(")); - return new Utf8(server); - } + return new Utf8(server); + } - private void update(CharSequence key, Map map) { - if (map.containsKey(key)) map.put(key, ((Integer) map.get(key)) + 1); - else map.put(key, 1); - } + private void update(CharSequence key, Map map) { + if (map.containsKey(key)) map.put(key, ((Integer) map.get(key)) + 1); + else map.put(key, 1); + } - private String getType(HttpResponse response) { - String type = ""; - org.apache.http.Header ct = response.getFirstHeader("Content-Type"); - if (ct != null) { - type = response.getFirstHeader("Content-Type").getValue(); - } - return type; + private String getType(HttpResponse response) { + String type = ""; + org.apache.http.Header ct = response.getFirstHeader("Content-Type"); + if (ct != null) { + type = response.getFirstHeader("Content-Type").getValue(); } + return type; + } } diff --git a/backend/src/main/java/sparqles/core/discovery/SitemapHandler.java b/backend/src/main/java/sparqles/core/discovery/SitemapHandler.java index 3eea69a5..1520df08 100644 --- a/backend/src/main/java/sparqles/core/discovery/SitemapHandler.java +++ b/backend/src/main/java/sparqles/core/discovery/SitemapHandler.java @@ -9,70 +9,70 @@ import sparqles.avro.discovery.RobotsTXT; public class SitemapHandler extends DefaultHandler { - private static final Logger log = LoggerFactory.getLogger(SitemapHandler.class); + private static final Logger log = 
LoggerFactory.getLogger(SitemapHandler.class); - boolean sparqlEP = false, datasetURI = false; - private RobotsTXT _rtxt; - private DResult _result; - private String _epURI; + boolean sparqlEP = false, datasetURI = false; + private RobotsTXT _rtxt; + private DResult _result; + private String _epURI; - public SitemapHandler(RobotsTXT rtxt, DResult result, String epURI) { - _rtxt = rtxt; - _result = result; - _epURI = epURI; - } + public SitemapHandler(RobotsTXT rtxt, DResult result, String epURI) { + _rtxt = rtxt; + _result = result; + _epURI = epURI; + } - @Override - public void startPrefixMapping(String prefix, String uri) throws SAXException { - System.out.println("Prefix: " + prefix + " uri: " + uri); - } + @Override + public void startPrefixMapping(String prefix, String uri) throws SAXException { + System.out.println("Prefix: " + prefix + " uri: " + uri); + } - @Override - public void startElement(String uri, String localName, String qName, Attributes attributes) - throws SAXException { - super.startElement(uri, localName, qName, attributes); - sparqlEP = - uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") - && localName.equalsIgnoreCase("sparqlEndpointLocation"); - datasetURI = - uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") - && localName.equalsIgnoreCase("datasetURI"); + @Override + public void startElement(String uri, String localName, String qName, Attributes attributes) + throws SAXException { + super.startElement(uri, localName, qName, attributes); + sparqlEP = + uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") + && localName.equalsIgnoreCase("sparqlEndpointLocation"); + datasetURI = + uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") + && localName.equalsIgnoreCase("datasetURI"); - // if(sparqlEP || datasetURI) - // System.out.println("Start: "+uri+ " lname:"+localName+" qName:"+qName + " attr:"+ - // attributes); - if 
(sparqlEP) _rtxt.setSitemapXMLSPARQL(true); - } + // if(sparqlEP || datasetURI) + // System.out.println("Start: "+uri+ " lname:"+localName+" qName:"+qName + " attr:"+ + // attributes); + if (sparqlEP) _rtxt.setSitemapXMLSPARQL(true); + } - @Override - public void endElement(String uri, String localName, String qName) throws SAXException { - if (sparqlEP - && uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") - && localName.equalsIgnoreCase("sparqlEndpointLocation")) { - // System.out.println("end: "+uri + " lname:"+localName+" qName:"+qName ); - sparqlEP = false; - } - if (datasetURI - && uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") - && localName.equalsIgnoreCase("datasetURI")) { - // System.out.println("end: "+uri + " lname:"+localName+" qName:"+qName ); - datasetURI = false; - } + @Override + public void endElement(String uri, String localName, String qName) throws SAXException { + if (sparqlEP + && uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") + && localName.equalsIgnoreCase("sparqlEndpointLocation")) { + // System.out.println("end: "+uri + " lname:"+localName+" qName:"+qName ); + sparqlEP = false; } + if (datasetURI + && uri.equalsIgnoreCase("http://sw.deri.org/2007/07/sitemapextension/scschema.xsd") + && localName.equalsIgnoreCase("datasetURI")) { + // System.out.println("end: "+uri + " lname:"+localName+" qName:"+qName ); + datasetURI = false; + } + } - @Override - public void characters(char[] ch, int start, int length) throws SAXException { - if (sparqlEP) { - String ep = new String(new String(ch, start, length)); + @Override + public void characters(char[] ch, int start, int length) throws SAXException { + if (sparqlEP) { + String ep = new String(new String(ch, start, length)); - boolean match = _epURI.equals(ep); - log.debug("sparqlEP : " + new String(ch, start, length) + " match:" + match); - // check if the URL matches - 
_rtxt.setSitemapXMLSPARQLMatch(match); - } - if (datasetURI) { - // TODO look if that is a voiD file, might be very expensive - log.debug("datasetURI : " + new String(ch, start, length)); - } + boolean match = _epURI.equals(ep); + log.debug("sparqlEP : " + new String(ch, start, length) + " match:" + match); + // check if the URL matches + _rtxt.setSitemapXMLSPARQLMatch(match); + } + if (datasetURI) { + // TODO look if that is a voiD file, might be very expensive + log.debug("datasetURI : " + new String(ch, start, length)); } + } } diff --git a/backend/src/main/java/sparqles/core/interoperability/FRun.java b/backend/src/main/java/sparqles/core/interoperability/FRun.java index bdebc0fc..6a9296e9 100644 --- a/backend/src/main/java/sparqles/core/interoperability/FRun.java +++ b/backend/src/main/java/sparqles/core/interoperability/FRun.java @@ -8,25 +8,25 @@ public class FRun extends TaskRun { - private static final Logger log = LoggerFactory.getLogger(FRun.class); + private static final Logger log = LoggerFactory.getLogger(FRun.class); - public FRun(Endpoint ep, String queryFile) { - this(ep, queryFile, System.currentTimeMillis()); - } + public FRun(Endpoint ep, String queryFile) { + this(ep, queryFile, System.currentTimeMillis()); + } - public FRun(Endpoint ep, String queryFile, Long start) { - super(ep, queryFile, SPARQLESProperties.getFTASK_QUERIES(), start, log); - } + public FRun(Endpoint ep, String queryFile, Long start) { + super(ep, queryFile, SPARQLESProperties.getFTASK_QUERIES(), start, log); + } - public FSingleResult execute() { - FSingleResult result = new FSingleResult(); + public FSingleResult execute() { + FSingleResult result = new FSingleResult(); - result.setQuery(_query); + result.setQuery(_query); - log.debug("[run] {} over {}", _queryFile, _ep.getUri()); + log.debug("[run] {} over {}", _queryFile, _ep.getUri()); - result.setRun(run()); + result.setRun(run()); - return result; - } + return result; + } } diff --git 
a/backend/src/main/java/sparqles/core/interoperability/FTask.java b/backend/src/main/java/sparqles/core/interoperability/FTask.java index 3972d529..abb94f32 100644 --- a/backend/src/main/java/sparqles/core/interoperability/FTask.java +++ b/backend/src/main/java/sparqles/core/interoperability/FTask.java @@ -16,89 +16,83 @@ public class FTask extends EndpointTask { - private static final Logger log = LoggerFactory.getLogger(FTask.class); - - String query; - PrintStream out; - - Exception query_exc; - private SpecificFTask[] _tasks; - - public FTask(Endpoint ep, SpecificFTask... tasks) { - super(ep); - _tasks = tasks; - Object[] s = {ep.getUri().toString(), tasks.length, SPARQLESProperties.getFTASK_WAITTIME()}; - log.debug( - "INIT {} with {} tasks and {} ms wait time", - this, - tasks.length, - SPARQLESProperties.getPTASK_WAITTIME()); - } - - @Override - public FResult process(EndpointResult epr) { - FResult res = new FResult(); - res.setEndpointResult(epr); - - Map results = - new HashMap(_tasks.length); - - int failures = 0, consequExcept = 0; - for (SpecificFTask sp : _tasks) { - log.debug("execute {}:{}", this, sp.name()); - - FRun run = sp.get(epr); - FSingleResult fres = null; - if (consequExcept >= _tasks.length) { - log.debug("skipping {}:{} due to {} consecutive ex ", this, sp.name()); - fres = new FSingleResult(); - - Run r = - new Run( - PRun.A_FIRST_RESULT_TIMEOUT, - -1, - 0L, - 0L, - 0L, - (CharSequence) - ("Test Aborted due to " - + consequExcept - + " consecutive exceptions"), - PRun.EXECUTION_TIMEOUT); - fres.setRun(r); - fres.setQuery(run.getQuery()); - } else { - log.debug("executing {}:{}", this, sp.name()); - fres = run.execute(); - } - - results.put(sp.name(), fres); - - if (fres.getRun().getException() != null) { - failures++; - - String exec = fres.getRun().getException().toString(); - if (exec.contains("QueryExceptionHTTP") || exec.contains("HttpException")) { - consequExcept++; - } else { - consequExcept = 0; - } - - log.debug("failed {} 
exec: {}", this, exec); - } - try { - Thread.sleep(SPARQLESProperties.getFTASK_WAITTIME()); - } catch (InterruptedException e) { - e.printStackTrace(); - } + private static final Logger log = LoggerFactory.getLogger(FTask.class); + + String query; + PrintStream out; + + Exception query_exc; + private SpecificFTask[] _tasks; + + public FTask(Endpoint ep, SpecificFTask... tasks) { + super(ep); + _tasks = tasks; + Object[] s = {ep.getUri().toString(), tasks.length, SPARQLESProperties.getFTASK_WAITTIME()}; + log.debug( + "INIT {} with {} tasks and {} ms wait time", + this, + tasks.length, + SPARQLESProperties.getPTASK_WAITTIME()); + } + + @Override + public FResult process(EndpointResult epr) { + FResult res = new FResult(); + res.setEndpointResult(epr); + + Map results = + new HashMap(_tasks.length); + + int failures = 0, consequExcept = 0; + for (SpecificFTask sp : _tasks) { + log.debug("execute {}:{}", this, sp.name()); + + FRun run = sp.get(epr); + FSingleResult fres = null; + if (consequExcept >= _tasks.length) { + log.debug("skipping {}:{} due to {} consecutive ex ", this, sp.name()); + fres = new FSingleResult(); + + Run r = + new Run( + PRun.A_FIRST_RESULT_TIMEOUT, + -1, + 0L, + 0L, + 0L, + (CharSequence) ("Test Aborted due to " + consequExcept + " consecutive exceptions"), + PRun.EXECUTION_TIMEOUT); + fres.setRun(r); + fres.setQuery(run.getQuery()); + } else { + log.debug("executing {}:{}", this, sp.name()); + fres = run.execute(); + } + + results.put(sp.name(), fres); + + if (fres.getRun().getException() != null) { + failures++; + + String exec = fres.getRun().getException().toString(); + if (exec.contains("QueryExceptionHTTP") || exec.contains("HttpException")) { + consequExcept++; + } else { + consequExcept = 0; } - res.setResults(results); - log.info( - "executed {} {}/{} tasks without error", - this, - _tasks.length - failures, - _tasks.length); - - return res; + + log.debug("failed {} exec: {}", this, exec); + } + try { + 
Thread.sleep(SPARQLESProperties.getFTASK_WAITTIME()); + } catch (InterruptedException e) { + e.printStackTrace(); + } } + res.setResults(results); + log.info( + "executed {} {}/{} tasks without error", this, _tasks.length - failures, _tasks.length); + + return res; + } } diff --git a/backend/src/main/java/sparqles/core/interoperability/SpecificFTask.java b/backend/src/main/java/sparqles/core/interoperability/SpecificFTask.java index 475d5bab..68017715 100644 --- a/backend/src/main/java/sparqles/core/interoperability/SpecificFTask.java +++ b/backend/src/main/java/sparqles/core/interoperability/SpecificFTask.java @@ -6,83 +6,83 @@ import sparqles.avro.EndpointResult; public enum SpecificFTask { - /* - */ - SPARQL1_ASK("sparql10/ASK[.].rq"), - SPARQL1_CON("sparql10/CON[.].rq"), - SPARQL1_CONJOIN("sparql10/CON[JOIN].rq"), - SPARQL1_CONOPT("sparql10/CON[OPT].rq"), - SPARQL1_SELDISTINCT("sparql10/SEL-DISTINCT[.].rq"), - SPARQL1_SELREDUCED("sparql10/SEL-REDUCED[.].rq"), - SPARQL1_SEL("sparql10/SEL[.].rq"), - SPARQL1_SELBNODE("sparql10/SEL[BNODE].rq"), - SPARQL1_SELEMPTY("sparql10/SEL[EMPTY].rq"), - SPARQL1_SELFILBOUND("sparql10/SEL[FIL(!BOUND)].rq"), - SPARQL1_SELFILBLANK("sparql10/SEL[FIL(BLANK)].rq"), - SPARQL1_SELFILBOOL("sparql10/SEL[FIL(BOOL)].rq"), - SPARQL1_SELFILIRI("sparql10/SEL[FIL(IRI)].rq.rq"), - SPARQL1_SELFILNUM("sparql10/SEL[FIL(NUM)].rq"), - SPARQL1_SELFILREGEXI("sparql10/SEL[FIL(REGEX-i)].rq"), - SPARQL1_SELFILREGEX("sparql10/SEL[FIL(REGEX)].rq"), - SPARQL1_SELFILSTR("sparql10/SEL[FIL(STR)].rq"), - SPARQL1_SELFROM("sparql10/SEL[FROM].rq"), - SPARQL1_SELGRAPHJOIN("sparql10/SEL[GRAPH;JOIN].rq"), - SPARQL1_SELGRAPHUNION("sparql10/SEL[GRAPH;UNION].rq"), - SPARQL1_SELGRAPH("sparql10/SEL[GRAPH].rq"), - SPARQL1_SELJOIN("sparql10/SEL[JOIN].rq"), - SPARQL1_SELOPT("sparql10/SEL[OPT].rq"), - SPARQL1_SELUNION("sparql10/SEL[UNION].rq"), + /* + */ + SPARQL1_ASK("sparql10/ASK[.].rq"), + SPARQL1_CON("sparql10/CON[.].rq"), + SPARQL1_CONJOIN("sparql10/CON[JOIN].rq"), + 
SPARQL1_CONOPT("sparql10/CON[OPT].rq"), + SPARQL1_SELDISTINCT("sparql10/SEL-DISTINCT[.].rq"), + SPARQL1_SELREDUCED("sparql10/SEL-REDUCED[.].rq"), + SPARQL1_SEL("sparql10/SEL[.].rq"), + SPARQL1_SELBNODE("sparql10/SEL[BNODE].rq"), + SPARQL1_SELEMPTY("sparql10/SEL[EMPTY].rq"), + SPARQL1_SELFILBOUND("sparql10/SEL[FIL(!BOUND)].rq"), + SPARQL1_SELFILBLANK("sparql10/SEL[FIL(BLANK)].rq"), + SPARQL1_SELFILBOOL("sparql10/SEL[FIL(BOOL)].rq"), + SPARQL1_SELFILIRI("sparql10/SEL[FIL(IRI)].rq.rq"), + SPARQL1_SELFILNUM("sparql10/SEL[FIL(NUM)].rq"), + SPARQL1_SELFILREGEXI("sparql10/SEL[FIL(REGEX-i)].rq"), + SPARQL1_SELFILREGEX("sparql10/SEL[FIL(REGEX)].rq"), + SPARQL1_SELFILSTR("sparql10/SEL[FIL(STR)].rq"), + SPARQL1_SELFROM("sparql10/SEL[FROM].rq"), + SPARQL1_SELGRAPHJOIN("sparql10/SEL[GRAPH;JOIN].rq"), + SPARQL1_SELGRAPHUNION("sparql10/SEL[GRAPH;UNION].rq"), + SPARQL1_SELGRAPH("sparql10/SEL[GRAPH].rq"), + SPARQL1_SELJOIN("sparql10/SEL[JOIN].rq"), + SPARQL1_SELOPT("sparql10/SEL[OPT].rq"), + SPARQL1_SELUNION("sparql10/SEL[UNION].rq"), - // SPARQL1_SELORDERBYASC ("sparql10/SEL[.]*ORDERBY-ASC.rq"), - // SPARQL1_SELORDERBYDESC ("sparql10/SEL[.]*ORDERBY-DESC.rq"), - // SPARQL1_SELORDERBY ("sparql10/SEL[.]*ORDERBY.rq"), - // SPARQL1_SELORDERBYOFFSET("sparql10/SEL[.]*ORDERBY*OFFSET.rq"), + // SPARQL1_SELORDERBYASC ("sparql10/SEL[.]*ORDERBY-ASC.rq"), + // SPARQL1_SELORDERBYDESC ("sparql10/SEL[.]*ORDERBY-DESC.rq"), + // SPARQL1_SELORDERBY ("sparql10/SEL[.]*ORDERBY.rq"), + // SPARQL1_SELORDERBYOFFSET("sparql10/SEL[.]*ORDERBY*OFFSET.rq"), - SPARQL11_ASKFILNIN("sparql11/ASK[FIL(!IN)].rq"), - SPARQL11_CON("sparql11/CON-[.].rq"), - SPARQL11_SELAVG("sparql11/SEL[AVG].rq"), - SPARQL11_SELBIND("sparql11/SEL[BIND].rq"), - SPARQL11_SELFILNEXISTS("sparql11/SEL[FIL(!EXISTS)].rq"), - SPARQL11_SELFILABS("sparql11/SEL[FIL(ABS)].rq"), - SPARQL11_SELFILCONTAINS("sparql11/SEL[FIL(CONTAINS)].rq"), - SPARQL11_SELFILEXISTS("sparql11/SEL[FIL(EXISTS)].rq"), - SPARQL11_SELFILSTART("sparql11/SEL[FIL(START)].rq"), 
- SPARQL11_SELMAX("sparql11/SEL[MAX].rq"), - SPARQL11_SELMIN("sparql11/SEL[MIN].rq"), - SPARQL11_SELMINUS("sparql11/SEL[MINUS].rq"), - SPARQL11_SELPATH("sparql11/SEL[PATHS].rq"), - SPARQL11_SELSERVICE("sparql11/SEL[SERVICE].rq"), - SPARQL11_SELSUBQGRAPH("sparql11/SEL[SUBQ;GRAPH].rq"), - SPARQL11_SELSUBQ("sparql11/SEL[SUBQ].rq"), - SPARQL11_SELSUM("sparql11/SEL[SUM].rq"), - SPARQL11_SELVALUES("sparql11/SEL[VALUES].rq"); - // SPARQL11_SELAVGGROUPBY ("sparql11/SEL[AVG]*GROUPBY.rq"), - // SPARQL11_SELCOUNTGROUPBY ("sparql11/SEL[COUNT]*GROUPBY.rq"); + SPARQL11_ASKFILNIN("sparql11/ASK[FIL(!IN)].rq"), + SPARQL11_CON("sparql11/CON-[.].rq"), + SPARQL11_SELAVG("sparql11/SEL[AVG].rq"), + SPARQL11_SELBIND("sparql11/SEL[BIND].rq"), + SPARQL11_SELFILNEXISTS("sparql11/SEL[FIL(!EXISTS)].rq"), + SPARQL11_SELFILABS("sparql11/SEL[FIL(ABS)].rq"), + SPARQL11_SELFILCONTAINS("sparql11/SEL[FIL(CONTAINS)].rq"), + SPARQL11_SELFILEXISTS("sparql11/SEL[FIL(EXISTS)].rq"), + SPARQL11_SELFILSTART("sparql11/SEL[FIL(START)].rq"), + SPARQL11_SELMAX("sparql11/SEL[MAX].rq"), + SPARQL11_SELMIN("sparql11/SEL[MIN].rq"), + SPARQL11_SELMINUS("sparql11/SEL[MINUS].rq"), + SPARQL11_SELPATH("sparql11/SEL[PATHS].rq"), + SPARQL11_SELSERVICE("sparql11/SEL[SERVICE].rq"), + SPARQL11_SELSUBQGRAPH("sparql11/SEL[SUBQ;GRAPH].rq"), + SPARQL11_SELSUBQ("sparql11/SEL[SUBQ].rq"), + SPARQL11_SELSUM("sparql11/SEL[SUM].rq"), + SPARQL11_SELVALUES("sparql11/SEL[VALUES].rq"); + // SPARQL11_SELAVGGROUPBY ("sparql11/SEL[AVG]*GROUPBY.rq"), + // SPARQL11_SELCOUNTGROUPBY ("sparql11/SEL[COUNT]*GROUPBY.rq"); - private String query; + private String query; - private SpecificFTask(String query) { - this.query = query; - } + private SpecificFTask(String query) { + this.query = query; + } - public static List allTasks(Endpoint ep) { - List res = new ArrayList(); + public static List allTasks(Endpoint ep) { + List res = new ArrayList(); - for (SpecificFTask action : values()) { - res.add(action.get(ep)); - } - return res; + for 
(SpecificFTask action : values()) { + res.add(action.get(ep)); } + return res; + } - public String toString() { - return query; - } + public String toString() { + return query; + } - public FRun get(Endpoint ep) { - return new FRun(ep, query); - } + public FRun get(Endpoint ep) { + return new FRun(ep, query); + } - public FRun get(EndpointResult epr) { - return new FRun(epr.getEndpoint(), query, epr.getStart()); - } + public FRun get(EndpointResult epr) { + return new FRun(epr.getEndpoint(), query, epr.getStart()); + } } diff --git a/backend/src/main/java/sparqles/core/interoperability/TaskRun.java b/backend/src/main/java/sparqles/core/interoperability/TaskRun.java index 1c6fdaa6..70492e09 100644 --- a/backend/src/main/java/sparqles/core/interoperability/TaskRun.java +++ b/backend/src/main/java/sparqles/core/interoperability/TaskRun.java @@ -2,179 +2,157 @@ import java.util.Iterator; import org.apache.jena.graph.Triple; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryException; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryFactory; -import org.apache.jena.query.QuerySolution; -import org.apache.jena.query.ResultSet; +import org.apache.jena.query.*; import org.apache.jena.sparql.util.FmtUtils; import org.slf4j.Logger; import sparqles.avro.Endpoint; import sparqles.avro.performance.Run; -import sparqles.utils.ExceptionHandler; -import sparqles.utils.FileManager; -import sparqles.utils.QueryManager; +import sparqles.utils.*; public abstract class TaskRun { - public static final long FIRST_RESULT_TIMEOUT = 60 * 1000; - public static final long EXECUTION_TIMEOUT = 15 * 60 * 1000; - public static final long A_FIRST_RESULT_TIMEOUT = 30 * 1000; - public static final long A_EXECUTION_TIMEOUT = 60 * 1000; - protected FileManager _fm; - protected String _query; - protected String _queryFile; - protected Endpoint _ep; - private Long _start; - private Logger log; - - public TaskRun(Endpoint ep, String queryFile, String 
queryRoot, Long start, Logger log) { - _queryFile = queryFile; - this.log = log; - - _query = QueryManager.getQuery(queryRoot, queryFile); - _ep = ep; - _start = start; - } - - private static String toString(QuerySolution qs, boolean first) { - StringBuffer vars = new StringBuffer(); - StringBuffer sb = new StringBuffer(); - Iterator varns = qs.varNames(); - while (varns.hasNext()) { - String varName = varns.next(); - if (first) { - vars.append(varName + "\t"); - } - sb.append(FmtUtils.stringForObject(qs.get(varName)) + "\t"); + public static final long FIRST_RESULT_TIMEOUT = 60 * 1000; + public static final long EXECUTION_TIMEOUT = 15 * 60 * 1000; + public static final long A_FIRST_RESULT_TIMEOUT = 10 * 1000; + // public static final long A_EXECUTION_TIMEOUT = 60 * 1000; + protected FileManager _fm; + protected String _query; + protected String _queryFile; + protected Endpoint _ep; + private Long _start; + private Logger log; + + public TaskRun(Endpoint ep, String queryFile, String queryRoot, Long start, Logger log) { + _queryFile = queryFile; + this.log = log; + + _query = QueryManager.getQuery(queryRoot, queryFile); + _ep = ep; + _start = start; + } + + public String getQuery() { + return _query; + } + + protected Run run() { + + Run r = new Run(); + r.setFrestout(FIRST_RESULT_TIMEOUT); + r.setExectout(EXECUTION_TIMEOUT); + + long b4 = 0; + long cnxion = 0; + int sols = 0; + try { + b4 = System.currentTimeMillis(); + + Query q = QueryFactory.create(this._query); + QueryExecution qexec = QueryManager.getExecution(_ep, _query); + + qexec.getContext().set(ARQ.httpQueryTimeout, FIRST_RESULT_TIMEOUT); + cnxion = System.currentTimeMillis(); + + if (q.isSelectType()) { + + ResultSet results = qexec.execSelect(); + if (_fm != null) sols = _fm.writeSPARQLResults(results, _queryFile, _ep, _start); + else { + sols = skipSPARQLResults(results, _queryFile, _ep, _start); } - if (first) return vars.toString() + "\n" + sb.toString(); - return sb.toString(); - } - - public 
String getQuery() { - return _query; - } + } else if (q.isAskType()) { + boolean result = qexec.execAsk(); + if (result) sols = 1; + else sols = -1; + } else if (q.isDescribeType()) { + Iterator triples = qexec.execDescribeTriples(); - protected Run run() { - - Run r = new Run(); - r.setFrestout(FIRST_RESULT_TIMEOUT); - r.setExectout(EXECUTION_TIMEOUT); - - long b4 = 0; - long cnxion = 0; - int sols = 0; - try { - b4 = System.currentTimeMillis(); - - Query q = QueryFactory.create(this._query); - QueryExecution qexec = QueryManager.getExecution(_ep, _query); - - // FIXME: find a new way to set these timeouts - // qexec.setTimeout(FIRST_RESULT_TIMEOUT, FIRST_RESULT_TIMEOUT); - cnxion = System.currentTimeMillis(); - - if (q.isSelectType()) { - - ResultSet results = qexec.execSelect(); - if (_fm != null) sols = _fm.writeSPARQLResults(results, _queryFile, _ep, _start); - else { - sols = skipSPARQLResults(results, _queryFile, _ep, _start); - } - - } else if (q.isAskType()) { - boolean result = qexec.execAsk(); - if (result) sols = 1; - else sols = -1; - } else if (q.isDescribeType()) { - Iterator triples = qexec.execDescribeTriples(); - - if (_fm != null) sols = _fm.writeSPARQLResults(triples, _queryFile, _ep, _start); - else { - sols = skipSPARQLResults(triples, _queryFile, _ep, _start); - } - - } else if (q.isConstructType()) { - Iterator triples = qexec.execConstructTriples(); - if (_fm != null) sols = _fm.writeSPARQLResults(triples, _queryFile, _ep, _start); - else { - sols = skipSPARQLResults(triples, _queryFile, _ep, _start); - } - } else { - throw new UnsupportedOperationException( - "What query is this? (Not SELECT|ASK|DESCRIBE|CONSTRUCT). 
" + q); - } - - long iter = System.currentTimeMillis(); - qexec.close(); - long close = System.currentTimeMillis(); - - r.setSolutions(sols); - r.setInittime((cnxion - b4)); - r.setExectime((iter - b4)); - r.setClosetime((close - b4)); - - // System.out.println( _ep.getUri() + "\t" + sols - // + "\t" + (cnxion - b4) + "\t" + (iter - b4) + "\t" - // + (close - b4)); - } catch (QueryException e) { - r.setException(ExceptionHandler.getExceptionSummary(e.getMessage())); - if (log.isInfoEnabled()) { - String cause; - if (e.getCause() != null) { - cause = - ExceptionHandler.getExceptionSummary(e.getCause().getMessage()) - + " !" - + e.getCause().getClass().getSimpleName(); - } else { - cause = ExceptionHandler.getExceptionSummary(e.getMessage()); - } - log.info( - "SPARQL query failed against endpoint {} (cause: {}...)", - _ep.getUri(), - cause); - } - } catch (Exception e) { - log.debug( - "[EXC] {} over {}; {}:{}:", - _queryFile, - _ep.getUri().toString(), - e.getClass().getSimpleName(), - e.getMessage(), - e.getCause()); - r.setException(ExceptionHandler.toFullString(e)); + if (_fm != null) sols = _fm.writeSPARQLResults(triples, _queryFile, _ep, _start); + else { + sols = skipSPARQLResults(triples, _queryFile, _ep, _start); } - return r; + } else if (q.isConstructType()) { + Iterator triples = qexec.execConstructTriples(); + if (_fm != null) sols = _fm.writeSPARQLResults(triples, _queryFile, _ep, _start); + else { + sols = skipSPARQLResults(triples, _queryFile, _ep, _start); + } + } else { + throw new UnsupportedOperationException( + "What query is this? (Not SELECT|ASK|DESCRIBE|CONSTRUCT). 
" + q); + } + + long iter = System.currentTimeMillis(); + qexec.close(); + long close = System.currentTimeMillis(); + + r.setSolutions(sols); + r.setInittime((cnxion - b4)); + r.setExectime((iter - b4)); + r.setClosetime((close - b4)); + + // System.out.println( _ep.getUri() + "\t" + sols + // + "\t" + (cnxion - b4) + "\t" + (iter - b4) + "\t" + // + (close - b4)); + } catch (Exception e) { + var faultKind = FaultDiagnostic.faultKindForJenaQuery(e); + if (faultKind == FaultKind.UNKNOWN) { + // String ex = ExceptionHandler.logAndtoString(e); + r.setException(StringUtils.trunc(e.getMessage())); + log.warn( + "Unknown error encountered while attempting a SPARQL query (type={})", + e.getClass().getName()); + log.debug("Stacktrace", e); + } else { + r.setException(FaultDiagnostic.interpretFault(faultKind)); + } } - public void setFileManager(FileManager fm) { - _fm = fm; - } + return r; + } - public int skipSPARQLResults( - Iterator triples, String queryFile, Endpoint ep, Long start) { + public void setFileManager(FileManager fm) { + _fm = fm; + } - int sols = 0; - while (triples.hasNext()) { - triples.next(); - sols++; - } - return sols; + public int skipSPARQLResults( + Iterator triples, String queryFile, Endpoint ep, Long start) { + + int sols = 0; + while (triples.hasNext()) { + triples.next(); + sols++; } + return sols; + } - public int skipSPARQLResults(ResultSet results, String queryFile, Endpoint ep, Long start) { + public int skipSPARQLResults(ResultSet results, String queryFile, Endpoint ep, Long start) { - int sols = 0; - while (results.hasNext()) { - QuerySolution qs = results.nextSolution(); - // toString(qs, sols == 0); - sols++; - } + int sols = 0; + while (results.hasNext()) { + QuerySolution qs = results.nextSolution(); + // toString(qs, sols == 0); + sols++; + } - return sols; + return sols; + } + + private static String toString(QuerySolution qs, boolean first) { + StringBuffer vars = new StringBuffer(); + StringBuffer sb = new StringBuffer(); + 
Iterator varns = qs.varNames(); + while (varns.hasNext()) { + String varName = varns.next(); + if (first) { + vars.append(varName + "\t"); + } + sb.append(FmtUtils.stringForObject(qs.get(varName)) + "\t"); } + + if (first) return vars.toString() + "\n" + sb.toString(); + return sb.toString(); + } } diff --git a/backend/src/main/java/sparqles/core/interoperability/Utils.java b/backend/src/main/java/sparqles/core/interoperability/Utils.java index 428ebadf..93e4f268 100644 --- a/backend/src/main/java/sparqles/core/interoperability/Utils.java +++ b/backend/src/main/java/sparqles/core/interoperability/Utils.java @@ -1,8 +1,8 @@ package sparqles.core.interoperability; public class Utils { - public static String removeNewlines(String in) { - if (in == null) return null; - return in.replaceAll("\r", " ").replaceAll("\n", " "); - } + public static String removeNewlines(String in) { + if (in == null) return null; + return in.replaceAll("\r", " ").replaceAll("\n", " "); + } } diff --git a/backend/src/main/java/sparqles/core/performance/PRun.java b/backend/src/main/java/sparqles/core/performance/PRun.java index 5f66219a..05f4bfee 100644 --- a/backend/src/main/java/sparqles/core/performance/PRun.java +++ b/backend/src/main/java/sparqles/core/performance/PRun.java @@ -9,32 +9,32 @@ public class PRun extends TaskRun { - private static final Logger log = LoggerFactory.getLogger(PRun.class); + private static final Logger log = LoggerFactory.getLogger(PRun.class); - public PRun(Endpoint ep, String queryFile) { - this(ep, queryFile, System.currentTimeMillis()); - } - - public PRun(Endpoint ep, String queryFile, Long start) { - super(ep, queryFile, SPARQLESProperties.getPTASK_QUERIES(), start, log); - } + public PRun(Endpoint ep, String queryFile) { + this(ep, queryFile, System.currentTimeMillis()); + } - public PSingleResult execute() { - PSingleResult result = new PSingleResult(); + public PRun(Endpoint ep, String queryFile, Long start) { + super(ep, queryFile, 
SPARQLESProperties.getPTASK_QUERIES(), start, log); + } - result.setQuery(_query); + public PSingleResult execute() { + PSingleResult result = new PSingleResult(); - log.debug("RUN COLD {} over {}", _queryFile, _ep.getUri()); - result.setCold(run()); + result.setQuery(_query); - try { - Thread.sleep(SPARQLESProperties.getPTASK_WAITTIME()); - } catch (InterruptedException e) { - e.printStackTrace(); - } - log.debug("RUN WARM {} over {}", _queryFile, _ep.getUri()); - result.setWarm(run()); + log.debug("RUN COLD {} over {}", _queryFile, _ep.getUri()); + result.setCold(run()); - return result; + try { + Thread.sleep(SPARQLESProperties.getPTASK_WAITTIME()); + } catch (InterruptedException e) { + log.warn("Interrupted before a WARM run could be attempted"); } + log.debug("RUN WARM {} over {}", _queryFile, _ep.getUri()); + result.setWarm(run()); + + return result; + } } diff --git a/backend/src/main/java/sparqles/core/performance/PTask.java b/backend/src/main/java/sparqles/core/performance/PTask.java index 07d373ea..24f6249d 100644 --- a/backend/src/main/java/sparqles/core/performance/PTask.java +++ b/backend/src/main/java/sparqles/core/performance/PTask.java @@ -15,91 +15,85 @@ public class PTask extends EndpointTask { - private static final Logger log = LoggerFactory.getLogger(PTask.class); + private static final Logger log = LoggerFactory.getLogger(PTask.class); - String query; - PrintStream out; + String query; + PrintStream out; - Exception query_exc; - private SpecificPTask[] _tasks; + Exception query_exc; + private SpecificPTask[] _tasks; - public PTask(Endpoint ep, SpecificPTask... tasks) { - super(ep); - _tasks = tasks; - log.debug( - "INIT {} with {} tasks and {} ms wait time", - this, - tasks.length, - SPARQLESProperties.getPTASK_WAITTIME()); - } - - @Override - public PResult process(EndpointResult epr) { - PResult res = new PResult(); - res.setEndpointResult(epr); + public PTask(Endpoint ep, SpecificPTask... 
tasks) { + super(ep); + _tasks = tasks; + log.debug( + "INIT {} with {} tasks and {} ms wait time", + this, + tasks.length, + SPARQLESProperties.getPTASK_WAITTIME()); + } - Map results = - new HashMap(_tasks.length); + @Override + public PResult process(EndpointResult epr) { + PResult res = new PResult(); + res.setEndpointResult(epr); - int consequExcept = 0; - int failures = 0; - for (SpecificPTask sp : _tasks) { - PRun run = sp.get(epr.getEndpoint()); + Map results = + new HashMap(_tasks.length); - PSingleResult pres = null; - if (consequExcept >= _tasks.length) { - log.debug("skipping {}:{} due to {} consecutive ex ", this, sp.name()); - pres = new PSingleResult(); + int consequExcept = 0; + int failures = 0; + for (SpecificPTask sp : _tasks) { + PRun run = sp.get(epr.getEndpoint()); - Run r = - new Run( - PRun.A_FIRST_RESULT_TIMEOUT, - -1, - 0L, - 0L, - 0L, - (CharSequence) - ("Test Aborted due to " - + consequExcept - + " consecutive exceptions"), - PRun.EXECUTION_TIMEOUT); - pres.setCold(r); - pres.setWarm(r); + PSingleResult pres = null; + if (consequExcept >= _tasks.length) { + log.debug("skipping {}:{} due to {} consecutive ex ", this, sp.name()); + pres = new PSingleResult(); - pres.setQuery(run.getQuery()); - } else { - log.debug("executing {}:{}", this, sp.name()); - pres = run.execute(); - } - results.put(sp.name(), pres); + Run r = + new Run( + PRun.A_FIRST_RESULT_TIMEOUT, + -1, + 0L, + 0L, + 0L, + (CharSequence) ("Test Aborted due to " + consequExcept + " consecutive exceptions"), + PRun.EXECUTION_TIMEOUT); + pres.setCold(r); + pres.setWarm(r); - if (pres.getCold().getException() != null || pres.getWarm().getException() != null) { - failures++; - String cold = "", warm = ""; - if (pres.getCold().getException() != null) { - cold = pres.getCold().getException().toString(); - consequExcept++; - } else consequExcept = 0; - if (pres.getWarm().getException() != null) { - warm = pres.getWarm().getException().toString(); - consequExcept++; - } else 
consequExcept = 0; + pres.setQuery(run.getQuery()); + } else { + log.debug("executing {}:{}", this, sp.name()); + pres = run.execute(); + } + results.put(sp.name(), pres); - log.debug("failed {} (cold: {}, warm: {})", this, cold, warm); - } - try { - Thread.sleep(SPARQLESProperties.getPTASK_WAITTIME()); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - res.setResults(results); + if (pres.getCold().getException() != null || pres.getWarm().getException() != null) { + failures++; + String cold = "", warm = ""; + if (pres.getCold().getException() != null) { + cold = pres.getCold().getException().toString(); + consequExcept++; + } else consequExcept = 0; + if (pres.getWarm().getException() != null) { + warm = pres.getWarm().getException().toString(); + consequExcept++; + } else consequExcept = 0; - log.info( - "executed {} {}/{} tasks without error", - this, - _tasks.length - failures, - _tasks.length); - return res; + log.debug("failed {} (cold: {}, warm: {})", this, cold, warm); + } + try { + Thread.sleep(SPARQLESProperties.getPTASK_WAITTIME()); + } catch (InterruptedException e) { + e.printStackTrace(); + } } + res.setResults(results); + + log.info( + "executed {} {}/{} tasks without error", this, _tasks.length - failures, _tasks.length); + return res; + } } diff --git a/backend/src/main/java/sparqles/core/performance/SpecificPTask.java b/backend/src/main/java/sparqles/core/performance/SpecificPTask.java index 32a43d7b..5941e9ae 100644 --- a/backend/src/main/java/sparqles/core/performance/SpecificPTask.java +++ b/backend/src/main/java/sparqles/core/performance/SpecificPTask.java @@ -5,74 +5,74 @@ import sparqles.avro.Endpoint; public enum SpecificPTask { - /* - ask-o.txt - ask-p.txt - ask-po.txt - ask-s.txt - ask-so.txt - ask-sp.txt - ask-spo.txt - count-class.txt - count-pred.txt - join-oo.txt - join-so.txt - join-ss.txt - limit1.txt - limit100001.txt - limit12500.txt - limit25000.txt - limit3125.txt - limit5.txt - limit50000.txt - limit6250.txt 
+ /* + ask-o.txt + ask-p.txt + ask-po.txt + ask-s.txt + ask-so.txt + ask-sp.txt + ask-spo.txt + count-class.txt + count-pred.txt + join-oo.txt + join-so.txt + join-ss.txt + limit1.txt + limit100001.txt + limit12500.txt + limit25000.txt + limit3125.txt + limit5.txt + limit50000.txt + limit6250.txt - */ - ASKO("ask-o.txt"), - ASKP("ask-p.txt"), - ASKPO("ask-po.txt"), - ASKS("ask-s.txt"), - ASKSO("ask-so.txt"), - ASKSP("ask-sp.txt"), - ASKSPO("ask-spo.txt"), - COUNTCLASS("count-class.txt"), - COUNTPRED("count-pred.txt"), - JOINOO("join-oo.txt"), - JOINSO("join-so.txt"), - JOINSS("join-ss.txt"), - LIMIT1("limit1.txt"), - LIMIT100K1("limit100001.txt"), - LIMIT12500("limit12500.txt"), - LIMIT25K("limit25000.txt"), - LIMIT3125("limit3125.txt"), - LIMIT5("limit5.txt"), - LIMIT50K("limit50000.txt"), - LIMIT6250("limit6250.txt"); + */ + ASKO("ask-o.txt"), + ASKP("ask-p.txt"), + ASKPO("ask-po.txt"), + ASKS("ask-s.txt"), + ASKSO("ask-so.txt"), + ASKSP("ask-sp.txt"), + ASKSPO("ask-spo.txt"), + COUNTCLASS("count-class.txt"), + COUNTPRED("count-pred.txt"), + JOINOO("join-oo.txt"), + JOINSO("join-so.txt"), + JOINSS("join-ss.txt"), + LIMIT1("limit1.txt"), + LIMIT100K1("limit100001.txt"), + LIMIT12500("limit12500.txt"), + LIMIT25K("limit25000.txt"), + LIMIT3125("limit3125.txt"), + LIMIT5("limit5.txt"), + LIMIT50K("limit50000.txt"), + LIMIT6250("limit6250.txt"); - private String query; + private String query; - private SpecificPTask(String query) { - this.query = query; - } + private SpecificPTask(String query) { + this.query = query; + } - public static List allTasks(Endpoint ep) { - List res = new ArrayList(); + public static List allTasks(Endpoint ep) { + List res = new ArrayList(); - for (SpecificPTask action : values()) { - res.add(action.get(ep)); - } - return res; + for (SpecificPTask action : values()) { + res.add(action.get(ep)); } + return res; + } - public String toString() { - return query; - } + public String toString() { + return query; + } - public PRun get(Endpoint 
ep) { - String rquery = query; + public PRun get(Endpoint ep) { + String rquery = query; - System.out.println(rquery); - return new PRun(ep, rquery); - } + System.out.println(rquery); + return new PRun(ep, rquery); + } } diff --git a/backend/src/main/java/sparqles/core/performance/Utils.java b/backend/src/main/java/sparqles/core/performance/Utils.java index c09d93ec..34c615c2 100644 --- a/backend/src/main/java/sparqles/core/performance/Utils.java +++ b/backend/src/main/java/sparqles/core/performance/Utils.java @@ -1,8 +1,8 @@ package sparqles.core.performance; public class Utils { - public static String removeNewlines(String in) { - if (in == null) return null; - return in.replaceAll("\r", " ").replaceAll("\n", " "); - } + public static String removeNewlines(String in) { + if (in == null) return null; + return in.replaceAll("\r", " ").replaceAll("\n", " "); + } } diff --git a/backend/src/main/java/sparqles/paper/AvailabilityIndexConverter.java b/backend/src/main/java/sparqles/paper/AvailabilityIndexConverter.java index f367ef89..a7a89576 100644 --- a/backend/src/main/java/sparqles/paper/AvailabilityIndexConverter.java +++ b/backend/src/main/java/sparqles/paper/AvailabilityIndexConverter.java @@ -10,96 +10,96 @@ import sparqles.paper.objects.AvailIndexJson; public class AvailabilityIndexConverter extends CmdGeneral { - private String availabilityEvoPath = null; + private String availabilityEvoPath = null; - public AvailabilityIndexConverter(String[] args) { - super(args); - getUsage().startCategory("Arguments"); - getUsage() - .addUsage( - "availability-evo.dat", - "absolute path for the availability-evo.dat csv file (e.g. /home/...)"); - } + public AvailabilityIndexConverter(String[] args) { + super(args); + getUsage().startCategory("Arguments"); + getUsage() + .addUsage( + "availability-evo.dat", + "absolute path for the availability-evo.dat csv file (e.g. 
/home/...)"); + } - /** - * @param args - */ - public static void main(String[] args) { - new AvailabilityIndexConverter(args).mainRun(); - } + /** + * @param args + */ + public static void main(String[] args) { + new AvailabilityIndexConverter(args).mainRun(); + } - @Override - protected String getCommandName() { - return "availability-evo.dat"; - } + @Override + protected String getCommandName() { + return "availability-evo.dat"; + } - @Override - protected String getSummary() { - return getCommandName() + " availability-evo.dat (e.g. /home/...)"; - } + @Override + protected String getSummary() { + return getCommandName() + " availability-evo.dat (e.g. /home/...)"; + } - @Override - protected void processModulesAndArgs() { - if (getPositional().size() < 1) { - this.printHelp(); - } - availabilityEvoPath = getPositionalArg(0); + @Override + protected void processModulesAndArgs() { + if (getPositional().size() < 1) { + this.printHelp(); } + availabilityEvoPath = getPositionalArg(0); + } - @Override - protected void exec() { - try { - Gson gson = new Gson(); - AvailIndexJson availIndexJson = new AvailIndexJson(); - BufferedReader br = - Files.newBufferedReader(Paths.get(availabilityEvoPath), StandardCharsets.UTF_8); - int cpt = 0; - for (String line = null; (line = br.readLine()) != null; ) { - if (cpt == 0) { - availIndexJson.addHeader(line); - } else { - availIndexJson.addValue(line); - } - cpt++; - } - System.out.println(gson.toJson(availIndexJson)); - - } catch (IOException e) { - e.printStackTrace(); + @Override + protected void exec() { + try { + Gson gson = new Gson(); + AvailIndexJson availIndexJson = new AvailIndexJson(); + BufferedReader br = + Files.newBufferedReader(Paths.get(availabilityEvoPath), StandardCharsets.UTF_8); + int cpt = 0; + for (String line = null; (line = br.readLine()) != null; ) { + if (cpt == 0) { + availIndexJson.addHeader(line); + } else { + availIndexJson.addValue(line); } + cpt++; + } + 
System.out.println(gson.toJson(availIndexJson)); + + } catch (IOException e) { + e.printStackTrace(); } + } - // private void writeFile(String content, String fileName){ - // if(!outputFolderFile.exists())outputFolderFile.mkdir(); - // FileOutputStream fop = null; - // File file; - // - // try { - // - // file = new File(outputFolderFile.getAbsolutePath()+"/"+fileName); - // if(file.exists())file.delete(); - // file.createNewFile(); - // - // fop = new FileOutputStream(file); - // - // // get the content in bytes - // byte[] contentInBytes = content.getBytes(); - // - // fop.write(contentInBytes); - // fop.flush(); - // fop.close(); - // - // } catch (IOException e) { - // e.printStackTrace(); - // } finally { - // try { - // if (fop != null) { - // fop.close(); - // } - // } catch (IOException e) { - // e.printStackTrace(); - // } - // } - // } + // private void writeFile(String content, String fileName){ + // if(!outputFolderFile.exists())outputFolderFile.mkdir(); + // FileOutputStream fop = null; + // File file; + // + // try { + // + // file = new File(outputFolderFile.getAbsolutePath()+"/"+fileName); + // if(file.exists())file.delete(); + // file.createNewFile(); + // + // fop = new FileOutputStream(file); + // + // // get the content in bytes + // byte[] contentInBytes = content.getBytes(); + // + // fop.write(contentInBytes); + // fop.flush(); + // fop.close(); + // + // } catch (IOException e) { + // e.printStackTrace(); + // } finally { + // try { + // if (fop != null) { + // fop.close(); + // } + // } catch (IOException e) { + // e.printStackTrace(); + // } + // } + // } } diff --git a/backend/src/main/java/sparqles/paper/AvailabilityStats.java b/backend/src/main/java/sparqles/paper/AvailabilityStats.java index 8c5342dc..9cc510e0 100644 --- a/backend/src/main/java/sparqles/paper/AvailabilityStats.java +++ b/backend/src/main/java/sparqles/paper/AvailabilityStats.java @@ -19,163 +19,157 @@ import sparqles.paper.objects.AvailJson; public class 
AvailabilityStats extends CmdGeneral { - private String atasksPath = null; - private File listEndpointsFile = null; - private File outputFolderFile = null; - - public AvailabilityStats(String[] args) { - super(args); - getUsage().startCategory("Arguments"); - getUsage() - .addUsage( - "atasks", - "absolute path for the availability atasks.json file (e.g. /home/...)"); - getUsage() - .addUsage( - "listEndpointsFile", - "absolute path for the list of endpoints file (e.g. /home/...)"); - getUsage() - .addUsage( - "outputFolderPath", - "absolute path for the output folder where stats will be generated (e.g." - + " /home/...)"); + private String atasksPath = null; + private File listEndpointsFile = null; + private File outputFolderFile = null; + + public AvailabilityStats(String[] args) { + super(args); + getUsage().startCategory("Arguments"); + getUsage() + .addUsage( + "atasks", "absolute path for the availability atasks.json file (e.g. /home/...)"); + getUsage() + .addUsage( + "listEndpointsFile", "absolute path for the list of endpoints file (e.g. /home/...)"); + getUsage() + .addUsage( + "outputFolderPath", + "absolute path for the output folder where stats will be generated (e.g." + + " /home/...)"); + } + + /** + * @param args + */ + public static void main(String[] args) { + new AvailabilityStats(args).mainRun(); + } + + @Override + protected String getCommandName() { + return "availabilityStats"; + } + + @Override + protected String getSummary() { + return getCommandName() + " atasks (e.g. /home/...)"; + } + + @Override + protected void processModulesAndArgs() { + if (getPositional().size() < 3) { + printHelp(); } - - /** - * @param args - */ - public static void main(String[] args) { - new AvailabilityStats(args).mainRun(); - } - - @Override - protected String getCommandName() { - return "availabilityStats"; - } - - @Override - protected String getSummary() { - return getCommandName() + " atasks (e.g. 
/home/...)"; + atasksPath = getPositionalArg(0); + try { + listEndpointsFile = new File(getPositionalArg(1)); + outputFolderFile = new File(getPositionalArg(2)); + } catch (Exception e) { + e.printStackTrace(); } - - @Override - protected void processModulesAndArgs() { - if (getPositional().size() < 3) { - printHelp(); - } - atasksPath = getPositionalArg(0); - try { - listEndpointsFile = new File(getPositionalArg(1)); - outputFolderFile = new File(getPositionalArg(2)); - } catch (Exception e) { - e.printStackTrace(); + } + + @Override + protected void exec() { + try { + Gson gson = new Gson(); + + // read the list of endpoints + AvailEpFromList[] epArray = + gson.fromJson(new FileReader(listEndpointsFile), AvailEpFromList[].class); + List epList = new ArrayList<>(); + for (int i = 0; i < epArray.length; i++) { + epList.add(epArray[i].getUri()); + // System.out.println(epArray[i].getUri()); + } + + List eps = new ArrayList(); + BufferedReader br = Files.newBufferedReader(Paths.get(atasksPath), StandardCharsets.UTF_8); + int cpt = 0; + for (String line = null; (line = br.readLine()) != null; ) { + + // System.out.println(line); + AvailJson obj = gson.fromJson(line, AvailJson.class); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); + if (obj.getStartDate() != null && obj.getSPARQLURI() != null) { + // + // System.out.println(obj.getSPARQLURI()+"\t"+obj.isAvailable()+"\t"+sdf.format(obj.getStartDate())); + AvailEp ep = new AvailEp(obj.getSPARQLURI()); + int indexEp = eps.indexOf(ep); + if (indexEp > -1) { // means it already exists + eps.get(indexEp).addResult(obj.getStartDate(), obj.isAvailable()); + } else { + ep.addResult(obj.getStartDate(), obj.isAvailable()); + eps.add(ep); + } } - } - - @Override - protected void exec() { - try { - Gson gson = new Gson(); - - // read the list of endpoints - AvailEpFromList[] epArray = - gson.fromJson(new FileReader(listEndpointsFile), AvailEpFromList[].class); - List epList = new ArrayList<>(); - for (int i = 0; i < 
epArray.length; i++) { - epList.add(epArray[i].getUri()); - // System.out.println(epArray[i].getUri()); - } - - List eps = new ArrayList(); - BufferedReader br = - Files.newBufferedReader(Paths.get(atasksPath), StandardCharsets.UTF_8); - int cpt = 0; - for (String line = null; (line = br.readLine()) != null; ) { - - // System.out.println(line); - AvailJson obj = gson.fromJson(line, AvailJson.class); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - if (obj.getStartDate() != null && obj.getSPARQLURI() != null) { - // - // System.out.println(obj.getSPARQLURI()+"\t"+obj.isAvailable()+"\t"+sdf.format(obj.getStartDate())); - AvailEp ep = new AvailEp(obj.getSPARQLURI()); - int indexEp = eps.indexOf(ep); - if (indexEp > -1) { // means it already exists - eps.get(indexEp).addResult(obj.getStartDate(), obj.isAvailable()); - } else { - ep.addResult(obj.getStartDate(), obj.isAvailable()); - eps.add(ep); - } - } - // if(cpt>1000000)break; - cpt++; - } - StringBuilder sbListEPsAlive = new StringBuilder(); - StringBuilder sbListEPsExtraPresent = new StringBuilder(); - StringBuilder sbListEPsNotPresent = new StringBuilder(); - AvailEvolMonthList availEvolMonthList = new AvailEvolMonthList(); - - for (AvailEp availEp : eps) { - if (epList.contains(availEp.getEpURI())) { - epList.remove(availEp.getEpURI()); - availEp.prettyPrint(); - // availEp.uriPrint(); - if (availEp.isAlive(4)) - sbListEPsAlive.append( - availEp.getEpURI() + System.getProperty("line.separator")); - for (String[] availPerMonth : availEp.getAvailPerMonth()) { - availEvolMonthList.addEp( - availPerMonth[0], Float.parseFloat(availPerMonth[1])); - } - } else { - sbListEPsExtraPresent.append( - availEp.getEpURI() + System.getProperty("line.separator")); - } - } - for (String ep : epList) { - sbListEPsNotPresent.append(ep + System.getProperty("line.separator")); - } - - writeFile(sbListEPsExtraPresent.toString(), "epsExtra.csv"); - writeFile(sbListEPsNotPresent.toString(), "epsNotPresent.csv"); - 
writeFile(sbListEPsAlive.toString(), "epsAlive.csv"); - writeFile(availEvolMonthList.csvPrintNb(), "availability-evo.csv"); - writeFile(availEvolMonthList.csvPrintPercent(), "availability-percent.csv"); - } catch (IOException e) { - e.printStackTrace(); + // if(cpt>1000000)break; + cpt++; + } + StringBuilder sbListEPsAlive = new StringBuilder(); + StringBuilder sbListEPsExtraPresent = new StringBuilder(); + StringBuilder sbListEPsNotPresent = new StringBuilder(); + AvailEvolMonthList availEvolMonthList = new AvailEvolMonthList(); + + for (AvailEp availEp : eps) { + if (epList.contains(availEp.getEpURI())) { + epList.remove(availEp.getEpURI()); + availEp.prettyPrint(); + // availEp.uriPrint(); + if (availEp.isAlive(4)) + sbListEPsAlive.append(availEp.getEpURI() + System.getProperty("line.separator")); + for (String[] availPerMonth : availEp.getAvailPerMonth()) { + availEvolMonthList.addEp(availPerMonth[0], Float.parseFloat(availPerMonth[1])); + } + } else { + sbListEPsExtraPresent.append(availEp.getEpURI() + System.getProperty("line.separator")); } + } + for (String ep : epList) { + sbListEPsNotPresent.append(ep + System.getProperty("line.separator")); + } + + writeFile(sbListEPsExtraPresent.toString(), "epsExtra.csv"); + writeFile(sbListEPsNotPresent.toString(), "epsNotPresent.csv"); + writeFile(sbListEPsAlive.toString(), "epsAlive.csv"); + writeFile(availEvolMonthList.csvPrintNb(), "availability-evo.csv"); + writeFile(availEvolMonthList.csvPrintPercent(), "availability-percent.csv"); + } catch (IOException e) { + e.printStackTrace(); } + } - private void writeFile(String content, String fileName) { - if (!outputFolderFile.exists()) outputFolderFile.mkdir(); - FileOutputStream fop = null; - File file; + private void writeFile(String content, String fileName) { + if (!outputFolderFile.exists()) outputFolderFile.mkdir(); + FileOutputStream fop = null; + File file; - try { + try { - file = new File(outputFolderFile.getAbsolutePath() + "/" + fileName); - if 
(file.exists()) file.delete(); - file.createNewFile(); + file = new File(outputFolderFile.getAbsolutePath() + "/" + fileName); + if (file.exists()) file.delete(); + file.createNewFile(); - fop = new FileOutputStream(file); + fop = new FileOutputStream(file); - // get the content in bytes - byte[] contentInBytes = content.getBytes(); + // get the content in bytes + byte[] contentInBytes = content.getBytes(); - fop.write(contentInBytes); - fop.flush(); - fop.close(); + fop.write(contentInBytes); + fop.flush(); + fop.close(); - } catch (IOException e) { - e.printStackTrace(); - } finally { - try { - if (fop != null) { - fop.close(); - } - } catch (IOException e) { - e.printStackTrace(); - } + } catch (IOException e) { + e.printStackTrace(); + } finally { + try { + if (fop != null) { + fop.close(); } + } catch (IOException e) { + e.printStackTrace(); + } } + } } diff --git a/backend/src/main/java/sparqles/paper/DiscoverabilityStats.java b/backend/src/main/java/sparqles/paper/DiscoverabilityStats.java index 7dcf7a33..ab82cc96 100644 --- a/backend/src/main/java/sparqles/paper/DiscoverabilityStats.java +++ b/backend/src/main/java/sparqles/paper/DiscoverabilityStats.java @@ -2,5 +2,5 @@ public class DiscoverabilityStats { - public static void main(String[] args) {} + public static void main(String[] args) {} } diff --git a/backend/src/main/java/sparqles/paper/Test.java b/backend/src/main/java/sparqles/paper/Test.java index d90f84d3..05b60be9 100644 --- a/backend/src/main/java/sparqles/paper/Test.java +++ b/backend/src/main/java/sparqles/paper/Test.java @@ -31,670 +31,658 @@ public class Test { - public static void main(String[] args) { - HashSet aliveEPS = new HashSet(); + public static void main(String[] args) { + HashSet aliveEPS = new HashSet(); + try { + BufferedReader reader = new BufferedReader(new FileReader(new File("epsAlive.csv"))); + String ep = null; + while ((ep = reader.readLine()) != null) { + // System.out.println(ep); + aliveEPS.add(ep.toLowerCase()); + } 
+ System.out.println(aliveEPS.size()); + + } catch (IOException e1) { + e1.printStackTrace(); + } + try { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); + Gson gson = new Gson(); + + BufferedReader br = Files.newBufferedReader(Paths.get("dtasks.json"), StandardCharsets.UTF_8); + + Map> wellknown = new HashMap>(); + Map> get = new HashMap>(); + + Map> query = new HashMap>(); + + Map> map = new HashMap>(); + Map objectmap = new HashMap(); + Map datemap = new HashMap(); + + HashSet filtered = new HashSet(); + HashSet all = new HashSet(); + DResult res = new DResult(); + for (String line = null; (line = br.readLine()) != null; ) { + JsonElement jelement = new JsonParser().parse(line); + JsonObject jobject = jelement.getAsJsonObject(); + DBObject dbObject = (DBObject) JSON.parse(line); + SpecificDatumReader r = new SpecificDatumReader(DResult.class); + JsonDecoder dec; try { - BufferedReader reader = new BufferedReader(new FileReader(new File("epsAlive.csv"))); - String ep = null; - while ((ep = reader.readLine()) != null) { - // System.out.println(ep); - aliveEPS.add(ep.toLowerCase()); + dec = DecoderFactory.get().jsonDecoder(res.getSchema(), dbObject.toString()); + DResult t = (DResult) r.read(null, dec); + + String uri = t.getEndpointResult().getEndpoint().getUri().toString().toLowerCase(); + all.add(uri); + if (!aliveEPS.contains(uri)) { + filtered.add(uri); + if (uri.contains("http://eurostat.linked-statistics.org")) System.out.println(uri); + continue; + } + + Date cur = new Date(t.getEndpointResult().getStart()); + + if (t.getQueryInfo().size() > 1) System.out.println(t); + QueryInfo qi = t.getQueryInfo().get(0); + Map m = query.get(uri); + if (m == null) { + m = new HashMap(); + query.put(uri, m); + } + m.put(cur, qi); + + for (DGETInfo d : t.getDescriptionFiles()) { + if (uri.equals("http://eurostat.linked-statistics.org/sparql")) { + System.out.println(""); } - System.out.println(aliveEPS.size()); - - } catch (IOException e1) { - 
e1.printStackTrace(); - } - try { - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - Gson gson = new Gson(); - - BufferedReader br = - Files.newBufferedReader(Paths.get("dtasks.json"), StandardCharsets.UTF_8); - - Map> wellknown = new HashMap>(); - Map> get = new HashMap>(); - - Map> query = new HashMap>(); - - Map> map = new HashMap>(); - Map objectmap = new HashMap(); - Map datemap = new HashMap(); - - HashSet filtered = new HashSet(); - HashSet all = new HashSet(); - DResult res = new DResult(); - for (String line = null; (line = br.readLine()) != null; ) { - JsonElement jelement = new JsonParser().parse(line); - JsonObject jobject = jelement.getAsJsonObject(); - DBObject dbObject = (DBObject) JSON.parse(line); - SpecificDatumReader r = new SpecificDatumReader(DResult.class); - JsonDecoder dec; - try { - dec = DecoderFactory.get().jsonDecoder(res.getSchema(), dbObject.toString()); - DResult t = (DResult) r.read(null, dec); - - String uri = - t.getEndpointResult().getEndpoint().getUri().toString().toLowerCase(); - all.add(uri); - if (!aliveEPS.contains(uri)) { - filtered.add(uri); - if (uri.contains("http://eurostat.linked-statistics.org")) - System.out.println(uri); - continue; - } - - Date cur = new Date(t.getEndpointResult().getStart()); - - if (t.getQueryInfo().size() > 1) System.out.println(t); - QueryInfo qi = t.getQueryInfo().get(0); - Map m = query.get(uri); - if (m == null) { - m = new HashMap(); - query.put(uri, m); - } - m.put(cur, qi); - - for (DGETInfo d : t.getDescriptionFiles()) { - if (uri.equals("http://eurostat.linked-statistics.org/sparql")) { - System.out.println(""); - } - if (d.getOperation().toString().equalsIgnoreCase("EPURL")) { - Map a = get.get(uri); - if (a == null) { - a = new HashMap(); - get.put(uri, a); - } - a.put(cur, d); - } else if (d.getOperation().toString().equalsIgnoreCase("wellknown")) { - Map a = wellknown.get(uri); - if (a == null) { - a = new HashMap(); - wellknown.put(uri, a); - } - a.put(cur, d); - } - } - 
// List d = map.get(uri); - // if(d==null){ - // d = new ArrayList(); map.put(uri, d); - // } - // d.add(cur); - // - // DResult[] dobj = objectmap.get(uri); - // if(dobj==null){ - // dobj = new DResult[2];objectmap.put(uri, dobj); - // } - // Date[] datem = datemap.get(uri); - // if(datem==null){ - // datem = new Date[2];datemap.put(uri, datem); - // } - // - // if(datem[0] == null || datem[0].getTime()>cur.getTime()){ - // datem[0] = cur; - // dobj[0]= t; - // } - // if(datem[1] == null || datem[1].getTime() a = get.get(uri); + if (a == null) { + a = new HashMap(); + get.put(uri, a); + } + a.put(cur, d); + } else if (d.getOperation().toString().equalsIgnoreCase("wellknown")) { + Map a = wellknown.get(uri); + if (a == null) { + a = new HashMap(); + wellknown.put(uri, a); + } + a.put(cur, d); } - System.out.println("ALL: " + all.size()); - System.out.println("filtered: " + filtered.size()); - all.removeAll(filtered); - System.out.println(all.size()); - aliveEPS.removeAll(all); - System.out.println(aliveEPS); - System.out.println("query: " + query.size()); - System.out.println("get: " + get.size()); - System.out.println("well: " + wellknown.size()); - // System.exit(0); - - computeServerNames(get); - // - // System.out.println("\n ### WellKnown ###\n"); - computeWellKnown(wellknown, "wellknown"); - // - // System.out.println("\n ### GET ###\n"); - computeWellKnown(get, "get"); - // System.out.println("\n ### QUERY ###\n"); - computeQueryResults(query); - - // - //// for(String ep: map.keySet()) - //// System.out.println(ep+" "+map.get(ep)); - //// System.exit(0); - // - // System.out.println("____"); - // int total=0; - // int diff=0; - // int serverNameChanged=0,serverNamesWithoutError=0; - // - // - // - // int[][] voidCount = {{0,0},{0,0},{0,0}}; - // int[][] sdcount = {{0,0},{0,0},{0,0}}; - // - // Map> [] respCodes = new Map[2]; - //// Map> [] servers = new Map[2]; - // - // Map serverNameChangeMap = new HashMap(); - // - // Map [] epsServers = new Map[2]; - // 
- // FileWriter fw = new FileWriter("discoverability.csv"); - // - // fw.write("ep,server0,server1\n"); - // - // Map ops = new HashMap(); - // ops.put("EPURL",0);ops.put("wellknown",1); - // - // - // for(String uri: map.keySet()){ - // String s=uri; - // total++; - // if(datemap.get(uri)[0].getTime() == datemap.get(uri)[1].getTime()){ - //// System.out.println("EP:"+uri+" missing two snapshots"); - //// System.out.println("EP:"+uri+" "+datemap.get(uri)[0]+" "+datemap.get(uri)[1]); - // continue; - // } - // diff++; - // String[] serverNames = new String[2]; - // - // for(int i=0; i<2;i++){ - // if(respCodes[i] == null) - // respCodes[i] = new HashMap>(); - // - // if(servers[i] == null) - // servers[i] = new HashMap>(); - // - // if(epsServers[i]==null) - // epsServers[i]= new HashMap(); - // - // DResult pres = objectmap.get(uri)[i]; - // - // - // System.out.println(pres.getQueryInfo()); - // - // boolean voiddesc = false,sddesc=false; - // boolean cvoid= false, csd=false; - // - // for(DGETInfo info : pres.getDescriptionFiles()){ - // String op = info.getOperation().toString(); - // - // //update resp code dist - // Map r = respCodes[i].get(op); - // if(r==null){ - // r = new HashMap(); - // respCodes[i].put(op,r); - // } - // String respCode=null; - // if(info.getResponseCode() == null) respCode ="null"; - // else respCode = info.getResponseCode().toString(); - // - // Integer c = r.get(respCode); - // if (c==null) c=0; - // r.put(respCode, c+1); - // - // - // Map ser = servers[i].get(op); - // if(ser==null){ - // ser = new HashMap(); - // servers[i].put(op,ser); - // } - // String serverName ="error"; - // if(info.getResponseCode()!=null){ - // serverName = info.getResponseServer().toString(); - // } - // if(op.equalsIgnoreCase("EPURL")) - // serverNames[i]=serverName; - // c = ser.get(serverName); - // if (c==null) c=0; - // ser.put(serverName, c+1); - // if(op.equalsIgnoreCase("sitemap.xml_link")) continue; - // if(info.getSPARQLDESCpreds().size() >0 ){ - 
// csd=true; - // sdcount[ops.get(op)][i]+=1; - // } - // if(info.getVoiDpreds().size() >0 ){ - // cvoid=true; - // System.out.println(op+" "+ops.get(op)); - // voidCount[ops.get(op)][i]+=1; - // } - // } - // if(csd){ - // sdcount[2][i]+=1; - // } - // if(cvoid ){ - // voidCount[2][i]+=1; - // } - // } - // if(serverNames[0] !="error" & serverNames[1] != "error"){ - // serverNamesWithoutError++; - // if(!serverNames[0].equalsIgnoreCase(serverNames[1])){ - // Integer c= serverNameChangeMap.get(serverNames[0]+"->"+serverNames[1]); - // if(c==null)c=0; - // serverNameChangeMap.put(serverNames[0]+"->"+serverNames[1], c+1); - // serverNameChanged++; - // System.out.println(uri+" "+Arrays.toString(serverNames)); - // System.out.println("Before"); - // DResult pres = objectmap.get(uri)[0]; - // for(DGETInfo info : pres.getDescriptionFiles()) - // System.out.println(info); - // System.out.println("after"); - // pres = objectmap.get(uri)[1]; - // for(DGETInfo info : pres.getDescriptionFiles()) - // System.out.println(info); - // System.out.println("_______"); - // - // } - // } - // } - // System.out.println("Summary"); - // System.out.println("Total endpoints: "+total); - // System.out.println(" Considered endpoints: "+diff); - // System.out.println(" serverNamesWithoutError: "+serverNamesWithoutError); - // System.out.println(" serverNameChanged: "+serverNameChanged); - // - // for(int i=0; i<2;i++){ - // System.out.println("i="+i); - // System.out.println("\t"+servers[i].size()+" Servers"); - // System.out.println(" Response codes"); - // for(Entry> ent: respCodes[i].entrySet()){ - // System.out.println(" "+ent.getKey()); - // for(Entry ent1: ent.getValue().entrySet()){ - // System.out.println("\t"+ent1.getKey()+" "+ent1.getValue()); - // } - // } - // } - // for(int i=0; i<2;i++){ - // System.out.println("i="+i); - // System.out.println("\t"+servers[i].size()+" Servers"); - // System.out.println(" Server"); - // for(Entry> ent: servers[i].entrySet()){ - // 
System.out.println(" "+ent.getKey()); - // for(Entry ent1: ent.getValue().entrySet()){ - // System.out.println("\t"+ent1.getKey()+" "+ent1.getValue()); - // } - // } - // } - // for(Entry ent: serverNameChangeMap.entrySet()){ - // System.out.println(ent.getKey()+" "+ent.getValue()); - // } - // - // - // System.out.println("VOID 0"+voidCount[0][0]+" "+voidCount[0][1]); - // System.out.println("VOID 1"+voidCount[1][0]+" "+voidCount[1][1]); - // System.out.println("VOID 2"+voidCount[2][0]+" "+voidCount[2][1]); - // System.out.println(); - // System.out.println("SD 0"+sdcount[0][0]+" "+sdcount[0][1]); - // System.out.println("SD 1"+sdcount[1][0]+" "+sdcount[1][1]); - // System.out.println("SD 2"+sdcount[2][0]+" "+sdcount[2][1]); + } + // List d = map.get(uri); + // if(d==null){ + // d = new ArrayList(); map.put(uri, d); + // } + // d.add(cur); + // + // DResult[] dobj = objectmap.get(uri); + // if(dobj==null){ + // dobj = new DResult[2];objectmap.put(uri, dobj); + // } + // Date[] datem = datemap.get(uri); + // if(datem==null){ + // datem = new Date[2];datemap.put(uri, datem); + // } + // + // if(datem[0] == null || datem[0].getTime()>cur.getTime()){ + // datem[0] = cur; + // dobj[0]= t; + // } + // if(datem[1] == null || datem[1].getTime()> [] respCodes = new Map[2]; + //// Map> [] servers = new Map[2]; + // + // Map serverNameChangeMap = new HashMap(); + // + // Map [] epsServers = new Map[2]; + // + // FileWriter fw = new FileWriter("discoverability.csv"); + // + // fw.write("ep,server0,server1\n"); + // + // Map ops = new HashMap(); + // ops.put("EPURL",0);ops.put("wellknown",1); + // + // + // for(String uri: map.keySet()){ + // String s=uri; + // total++; + // if(datemap.get(uri)[0].getTime() == datemap.get(uri)[1].getTime()){ + //// System.out.println("EP:"+uri+" missing two snapshots"); + //// System.out.println("EP:"+uri+" "+datemap.get(uri)[0]+" "+datemap.get(uri)[1]); + // continue; + // } + // diff++; + // String[] serverNames = new String[2]; + // + // 
for(int i=0; i<2;i++){ + // if(respCodes[i] == null) + // respCodes[i] = new HashMap>(); + // + // if(servers[i] == null) + // servers[i] = new HashMap>(); + // + // if(epsServers[i]==null) + // epsServers[i]= new HashMap(); + // + // DResult pres = objectmap.get(uri)[i]; + // + // + // System.out.println(pres.getQueryInfo()); + // + // boolean voiddesc = false,sddesc=false; + // boolean cvoid= false, csd=false; + // + // for(DGETInfo info : pres.getDescriptionFiles()){ + // String op = info.getOperation().toString(); + // + // //update resp code dist + // Map r = respCodes[i].get(op); + // if(r==null){ + // r = new HashMap(); + // respCodes[i].put(op,r); + // } + // String respCode=null; + // if(info.getResponseCode() == null) respCode ="null"; + // else respCode = info.getResponseCode().toString(); + // + // Integer c = r.get(respCode); + // if (c==null) c=0; + // r.put(respCode, c+1); + // + // + // Map ser = servers[i].get(op); + // if(ser==null){ + // ser = new HashMap(); + // servers[i].put(op,ser); + // } + // String serverName ="error"; + // if(info.getResponseCode()!=null){ + // serverName = info.getResponseServer().toString(); + // } + // if(op.equalsIgnoreCase("EPURL")) + // serverNames[i]=serverName; + // c = ser.get(serverName); + // if (c==null) c=0; + // ser.put(serverName, c+1); + // if(op.equalsIgnoreCase("sitemap.xml_link")) continue; + // if(info.getSPARQLDESCpreds().size() >0 ){ + // csd=true; + // sdcount[ops.get(op)][i]+=1; + // } + // if(info.getVoiDpreds().size() >0 ){ + // cvoid=true; + // System.out.println(op+" "+ops.get(op)); + // voidCount[ops.get(op)][i]+=1; + // } + // } + // if(csd){ + // sdcount[2][i]+=1; + // } + // if(cvoid ){ + // voidCount[2][i]+=1; + // } + // } + // if(serverNames[0] !="error" & serverNames[1] != "error"){ + // serverNamesWithoutError++; + // if(!serverNames[0].equalsIgnoreCase(serverNames[1])){ + // Integer c= serverNameChangeMap.get(serverNames[0]+"->"+serverNames[1]); + // if(c==null)c=0; + // 
serverNameChangeMap.put(serverNames[0]+"->"+serverNames[1], c+1); + // serverNameChanged++; + // System.out.println(uri+" "+Arrays.toString(serverNames)); + // System.out.println("Before"); + // DResult pres = objectmap.get(uri)[0]; + // for(DGETInfo info : pres.getDescriptionFiles()) + // System.out.println(info); + // System.out.println("after"); + // pres = objectmap.get(uri)[1]; + // for(DGETInfo info : pres.getDescriptionFiles()) + // System.out.println(info); + // System.out.println("_______"); + // + // } + // } + // } + // System.out.println("Summary"); + // System.out.println("Total endpoints: "+total); + // System.out.println(" Considered endpoints: "+diff); + // System.out.println(" serverNamesWithoutError: "+serverNamesWithoutError); + // System.out.println(" serverNameChanged: "+serverNameChanged); + // + // for(int i=0; i<2;i++){ + // System.out.println("i="+i); + // System.out.println("\t"+servers[i].size()+" Servers"); + // System.out.println(" Response codes"); + // for(Entry> ent: respCodes[i].entrySet()){ + // System.out.println(" "+ent.getKey()); + // for(Entry ent1: ent.getValue().entrySet()){ + // System.out.println("\t"+ent1.getKey()+" "+ent1.getValue()); + // } + // } + // } + // for(int i=0; i<2;i++){ + // System.out.println("i="+i); + // System.out.println("\t"+servers[i].size()+" Servers"); + // System.out.println(" Server"); + // for(Entry> ent: servers[i].entrySet()){ + // System.out.println(" "+ent.getKey()); + // for(Entry ent1: ent.getValue().entrySet()){ + // System.out.println("\t"+ent1.getKey()+" "+ent1.getValue()); + // } + // } + // } + // for(Entry ent: serverNameChangeMap.entrySet()){ + // System.out.println(ent.getKey()+" "+ent.getValue()); + // } + // + // + // System.out.println("VOID 0"+voidCount[0][0]+" "+voidCount[0][1]); + // System.out.println("VOID 1"+voidCount[1][0]+" "+voidCount[1][1]); + // System.out.println("VOID 2"+voidCount[2][0]+" "+voidCount[2][1]); + // System.out.println(); + // System.out.println("SD 
0"+sdcount[0][0]+" "+sdcount[0][1]); + // System.out.println("SD 1"+sdcount[1][0]+" "+sdcount[1][1]); + // System.out.println("SD 2"+sdcount[2][0]+" "+sdcount[2][1]); + + } catch (IOException e) { + e.printStackTrace(); } - - private static void computeQueryResults(Map> query) - throws IOException { - int removed = 0; - int added = 0, newdiscoverd = 0; - Map respCodes = new HashMap(); - FileWriter fw = new FileWriter(new File("query_results.tsv")); - fw.write("Endpoint\tnumber of voiD datasets 2013\tnumber of voiD datasets 2015\n"); - for (Entry> ent : query.entrySet()) { - Integer[] resp = new Integer[2]; - if (ent.getKey().equals("http://eurostat.linked-statistics.org/sparql")) { - System.out.println(); - } - for (Entry v : ent.getValue().entrySet()) { - if (1900 + v.getKey().getYear() == 2013) { - if (v.getValue().getException() == null) { - if (resp[0] == null || resp[0] < v.getValue().getResults().size()) - resp[0] = v.getValue().getResults().size(); - } - if (v.getValue().getException() != null) { - resp[0] = -1; - } - } - if (1900 + v.getKey().getYear() == 2015) { - if (v.getValue().getException() == null) { - if (resp[1] == null || resp[1] < v.getValue().getResults().size()) - resp[1] = v.getValue().getResults().size(); - } - if (v.getValue().getException() != null) { - resp[1] = -1; - } - } - } - fw.write(ent.getKey() + "\t" + resp[0] + "\t" + resp[1] + "\n"); - if (resp[0] != null || resp[1] != null) { - if (resp[0] != null && resp[1] == null) - System.out.println( - "Exceptions: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - else if (resp[0] == null && resp[1] != null) { - System.out.println( - "-Exceptions: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - if (resp[1] > 0) newdiscoverd++; - } else if (resp[0] == 0 && resp[1] != 0) { - System.out.println("ADDED: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - added++; - - } else if (resp[0] != 0 && resp[1] == 0) { - System.out.println("REMOVED: " + ent.getKey() + " " + resp[0] + " " + 
resp[1]); - removed++; - } - - // if(resp[0]!=null && resp[1] !=null && ){ - - // } - } + } + + private static void computeQueryResults(Map> query) + throws IOException { + int removed = 0; + int added = 0, newdiscoverd = 0; + Map respCodes = new HashMap(); + FileWriter fw = new FileWriter(new File("query_results.tsv")); + fw.write("Endpoint\tnumber of voiD datasets 2013\tnumber of voiD datasets 2015\n"); + for (Entry> ent : query.entrySet()) { + Integer[] resp = new Integer[2]; + if (ent.getKey().equals("http://eurostat.linked-statistics.org/sparql")) { + System.out.println(); + } + for (Entry v : ent.getValue().entrySet()) { + if (1900 + v.getKey().getYear() == 2013) { + if (v.getValue().getException() == null) { + if (resp[0] == null || resp[0] < v.getValue().getResults().size()) + resp[0] = v.getValue().getResults().size(); + } + if (v.getValue().getException() != null) { + resp[0] = -1; + } } - fw.close(); - System.out.println("removed: " + removed); - System.out.println("addedd: " + added); - System.out.println("newdiscoverd: " + newdiscoverd); - } - - private static void computeWellKnown(Map> wellknown, String name) - throws IOException { - Map voidResults = new HashMap(); - Map sdResults = new HashMap(); - Map respCodes = new HashMap(); - - System.out.println("#######NAME: " + name); - Map resp2013 = new HashMap(); - Map resp2015 = new HashMap(); - Map respChanges = new HashMap(); - - FileWriter fw = new FileWriter(new File(name + "_details.tsv")); - fw.write("EP\trespcode 2013\tvoid 2013\tsd 2013\trespcode 2015\tvoid 2015\tsd 2015\n"); - - for (Entry> ent : wellknown.entrySet()) { - if (ent.getKey().contains("http://data.lenka.no/")) { - System.out.println(""); - } - Boolean[] vd = new Boolean[2]; - Boolean[] sd = new Boolean[2]; - String[] resp = new String[2]; - for (Entry v : ent.getValue().entrySet()) { - if (1900 + v.getKey().getYear() == 2013) { - if (v.getValue().getResponseCode() != null - && v.getValue().getResponseCode().toString().equals("200")) 
{ - sd[0] = v.getValue().getSPARQLDESCpreds().size() > 0; - vd[0] = v.getValue().getVoiDpreds().size() > 0; - resp[0] = v.getValue().getResponseCode().toString(); - } - if (resp[0] == null) { - if (v.getValue().getResponseCode() != null) - resp[0] = v.getValue().getResponseCode().toString(); - } - } - if (1900 + v.getKey().getYear() == 2015) { - if (v.getValue().getResponseCode() != null - && v.getValue().getResponseCode().toString().equals("200")) { - sd[1] = v.getValue().getSPARQLDESCpreds().size() > 0; - vd[1] = v.getValue().getVoiDpreds().size() > 0; - resp[1] = v.getValue().getResponseCode().toString(); - } - if (resp[1] == null) { - if (v.getValue().getResponseCode() != null) - resp[1] = v.getValue().getResponseCode().toString(); - else if (v.getValue().getException() != null) { - resp[1] = "-1"; - } - } - } - } - fw.write( - ent.getKey() - + "\t" - + resp[0] - + "\t" - + vd[0] - + "\t" - + sd[0] - + "\t" - + resp[1] - + "\t" - + vd[1] - + "\t" - + sd[1] - + "\n"); - voidResults.put(ent.getKey(), vd); - sdResults.put(ent.getKey(), sd); - - respCodes.put(ent.getKey(), resp); - - if (resp[0] != null && resp[1] != null) { - if (!resp[0].equalsIgnoreCase(resp[1])) { - String s = - resp[0] + "->" + resp[1] + " VOID " + vd[0] + " " + vd[1] + " SD " - + sd[0] + " " + sd[1]; - Integer c = respChanges.get(s); - if (c == null) c = 0; - respChanges.put(s, c + 1); - - if (resp[1] != null && resp[1].equals("200") && vd[1] != null && vd[1]) - System.out.println( - "Void added: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - if (resp[0] != null - && resp[0].equals("200") - && resp[1] != null - && !resp[1].equals("200") - && vd[0] != null - && vd[0]) - System.out.println( - "Void removed: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - if (resp[1] != null && resp[1].equals("200") && sd[1] != null && sd[1]) - System.out.println( - "SD added: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - if (resp[0] != null - && resp[0].equals("200") - && resp[1] != null - && 
!resp[1].equals("200") - && sd[0] != null - && sd[0]) - System.out.println( - "SD removed: " + ent.getKey() + " " + resp[0] + " " + resp[1]); - } - if (resp[1].equals("200") && resp[0].equals("200")) { - if (vd[0] != null && vd[1] != null && vd[0] != vd[1]) { - String s = - resp[0] + "->" + resp[1] + " VOID " + vd[0] + " " + vd[1] + " SD " - + sd[0] + " " + sd[1]; - if (vd[0]) { - System.out.println("VOID REMOVED: " + ent.getKey() + " " + s); - // for(Entry v: ent.getValue().entrySet()){ - // System.out.println(v.getKey()+" "+v.getValue()); - // } - } else System.out.println("VOID ADDED: " + ent.getKey() + " " + s); - } - if (sd[0] != null && sd[1] != null && sd[0] != sd[1]) { - String s = - resp[0] + "->" + resp[1] + " VOID " + vd[0] + " " + vd[1] + " SD " - + sd[0] + " " + sd[1]; - if (sd[0]) { - System.out.println("SD REMOVED: " + ent.getKey() + " " + s); - for (Entry v : ent.getValue().entrySet()) { - System.out.println(v.getKey() + " " + v.getValue()); - } - } else System.out.println("SD ADDED: " + ent.getKey() + " " + s); - } - } - } - String r = resp[0]; - if (r == null) r = "error"; - Integer c = resp2013.get(r); - if (c == null) c = 0; - resp2013.put(r, c + 1); - - r = resp[1]; - if (r == null) r = "error"; - c = resp2015.get(r); - if (c == null) c = 0; - resp2015.put(r, c + 1); + if (1900 + v.getKey().getYear() == 2015) { + if (v.getValue().getException() == null) { + if (resp[1] == null || resp[1] < v.getValue().getResults().size()) + resp[1] = v.getValue().getResults().size(); + } + if (v.getValue().getException() != null) { + resp[1] = -1; + } } - fw.close(); - - System.out.println("ResponseCodes"); - - System.out.println("\n----2013"); - for (Entry ent : resp2013.entrySet()) - System.out.println(ent.getKey() + " " + ent.getValue()); - System.out.println("\n----2015"); - for (Entry ent : resp2015.entrySet()) - System.out.println(ent.getKey() + " " + ent.getValue()); - System.out.println("-------"); - for (Entry ent : respChanges.entrySet()) - 
System.out.println(ent.getKey() + " " + ent.getValue()); - - // VOID - int non = 0, error = 0; - int y2013 = 0, y2015 = 0; - Map voidChange = new HashMap(); - for (Entry ent : voidResults.entrySet()) { - if (ent.getValue()[0] == null || ent.getValue()[1] == null) { - non++; - if (ent.getValue()[0] == null) y2013++; - if (ent.getValue()[1] == null) y2015++; - } else if (ent.getValue()[0] != ent.getValue()[1]) { - String s = ent.getValue()[0] + "->" + ent.getValue()[1]; - Integer c = voidChange.get(s); - System.out.println("VOID: " + ent.getKey()); - if (c == null) c = 0; - voidChange.put(s, c + 1); - } + } + fw.write(ent.getKey() + "\t" + resp[0] + "\t" + resp[1] + "\n"); + if (resp[0] != null || resp[1] != null) { + if (resp[0] != null && resp[1] == null) + System.out.println("Exceptions: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + else if (resp[0] == null && resp[1] != null) { + System.out.println("-Exceptions: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + if (resp[1] > 0) newdiscoverd++; + } else if (resp[0] == 0 && resp[1] != 0) { + System.out.println("ADDED: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + added++; + + } else if (resp[0] != 0 && resp[1] == 0) { + System.out.println("REMOVED: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + removed++; } - System.out.println("Void: " + voidResults.size() + " none:" + non + " error:" + error); - System.out.println("Valid 200 responses " + (voidResults.size() - non)); - System.out.println("2013: " + y2013 + " y2015:" + y2015); - System.out.println("Changes: " + voidChange.size()); - for (Entry ent : voidChange.entrySet()) { - System.out.println(ent.getKey() + " " + ent.getValue()); + // if(resp[0]!=null && resp[1] !=null && ){ + + // } + } + } + fw.close(); + System.out.println("removed: " + removed); + System.out.println("addedd: " + added); + System.out.println("newdiscoverd: " + newdiscoverd); + } + + private static void computeWellKnown(Map> wellknown, String name) + throws IOException { 
+ Map voidResults = new HashMap(); + Map sdResults = new HashMap(); + Map respCodes = new HashMap(); + + System.out.println("#######NAME: " + name); + Map resp2013 = new HashMap(); + Map resp2015 = new HashMap(); + Map respChanges = new HashMap(); + + FileWriter fw = new FileWriter(new File(name + "_details.tsv")); + fw.write("EP\trespcode 2013\tvoid 2013\tsd 2013\trespcode 2015\tvoid 2015\tsd 2015\n"); + + for (Entry> ent : wellknown.entrySet()) { + if (ent.getKey().contains("http://data.lenka.no/")) { + System.out.println(""); + } + Boolean[] vd = new Boolean[2]; + Boolean[] sd = new Boolean[2]; + String[] resp = new String[2]; + for (Entry v : ent.getValue().entrySet()) { + if (1900 + v.getKey().getYear() == 2013) { + if (v.getValue().getResponseCode() != null + && v.getValue().getResponseCode().toString().equals("200")) { + sd[0] = v.getValue().getSPARQLDESCpreds().size() > 0; + vd[0] = v.getValue().getVoiDpreds().size() > 0; + resp[0] = v.getValue().getResponseCode().toString(); + } + if (resp[0] == null) { + if (v.getValue().getResponseCode() != null) + resp[0] = v.getValue().getResponseCode().toString(); + } } - non = 0; - Map sdChange = new HashMap(); - for (Entry ent : sdResults.entrySet()) { - if (ent.getValue()[0] == null || ent.getValue()[1] == null) { - non++; - } else if (ent.getValue()[0] != ent.getValue()[1]) { - System.out.println("SD: " + ent.getKey()); - String s = ent.getValue()[0] + "->" + ent.getValue()[1]; - Integer c = sdChange.get(s); - if (c == null) c = 0; - sdChange.put(s, c + 1); + if (1900 + v.getKey().getYear() == 2015) { + if (v.getValue().getResponseCode() != null + && v.getValue().getResponseCode().toString().equals("200")) { + sd[1] = v.getValue().getSPARQLDESCpreds().size() > 0; + vd[1] = v.getValue().getVoiDpreds().size() > 0; + resp[1] = v.getValue().getResponseCode().toString(); + } + if (resp[1] == null) { + if (v.getValue().getResponseCode() != null) + resp[1] = v.getValue().getResponseCode().toString(); + else if 
(v.getValue().getException() != null) { + resp[1] = "-1"; } + } + } + } + fw.write( + ent.getKey() + + "\t" + + resp[0] + + "\t" + + vd[0] + + "\t" + + sd[0] + + "\t" + + resp[1] + + "\t" + + vd[1] + + "\t" + + sd[1] + + "\n"); + voidResults.put(ent.getKey(), vd); + sdResults.put(ent.getKey(), sd); + + respCodes.put(ent.getKey(), resp); + + if (resp[0] != null && resp[1] != null) { + if (!resp[0].equalsIgnoreCase(resp[1])) { + String s = + resp[0] + "->" + resp[1] + " VOID " + vd[0] + " " + vd[1] + " SD " + sd[0] + " " + + sd[1]; + Integer c = respChanges.get(s); + if (c == null) c = 0; + respChanges.put(s, c + 1); + + if (resp[1] != null && resp[1].equals("200") && vd[1] != null && vd[1]) + System.out.println("Void added: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + if (resp[0] != null + && resp[0].equals("200") + && resp[1] != null + && !resp[1].equals("200") + && vd[0] != null + && vd[0]) + System.out.println("Void removed: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + if (resp[1] != null && resp[1].equals("200") && sd[1] != null && sd[1]) + System.out.println("SD added: " + ent.getKey() + " " + resp[0] + " " + resp[1]); + if (resp[0] != null + && resp[0].equals("200") + && resp[1] != null + && !resp[1].equals("200") + && sd[0] != null + && sd[0]) + System.out.println("SD removed: " + ent.getKey() + " " + resp[0] + " " + resp[1]); } - System.out.println("SD: " + sdResults.size() + " none:" + non + " error:" + error); - System.out.println(sdResults.size() - non); - System.out.println("Changes: " + sdChange.size()); - for (Entry ent : sdChange.entrySet()) { - System.out.println(ent.getKey() + " " + ent.getValue()); + if (resp[1].equals("200") && resp[0].equals("200")) { + if (vd[0] != null && vd[1] != null && vd[0] != vd[1]) { + String s = + resp[0] + "->" + resp[1] + " VOID " + vd[0] + " " + vd[1] + " SD " + sd[0] + " " + + sd[1]; + if (vd[0]) { + System.out.println("VOID REMOVED: " + ent.getKey() + " " + s); + // for(Entry v: 
ent.getValue().entrySet()){ + // System.out.println(v.getKey()+" "+v.getValue()); + // } + } else System.out.println("VOID ADDED: " + ent.getKey() + " " + s); + } + if (sd[0] != null && sd[1] != null && sd[0] != sd[1]) { + String s = + resp[0] + "->" + resp[1] + " VOID " + vd[0] + " " + vd[1] + " SD " + sd[0] + " " + + sd[1]; + if (sd[0]) { + System.out.println("SD REMOVED: " + ent.getKey() + " " + s); + for (Entry v : ent.getValue().entrySet()) { + System.out.println(v.getKey() + " " + v.getValue()); + } + } else System.out.println("SD ADDED: " + ent.getKey() + " " + s); + } } + } + String r = resp[0]; + if (r == null) r = "error"; + Integer c = resp2013.get(r); + if (c == null) c = 0; + resp2013.put(r, c + 1); + + r = resp[1]; + if (r == null) r = "error"; + c = resp2015.get(r); + if (c == null) c = 0; + resp2015.put(r, c + 1); + } + fw.close(); + + System.out.println("ResponseCodes"); + + System.out.println("\n----2013"); + for (Entry ent : resp2013.entrySet()) + System.out.println(ent.getKey() + " " + ent.getValue()); + System.out.println("\n----2015"); + for (Entry ent : resp2015.entrySet()) + System.out.println(ent.getKey() + " " + ent.getValue()); + System.out.println("-------"); + for (Entry ent : respChanges.entrySet()) + System.out.println(ent.getKey() + " " + ent.getValue()); + + // VOID + int non = 0, error = 0; + int y2013 = 0, y2015 = 0; + Map voidChange = new HashMap(); + for (Entry ent : voidResults.entrySet()) { + if (ent.getValue()[0] == null || ent.getValue()[1] == null) { + non++; + if (ent.getValue()[0] == null) y2013++; + if (ent.getValue()[1] == null) y2015++; + } else if (ent.getValue()[0] != ent.getValue()[1]) { + String s = ent.getValue()[0] + "->" + ent.getValue()[1]; + Integer c = voidChange.get(s); + System.out.println("VOID: " + ent.getKey()); + if (c == null) c = 0; + voidChange.put(s, c + 1); + } } - private static void computeServerNames(Map> get) - throws IOException { - Map servers = new HashMap(); - // EP Server - FileWriter f = 
new FileWriter(new File("server_names_detail.tsv")); - f.write("Endpoint\tservername 2013\t servername 2015\n"); - for (Entry> ent : get.entrySet()) { - String[] snames = new String[2]; - - for (Entry v : ent.getValue().entrySet()) { - String server = "error"; - // System.out.println(v.getValue().getResponseServer()); - if (v.getValue().getResponseServer() != null) - server = v.getValue().getResponseServer().toString(); - else { - System.out.println(v.getValue()); - System.out.println(v.getValue().getResponseCode()); - } - if (1900 + v.getKey().getYear() == 2013) { - if (snames[0] == null - || (!server.equalsIgnoreCase("error") - && (snames[0].equalsIgnoreCase("error") - || snames[0].equalsIgnoreCase("missing")))) - snames[0] = server; - - } else if (1900 + v.getKey().getYear() == 2015) { - if (snames[1] == null - || (!server.equalsIgnoreCase("error") - && (snames[1].equalsIgnoreCase("error") - || snames[1].equalsIgnoreCase("missing")))) - snames[1] = server; - } - } - // System.out.println(Arrays.toString(snames)); - servers.put(ent.getKey(), snames); - f.write(ent.getKey() + "\t" + snames[0] + "\t" + snames[1] + "\n"); + System.out.println("Void: " + voidResults.size() + " none:" + non + " error:" + error); + System.out.println("Valid 200 responses " + (voidResults.size() - non)); + System.out.println("2013: " + y2013 + " y2015:" + y2015); + System.out.println("Changes: " + voidChange.size()); + for (Entry ent : voidChange.entrySet()) { + System.out.println(ent.getKey() + " " + ent.getValue()); + } + non = 0; + Map sdChange = new HashMap(); + for (Entry ent : sdResults.entrySet()) { + if (ent.getValue()[0] == null || ent.getValue()[1] == null) { + non++; + } else if (ent.getValue()[0] != ent.getValue()[1]) { + System.out.println("SD: " + ent.getKey()); + String s = ent.getValue()[0] + "->" + ent.getValue()[1]; + Integer c = sdChange.get(s); + if (c == null) c = 0; + sdChange.put(s, c + 1); + } + } + System.out.println("SD: " + sdResults.size() + " none:" + non + 
" error:" + error); + System.out.println(sdResults.size() - non); + System.out.println("Changes: " + sdChange.size()); + for (Entry ent : sdChange.entrySet()) { + System.out.println(ent.getKey() + " " + ent.getValue()); + } + } + + private static void computeServerNames(Map> get) throws IOException { + Map servers = new HashMap(); + // EP Server + FileWriter f = new FileWriter(new File("server_names_detail.tsv")); + f.write("Endpoint\tservername 2013\t servername 2015\n"); + for (Entry> ent : get.entrySet()) { + String[] snames = new String[2]; + + for (Entry v : ent.getValue().entrySet()) { + String server = "error"; + // System.out.println(v.getValue().getResponseServer()); + if (v.getValue().getResponseServer() != null) + server = v.getValue().getResponseServer().toString(); + else { + System.out.println(v.getValue()); + System.out.println(v.getValue().getResponseCode()); } - f.close(); - - Map sn2013 = new HashMap(); - Map sn2015 = new HashMap(); - - int non = 0, error = 0; - Map serverChange = new HashMap(); - for (Entry ent : servers.entrySet()) { - if (ent.getValue()[0] == null || ent.getValue()[1] == null) { - non++; - System.out.println(ent.getKey() + " " + Arrays.toString(ent.getValue())); - } else if (!ent.getValue()[0].equalsIgnoreCase(ent.getValue()[1])) { - String s = ent.getValue()[0] + "->" + ent.getValue()[1]; - Integer c = serverChange.get(s); - if (c == null) c = 0; - serverChange.put(s, c + 1); - } - if (ent.getValue()[0] != null && ent.getValue()[1] != null) { - String sn13 = "null"; - String sn15 = "null"; - if (ent.getValue()[0] != null) sn13 = ent.getValue()[0]; - if (ent.getValue()[1] != null) sn15 = ent.getValue()[1]; - Integer c = sn2013.get(sn13); - Integer c1 = sn2015.get(sn15); - if (c == null) c = 0; - if (c1 == null) c1 = 0; - sn2013.put(sn13, c + 1); - sn2015.put(sn15, c1 + 1); - } + if (1900 + v.getKey().getYear() == 2013) { + if (snames[0] == null + || (!server.equalsIgnoreCase("error") + && (snames[0].equalsIgnoreCase("error") + 
|| snames[0].equalsIgnoreCase("missing")))) snames[0] = server; + + } else if (1900 + v.getKey().getYear() == 2015) { + if (snames[1] == null + || (!server.equalsIgnoreCase("error") + && (snames[1].equalsIgnoreCase("error") + || snames[1].equalsIgnoreCase("missing")))) snames[1] = server; } + } + // System.out.println(Arrays.toString(snames)); + servers.put(ent.getKey(), snames); + f.write(ent.getKey() + "\t" + snames[0] + "\t" + snames[1] + "\n"); + } + f.close(); + + Map sn2013 = new HashMap(); + Map sn2015 = new HashMap(); + + int non = 0, error = 0; + Map serverChange = new HashMap(); + for (Entry ent : servers.entrySet()) { + if (ent.getValue()[0] == null || ent.getValue()[1] == null) { + non++; + System.out.println(ent.getKey() + " " + Arrays.toString(ent.getValue())); + } else if (!ent.getValue()[0].equalsIgnoreCase(ent.getValue()[1])) { + String s = ent.getValue()[0] + "->" + ent.getValue()[1]; + Integer c = serverChange.get(s); + if (c == null) c = 0; + serverChange.put(s, c + 1); + } + if (ent.getValue()[0] != null && ent.getValue()[1] != null) { + String sn13 = "null"; + String sn15 = "null"; + if (ent.getValue()[0] != null) sn13 = ent.getValue()[0]; + if (ent.getValue()[1] != null) sn15 = ent.getValue()[1]; + Integer c = sn2013.get(sn13); + Integer c1 = sn2015.get(sn15); + if (c == null) c = 0; + if (c1 == null) c1 = 0; + sn2013.put(sn13, c + 1); + sn2015.put(sn15, c1 + 1); + } + } - System.out.println("Servers: " + servers.size() + " none:" + non + " error:" + error); - System.out.println(servers.size() - non); - System.out.println("Changes: " + serverChange.size()); - for (Entry ent : serverChange.entrySet()) { - System.out.println(ent.getKey() + " " + ent.getValue()); + System.out.println("Servers: " + servers.size() + " none:" + non + " error:" + error); + System.out.println(servers.size() - non); + System.out.println("Changes: " + serverChange.size()); + for (Entry ent : serverChange.entrySet()) { + System.out.println(ent.getKey() + " " + 
ent.getValue()); + } + FileWriter fw; + try { + fw = new FileWriter(new File("server_names.tsv")); + fw.write("#server\t2013\t2015\n"); + for (String sn : sn2013.keySet()) { + int c = 0; + if (sn2015.containsKey(sn)) { + c = sn2015.remove(sn); } - FileWriter fw; - try { - fw = new FileWriter(new File("server_names.tsv")); - fw.write("#server\t2013\t2015\n"); - for (String sn : sn2013.keySet()) { - int c = 0; - if (sn2015.containsKey(sn)) { - c = sn2015.remove(sn); - } - fw.write(sn + "\t" + sn2013.get(sn) + "\t" + c + "\n"); - } - for (String sn : sn2015.keySet()) { - int c = 0; - if (sn2013.containsKey(sn)) { - c = sn2013.remove(sn); - } - fw.write(sn + "\t" + c + "\t" + sn2015.get(sn) + "\n"); - } - fw.close(); - } catch (IOException e) { - e.printStackTrace(); + fw.write(sn + "\t" + sn2013.get(sn) + "\t" + c + "\n"); + } + for (String sn : sn2015.keySet()) { + int c = 0; + if (sn2013.containsKey(sn)) { + c = sn2013.remove(sn); } + fw.write(sn + "\t" + c + "\t" + sn2015.get(sn) + "\n"); + } + fw.close(); + } catch (IOException e) { + e.printStackTrace(); } + } } diff --git a/backend/src/main/java/sparqles/paper/objects/AMonth.java b/backend/src/main/java/sparqles/paper/objects/AMonth.java index 6d58dc3c..cfae5bc8 100644 --- a/backend/src/main/java/sparqles/paper/objects/AMonth.java +++ b/backend/src/main/java/sparqles/paper/objects/AMonth.java @@ -3,71 +3,71 @@ import java.util.Date; public class AMonth { - private Date date = null; - private int zeroFive = 0; - private int fiveSeventyfive = 0; - private int seventyfiveNintyfive = 0; - private int nintyfiveNintynine = 0; - private int nintynineHundred = 0; + private Date date = null; + private int zeroFive = 0; + private int fiveSeventyfive = 0; + private int seventyfiveNintyfive = 0; + private int nintyfiveNintynine = 0; + private int nintynineHundred = 0; - public Date getDate() { - return date; - } + public Date getDate() { + return date; + } - public void setDate(Date date) { - this.date = date; - } + public 
void setDate(Date date) { + this.date = date; + } - public int getZeroFive() { - return zeroFive; - } + public int getZeroFive() { + return zeroFive; + } - public void setZeroFive(int zeroFive) { - this.zeroFive = zeroFive; - } + public void setZeroFive(int zeroFive) { + this.zeroFive = zeroFive; + } - public int getFiveSeventyfive() { - return fiveSeventyfive; - } + public int getFiveSeventyfive() { + return fiveSeventyfive; + } - public void setFiveSeventyfive(int fiveSeventyfive) { - this.fiveSeventyfive = fiveSeventyfive; - } + public void setFiveSeventyfive(int fiveSeventyfive) { + this.fiveSeventyfive = fiveSeventyfive; + } - public int getSeventyfiveNintyfive() { - return seventyfiveNintyfive; - } + public int getSeventyfiveNintyfive() { + return seventyfiveNintyfive; + } - public void setSeventyfiveNintyfive(int seventyfiveNintyfive) { - this.seventyfiveNintyfive = seventyfiveNintyfive; - } + public void setSeventyfiveNintyfive(int seventyfiveNintyfive) { + this.seventyfiveNintyfive = seventyfiveNintyfive; + } - public int getNintyfiveNintynine() { - return nintyfiveNintynine; - } + public int getNintyfiveNintynine() { + return nintyfiveNintynine; + } - public void setNintyfiveNintynine(int nintyfiveNintynine) { - this.nintyfiveNintynine = nintyfiveNintynine; - } + public void setNintyfiveNintynine(int nintyfiveNintynine) { + this.nintyfiveNintynine = nintyfiveNintynine; + } - public int getNintynineHundred() { - return nintynineHundred; - } + public int getNintynineHundred() { + return nintynineHundred; + } - public void setNintynineHundred(int nintynineHundred) { - this.nintynineHundred = nintynineHundred; - } + public void setNintynineHundred(int nintynineHundred) { + this.nintynineHundred = nintynineHundred; + } - public void addEndpoint(long availTests, long unavailTests) { - // prevent from division by 0 - if (availTests + unavailTests == 0) zeroFive++; - else { - double perc = ((double) availTests / (availTests + unavailTests)) * 100; - if (perc <= 
5) zeroFive++; - else if (perc > 5 && perc <= 75) fiveSeventyfive++; - else if (perc > 75 && perc <= 95) seventyfiveNintyfive++; - else if (perc > 95 && perc <= 99) nintyfiveNintynine++; - else if (perc > 99) nintynineHundred++; - } + public void addEndpoint(long availTests, long unavailTests) { + // prevent from division by 0 + if (availTests + unavailTests == 0) zeroFive++; + else { + double perc = ((double) availTests / (availTests + unavailTests)) * 100; + if (perc <= 5) zeroFive++; + else if (perc > 5 && perc <= 75) fiveSeventyfive++; + else if (perc > 75 && perc <= 95) seventyfiveNintyfive++; + else if (perc > 95 && perc <= 99) nintyfiveNintynine++; + else if (perc > 99) nintynineHundred++; } + } } diff --git a/backend/src/main/java/sparqles/paper/objects/AvailEp.java b/backend/src/main/java/sparqles/paper/objects/AvailEp.java index 0e7c36d9..83a7ecc2 100644 --- a/backend/src/main/java/sparqles/paper/objects/AvailEp.java +++ b/backend/src/main/java/sparqles/paper/objects/AvailEp.java @@ -10,140 +10,140 @@ import java.util.Locale; public class AvailEp { - private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM"); - private static DecimalFormat df = - new DecimalFormat("0.####", new DecimalFormatSymbols(Locale.US)); - private String epURI = null; - private List records = new ArrayList(); - - public AvailEp(String epURI) { - this.epURI = epURI; + private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM"); + private static DecimalFormat df = + new DecimalFormat("0.####", new DecimalFormatSymbols(Locale.US)); + private String epURI = null; + private List records = new ArrayList(); + + public AvailEp(String epURI) { + this.epURI = epURI; + } + + public String getEpURI() { + return epURI; + } + + public void setEpURI(String epURI) { + this.epURI = epURI; + } + + public List getRecords() { + return records; + } + + public void setRecords(List records) { + this.records = records; + } + + public void addResult(Date date, boolean isAvailable) { + 
AvailEpMonthRecord record = new AvailEpMonthRecord(sdf.format(date)); + int indexMonth = records.indexOf(record); + if (indexMonth > -1) { // means it already exists + records.get(indexMonth).addTest(isAvailable); + } else { + record.addTest(isAvailable); + records.add(record); } - - public String getEpURI() { - return epURI; + } + + public List getAvailPerMonth() { + List availPerMonthList = new ArrayList<>(); + for (int i = 0; i < records.size(); i++) { + AvailEpMonthRecord record = records.get(i); + String[] availPerMonth = + new String[] { + record.getDate(), df.format(record.getNbSuccessTest() / record.getNbTests()) + }; + availPerMonthList.add(availPerMonth); } - - public void setEpURI(String epURI) { - this.epURI = epURI; + return availPerMonthList; + } + + @Override + public boolean equals(Object object) { + if (object != null && object instanceof AvailEp) + return ((AvailEp) object).getEpURI().equals(this.epURI); + return false; + } + + public void prettyPrint() { + System.out.println(epURI); + for (AvailEpMonthRecord record : records) { + System.out.println( + "\t[" + + record.getDate() + + "]\t" + + df.format(record.getNbSuccessTest() / record.getNbTests())); + } + } + + public void uriPrint() { + System.out.println(epURI); + } + + public boolean isAlive(int monthWindow) { + int nbMonthAlive = 0; + Date today = new Date(); + + for (int i = 0; i < monthWindow; i++) { + int indexMonth = records.indexOf(new AvailEpMonthRecord(sdf.format(today))); + if (indexMonth > -1) { // means it already exists + AvailEpMonthRecord record = records.get(indexMonth); + if (record.getNbSuccessTest() / record.getNbTests() > 0) nbMonthAlive++; + } + Calendar c = Calendar.getInstance(); + c.setTime(today); + c.add(Calendar.MONTH, -1); + today = c.getTime(); } - public List getRecords() { - return records; + return (nbMonthAlive > 0); // will return false for endpoints not present in the window + } + + public class AvailEpMonthRecord { + String date = null; // yyyy-MM + double 
nbTests = 0; + double nbSuccessTest = 0; + + public AvailEpMonthRecord(String date) { + this.date = date; } - public void setRecords(List records) { - this.records = records; + public void addTest(boolean isAvailable) { + nbTests++; + if (isAvailable) nbSuccessTest++; } - public void addResult(Date date, boolean isAvailable) { - AvailEpMonthRecord record = new AvailEpMonthRecord(sdf.format(date)); - int indexMonth = records.indexOf(record); - if (indexMonth > -1) { // means it already exists - records.get(indexMonth).addTest(isAvailable); - } else { - record.addTest(isAvailable); - records.add(record); - } + public String getDate() { + return date; } - public List getAvailPerMonth() { - List availPerMonthList = new ArrayList<>(); - for (int i = 0; i < records.size(); i++) { - AvailEpMonthRecord record = records.get(i); - String[] availPerMonth = - new String[] { - record.getDate(), df.format(record.getNbSuccessTest() / record.getNbTests()) - }; - availPerMonthList.add(availPerMonth); - } - return availPerMonthList; + public void setDate(String date) { + this.date = date; } - @Override - public boolean equals(Object object) { - if (object != null && object instanceof AvailEp) - return ((AvailEp) object).getEpURI().equals(this.epURI); - return false; + public double getNbTests() { + return nbTests; } - public void prettyPrint() { - System.out.println(epURI); - for (AvailEpMonthRecord record : records) { - System.out.println( - "\t[" - + record.getDate() - + "]\t" - + df.format(record.getNbSuccessTest() / record.getNbTests())); - } + public void setNbTests(int nbTests) { + this.nbTests = nbTests; } - public void uriPrint() { - System.out.println(epURI); + public double getNbSuccessTest() { + return nbSuccessTest; } - public boolean isAlive(int monthWindow) { - int nbMonthAlive = 0; - Date today = new Date(); - - for (int i = 0; i < monthWindow; i++) { - int indexMonth = records.indexOf(new AvailEpMonthRecord(sdf.format(today))); - if (indexMonth > -1) { // means it 
already exists - AvailEpMonthRecord record = records.get(indexMonth); - if (record.getNbSuccessTest() / record.getNbTests() > 0) nbMonthAlive++; - } - Calendar c = Calendar.getInstance(); - c.setTime(today); - c.add(Calendar.MONTH, -1); - today = c.getTime(); - } - - return (nbMonthAlive > 0); // will return false for endpoints not present in the window + public void setNbSuccessTest(int nbSuccessTest) { + this.nbSuccessTest = nbSuccessTest; } - public class AvailEpMonthRecord { - String date = null; // yyyy-MM - double nbTests = 0; - double nbSuccessTest = 0; - - public AvailEpMonthRecord(String date) { - this.date = date; - } - - public void addTest(boolean isAvailable) { - nbTests++; - if (isAvailable) nbSuccessTest++; - } - - public String getDate() { - return date; - } - - public void setDate(String date) { - this.date = date; - } - - public double getNbTests() { - return nbTests; - } - - public void setNbTests(int nbTests) { - this.nbTests = nbTests; - } - - public double getNbSuccessTest() { - return nbSuccessTest; - } - - public void setNbSuccessTest(int nbSuccessTest) { - this.nbSuccessTest = nbSuccessTest; - } - - @Override - public boolean equals(Object object) { - if (object != null && object instanceof AvailEpMonthRecord) - return ((AvailEpMonthRecord) object).getDate().equals(this.date); - return false; - } + @Override + public boolean equals(Object object) { + if (object != null && object instanceof AvailEpMonthRecord) + return ((AvailEpMonthRecord) object).getDate().equals(this.date); + return false; } + } } diff --git a/backend/src/main/java/sparqles/paper/objects/AvailEpFromList.java b/backend/src/main/java/sparqles/paper/objects/AvailEpFromList.java index a35322ec..0e55e750 100644 --- a/backend/src/main/java/sparqles/paper/objects/AvailEpFromList.java +++ b/backend/src/main/java/sparqles/paper/objects/AvailEpFromList.java @@ -1,13 +1,13 @@ package sparqles.paper.objects; public class AvailEpFromList { - private String uri = null; + private String 
uri = null; - public String getUri() { - return uri; - } + public String getUri() { + return uri; + } - public void setUri(String uri) { - this.uri = uri; - } + public void setUri(String uri) { + this.uri = uri; + } } diff --git a/backend/src/main/java/sparqles/paper/objects/AvailEvolMonthList.java b/backend/src/main/java/sparqles/paper/objects/AvailEvolMonthList.java index 0731a130..1ae7ee28 100644 --- a/backend/src/main/java/sparqles/paper/objects/AvailEvolMonthList.java +++ b/backend/src/main/java/sparqles/paper/objects/AvailEvolMonthList.java @@ -5,176 +5,175 @@ import java.util.List; public class AvailEvolMonthList { - // private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM"); - // private static DecimalFormat df = new DecimalFormat("0.####",new - // DecimalFormatSymbols(Locale.US)); - private List months = new ArrayList(); - - public void addEp(String date, float avail) { - AvailEvolMonth record = new AvailEvolMonth(date); - int indexMonth = months.indexOf(record); - if (indexMonth > -1) { // means it already exists - record = months.get(indexMonth); - } else months.add(record); // means it does not exist yet - - if (avail <= 0.05) record.addZero_five(); - else if (avail > 0.05 && avail <= 0.75) record.addFive_seventyFive(); - else if (avail > 0.75 && avail <= 0.95) record.addSeventyFive_nintyFive(); - else if (avail > 0.95 && avail <= 0.99) record.addNintyFive_nintyNine(); - else if (avail > 0.99) record.addNintyNine_hundred(); - } - - public String csvPrintNb() { - StringBuilder sb = new StringBuilder(); - sb.append("month\t0-5\t5-75\t75-95\t95-99\t99-100" + System.getProperty("line.separator")); - Collections.sort(months); - for (AvailEvolMonth month : months) { - sb.append( - month.getDate() - + "\t" - + month.getZero_five() - + "\t" - + (month.getZero_five() + month.getFive_seventyFive()) - + "\t" - + (month.getZero_five() - + month.getFive_seventyFive() - + month.getSeventyFive_nintyFive()) - + "\t" - + (month.getZero_five() - + 
month.getFive_seventyFive() - + month.getSeventyFive_nintyFive() - + month.getNintyFive_nintyNine()) - + "\t" - + (month.getZero_five() - + month.getFive_seventyFive() - + month.getSeventyFive_nintyFive() - + month.getNintyFive_nintyNine() - + month.getNintyNine_hundred()) - + System.getProperty("line.separator")); - } - return sb.toString(); - } - - public String csvPrintPercent() { - StringBuilder sb = new StringBuilder(); - sb.append("month\t0-5\t5-75\t75-95\t95-99\t99-100" + System.getProperty("line.separator")); - Collections.sort(months); - for (AvailEvolMonth month : months) { - sb.append( - month.getDate() - + "\t" - + (new Float(month.getZero_five()) / new Float(month.getNbTotal())) - * 100 - + "\t" - + (new Float(month.getZero_five() + month.getFive_seventyFive()) - / new Float(month.getNbTotal())) - * 100 - + "\t" - + (new Float( - month.getZero_five() - + month.getFive_seventyFive() - + month.getSeventyFive_nintyFive()) - / new Float(month.getNbTotal())) - * 100 - + "\t" - + (new Float( - month.getZero_five() - + month.getFive_seventyFive() - + month.getSeventyFive_nintyFive() - + month.getNintyFive_nintyNine()) - / new Float(month.getNbTotal())) - * 100 - + "\t" - + (new Float( - month.getZero_five() - + month.getFive_seventyFive() - + month.getSeventyFive_nintyFive() - + month.getNintyFive_nintyNine() - + month.getNintyNine_hundred()) - / new Float(month.getNbTotal())) - * 100 - + System.getProperty("line.separator")); - } - return sb.toString(); - } - - private class AvailEvolMonth implements Comparable { - String date = null; // yyyy-MM - long zero_five = 0; - long five_seventyFive = 0; - long seventyFive_nintyFive = 0; - long nintyFive_nintyNine = 0; - long nintyNine_hundred = 0; - long nbTotal = 0; - - public AvailEvolMonth(String date) { - this.date = date; - } - - public String getDate() { - return date; - } - - public long getZero_five() { - return zero_five; - } - - public void addZero_five() { - this.zero_five++; - this.nbTotal++; - } - - 
public long getFive_seventyFive() { - return five_seventyFive; - } - - public void addFive_seventyFive() { - this.five_seventyFive++; - this.nbTotal++; - } - - public long getSeventyFive_nintyFive() { - return seventyFive_nintyFive; - } - - public void addSeventyFive_nintyFive() { - this.seventyFive_nintyFive++; - this.nbTotal++; - } - - public long getNintyFive_nintyNine() { - return nintyFive_nintyNine; - } - - public void addNintyFive_nintyNine() { - this.nintyFive_nintyNine++; - this.nbTotal++; - } - - public long getNintyNine_hundred() { - return nintyNine_hundred; - } - - public void addNintyNine_hundred() { - this.nintyNine_hundred++; - this.nbTotal++; - } - - public long getNbTotal() { - return nbTotal; - } - - @Override - public boolean equals(Object object) { - if (object != null && object instanceof AvailEvolMonth) - return ((AvailEvolMonth) object).getDate().equals(this.date); - return false; - } - - @Override - public int compareTo(AvailEvolMonth arg0) { - // TODO Auto-generated method stub - return this.getDate().compareTo(arg0.getDate()); - } + // private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM"); + // private static DecimalFormat df = new DecimalFormat("0.####",new + // DecimalFormatSymbols(Locale.US)); + private List months = new ArrayList(); + + public void addEp(String date, float avail) { + AvailEvolMonth record = new AvailEvolMonth(date); + int indexMonth = months.indexOf(record); + if (indexMonth > -1) { // means it already exists + record = months.get(indexMonth); + } else months.add(record); // means it does not exist yet + + if (avail <= 0.05) record.addZero_five(); + else if (avail > 0.05 && avail <= 0.75) record.addFive_seventyFive(); + else if (avail > 0.75 && avail <= 0.95) record.addSeventyFive_nintyFive(); + else if (avail > 0.95 && avail <= 0.99) record.addNintyFive_nintyNine(); + else if (avail > 0.99) record.addNintyNine_hundred(); + } + + public String csvPrintNb() { + StringBuilder sb = new StringBuilder(); + 
sb.append("month\t0-5\t5-75\t75-95\t95-99\t99-100" + System.getProperty("line.separator")); + Collections.sort(months); + for (AvailEvolMonth month : months) { + sb.append( + month.getDate() + + "\t" + + month.getZero_five() + + "\t" + + (month.getZero_five() + month.getFive_seventyFive()) + + "\t" + + (month.getZero_five() + + month.getFive_seventyFive() + + month.getSeventyFive_nintyFive()) + + "\t" + + (month.getZero_five() + + month.getFive_seventyFive() + + month.getSeventyFive_nintyFive() + + month.getNintyFive_nintyNine()) + + "\t" + + (month.getZero_five() + + month.getFive_seventyFive() + + month.getSeventyFive_nintyFive() + + month.getNintyFive_nintyNine() + + month.getNintyNine_hundred()) + + System.getProperty("line.separator")); } + return sb.toString(); + } + + public String csvPrintPercent() { + StringBuilder sb = new StringBuilder(); + sb.append("month\t0-5\t5-75\t75-95\t95-99\t99-100" + System.getProperty("line.separator")); + Collections.sort(months); + for (AvailEvolMonth month : months) { + sb.append( + month.getDate() + + "\t" + + (new Float(month.getZero_five()) / new Float(month.getNbTotal())) * 100 + + "\t" + + (new Float(month.getZero_five() + month.getFive_seventyFive()) + / new Float(month.getNbTotal())) + * 100 + + "\t" + + (new Float( + month.getZero_five() + + month.getFive_seventyFive() + + month.getSeventyFive_nintyFive()) + / new Float(month.getNbTotal())) + * 100 + + "\t" + + (new Float( + month.getZero_five() + + month.getFive_seventyFive() + + month.getSeventyFive_nintyFive() + + month.getNintyFive_nintyNine()) + / new Float(month.getNbTotal())) + * 100 + + "\t" + + (new Float( + month.getZero_five() + + month.getFive_seventyFive() + + month.getSeventyFive_nintyFive() + + month.getNintyFive_nintyNine() + + month.getNintyNine_hundred()) + / new Float(month.getNbTotal())) + * 100 + + System.getProperty("line.separator")); + } + return sb.toString(); + } + + private class AvailEvolMonth implements Comparable { + String date = null; 
// yyyy-MM + long zero_five = 0; + long five_seventyFive = 0; + long seventyFive_nintyFive = 0; + long nintyFive_nintyNine = 0; + long nintyNine_hundred = 0; + long nbTotal = 0; + + public AvailEvolMonth(String date) { + this.date = date; + } + + public String getDate() { + return date; + } + + public long getZero_five() { + return zero_five; + } + + public void addZero_five() { + this.zero_five++; + this.nbTotal++; + } + + public long getFive_seventyFive() { + return five_seventyFive; + } + + public void addFive_seventyFive() { + this.five_seventyFive++; + this.nbTotal++; + } + + public long getSeventyFive_nintyFive() { + return seventyFive_nintyFive; + } + + public void addSeventyFive_nintyFive() { + this.seventyFive_nintyFive++; + this.nbTotal++; + } + + public long getNintyFive_nintyNine() { + return nintyFive_nintyNine; + } + + public void addNintyFive_nintyNine() { + this.nintyFive_nintyNine++; + this.nbTotal++; + } + + public long getNintyNine_hundred() { + return nintyNine_hundred; + } + + public void addNintyNine_hundred() { + this.nintyNine_hundred++; + this.nbTotal++; + } + + public long getNbTotal() { + return nbTotal; + } + + @Override + public boolean equals(Object object) { + if (object != null && object instanceof AvailEvolMonth) + return ((AvailEvolMonth) object).getDate().equals(this.date); + return false; + } + + @Override + public int compareTo(AvailEvolMonth arg0) { + // TODO Auto-generated method stub + return this.getDate().compareTo(arg0.getDate()); + } + } } diff --git a/backend/src/main/java/sparqles/paper/objects/AvailIndexJson.java b/backend/src/main/java/sparqles/paper/objects/AvailIndexJson.java index 1a754f59..a7752b5b 100644 --- a/backend/src/main/java/sparqles/paper/objects/AvailIndexJson.java +++ b/backend/src/main/java/sparqles/paper/objects/AvailIndexJson.java @@ -8,75 +8,76 @@ import java.util.Locale; public class AvailIndexJson { - List availability = new ArrayList(); + List availability = new ArrayList(); - public void 
addHeader(String header) { - String[] parts = header.split("\\t"); - for (int i = 1; i < parts.length; i++) { - AvailKeyValue record = new AvailKeyValue(parts[i], i); - availability.add(record); - } + public void addHeader(String header) { + String[] parts = header.split("\\t"); + for (int i = 1; i < parts.length; i++) { + AvailKeyValue record = new AvailKeyValue(parts[i], i); + availability.add(record); } + } - public void addValue(String value) { - try { - String[] parts = value.split("\\t"); - if (parts.length > 1) { - int previousPart = 0; - for (int i = 1; i < parts.length; i++) { - SimpleDateFormat parserSDF = new SimpleDateFormat("MMM-yy", Locale.UK); - final Date date = parserSDF.parse(parts[0]); - final int currentPart = Integer.parseInt(parts[i]); - final int finalPreviousPart = previousPart; - for (AvailKeyValue record : availability) { - if (record.getIndex() == i) { - record.getValues() - .add( - new ArrayList() { - { - add(date.getTime()); - add(new Long(currentPart - finalPreviousPart)); - } - }); - break; + public void addValue(String value) { + try { + String[] parts = value.split("\\t"); + if (parts.length > 1) { + int previousPart = 0; + for (int i = 1; i < parts.length; i++) { + SimpleDateFormat parserSDF = new SimpleDateFormat("MMM-yy", Locale.UK); + final Date date = parserSDF.parse(parts[0]); + final int currentPart = Integer.parseInt(parts[i]); + final int finalPreviousPart = previousPart; + for (AvailKeyValue record : availability) { + if (record.getIndex() == i) { + record + .getValues() + .add( + new ArrayList() { + { + add(date.getTime()); + add(new Long(currentPart - finalPreviousPart)); } - } - previousPart = Integer.parseInt(parts[i]); - } + }); + break; } - } catch (ParseException e) { - e.printStackTrace(); + } + previousPart = Integer.parseInt(parts[i]); } + } + } catch (ParseException e) { + e.printStackTrace(); } + } - private class AvailKeyValue { - private String key = null; - private int index = 0; - private List> values = new 
ArrayList>(); + private class AvailKeyValue { + private String key = null; + private int index = 0; + private List> values = new ArrayList>(); - public AvailKeyValue(String key, int index) { - this.key = key; - this.index = index; - } + public AvailKeyValue(String key, int index) { + this.key = key; + this.index = index; + } - public String getKey() { - return key; - } + public String getKey() { + return key; + } - public void setKey(String key) { - this.key = key; - } + public void setKey(String key) { + this.key = key; + } - public int getIndex() { - return index; - } + public int getIndex() { + return index; + } - public void setIndex(int index) { - this.index = index; - } + public void setIndex(int index) { + this.index = index; + } - public List> getValues() { - return values; - } + public List> getValues() { + return values; } + } } diff --git a/backend/src/main/java/sparqles/paper/objects/AvailJson.java b/backend/src/main/java/sparqles/paper/objects/AvailJson.java index c5a98fd9..7e318cea 100644 --- a/backend/src/main/java/sparqles/paper/objects/AvailJson.java +++ b/backend/src/main/java/sparqles/paper/objects/AvailJson.java @@ -3,86 +3,86 @@ import java.util.Date; public class AvailJson { - private boolean isAvailable = false; - private EndpointResult endpointResult; + private boolean isAvailable = false; + private EndpointResult endpointResult; - public AvailJson() {} + public AvailJson() {} - public boolean isAvailable() { - return isAvailable; - } + public boolean isAvailable() { + return isAvailable; + } - public void setAvailable(boolean isAvailable) { - this.isAvailable = isAvailable; - } + public void setAvailable(boolean isAvailable) { + this.isAvailable = isAvailable; + } - public EndpointResult getEndpointResult() { - return endpointResult; - } + public EndpointResult getEndpointResult() { + return endpointResult; + } - public void setEndpointResult(EndpointResult endpointResult) { - this.endpointResult = endpointResult; - } + public void 
setEndpointResult(EndpointResult endpointResult) { + this.endpointResult = endpointResult; + } - public String getSPARQLURI() { - if (endpointResult != null && endpointResult.getEndpoint() != null) - return endpointResult.getEndpoint().getUri(); - return null; - } + public String getSPARQLURI() { + if (endpointResult != null && endpointResult.getEndpoint() != null) + return endpointResult.getEndpoint().getUri(); + return null; + } - public Date getStartDate() { - if (endpointResult != null) return new Date(endpointResult.getStart()); - return null; - } + public Date getStartDate() { + if (endpointResult != null) return new Date(endpointResult.getStart()); + return null; + } - public Date getEndDate() { - if (endpointResult != null) return new Date(endpointResult.getEnd()); - return null; - } + public Date getEndDate() { + if (endpointResult != null) return new Date(endpointResult.getEnd()); + return null; + } - public class EndpointResult { - private long start; - private long end; - private Endpoint endpoint; + public class EndpointResult { + private long start; + private long end; + private Endpoint endpoint; - public EndpointResult() {} + public EndpointResult() {} - public long getStart() { - return start; - } + public long getStart() { + return start; + } - public void setStart(long start) { - this.start = start; - } + public void setStart(long start) { + this.start = start; + } - public long getEnd() { - return end; - } + public long getEnd() { + return end; + } - public void setEnd(long end) { - this.end = end; - } + public void setEnd(long end) { + this.end = end; + } - public Endpoint getEndpoint() { - return endpoint; - } + public Endpoint getEndpoint() { + return endpoint; + } - public void setEndpoint(Endpoint endpoint) { - this.endpoint = endpoint; - } + public void setEndpoint(Endpoint endpoint) { + this.endpoint = endpoint; } + } - public class Endpoint { - private String uri; + public class Endpoint { + private String uri; - public Endpoint() {} + 
public Endpoint() {} - public String getUri() { - return uri; - } + public String getUri() { + return uri; + } - public void setUri(String uri) { - this.uri = uri; - } + public void setUri(String uri) { + this.uri = uri; } + } } diff --git a/backend/src/main/java/sparqles/schedule/CronExpression.java b/backend/src/main/java/sparqles/schedule/CronExpression.java index a91133f5..999ec3d0 100644 --- a/backend/src/main/java/sparqles/schedule/CronExpression.java +++ b/backend/src/main/java/sparqles/schedule/CronExpression.java @@ -164,1395 +164,1385 @@ */ public class CronExpression implements Serializable, Cloneable { - protected static final int SECOND = 0; - protected static final int MINUTE = 1; - protected static final int HOUR = 2; - protected static final int DAY_OF_MONTH = 3; - protected static final int MONTH = 4; - protected static final int DAY_OF_WEEK = 5; - protected static final int YEAR = 6; - protected static final int ALL_SPEC_INT = 99; // '*' - protected static final int NO_SPEC_INT = 98; // '?' 
- protected static final Integer ALL_SPEC = new Integer(ALL_SPEC_INT); - protected static final Integer NO_SPEC = new Integer(NO_SPEC_INT); - protected static final Map monthMap = new HashMap(20); - protected static final Map dayMap = new HashMap(60); - private static final long serialVersionUID = 12423409423L; - - static { - monthMap.put("JAN", new Integer(0)); - monthMap.put("FEB", new Integer(1)); - monthMap.put("MAR", new Integer(2)); - monthMap.put("APR", new Integer(3)); - monthMap.put("MAY", new Integer(4)); - monthMap.put("JUN", new Integer(5)); - monthMap.put("JUL", new Integer(6)); - monthMap.put("AUG", new Integer(7)); - monthMap.put("SEP", new Integer(8)); - monthMap.put("OCT", new Integer(9)); - monthMap.put("NOV", new Integer(10)); - monthMap.put("DEC", new Integer(11)); - - dayMap.put("SUN", new Integer(1)); - dayMap.put("MON", new Integer(2)); - dayMap.put("TUE", new Integer(3)); - dayMap.put("WED", new Integer(4)); - dayMap.put("THU", new Integer(5)); - dayMap.put("FRI", new Integer(6)); - dayMap.put("SAT", new Integer(7)); + protected static final int SECOND = 0; + protected static final int MINUTE = 1; + protected static final int HOUR = 2; + protected static final int DAY_OF_MONTH = 3; + protected static final int MONTH = 4; + protected static final int DAY_OF_WEEK = 5; + protected static final int YEAR = 6; + protected static final int ALL_SPEC_INT = 99; // '*' + protected static final int NO_SPEC_INT = 98; // '?' 
+ protected static final Integer ALL_SPEC = new Integer(ALL_SPEC_INT); + protected static final Integer NO_SPEC = new Integer(NO_SPEC_INT); + protected static final Map monthMap = new HashMap(20); + protected static final Map dayMap = new HashMap(60); + private static final long serialVersionUID = 12423409423L; + + static { + monthMap.put("JAN", new Integer(0)); + monthMap.put("FEB", new Integer(1)); + monthMap.put("MAR", new Integer(2)); + monthMap.put("APR", new Integer(3)); + monthMap.put("MAY", new Integer(4)); + monthMap.put("JUN", new Integer(5)); + monthMap.put("JUL", new Integer(6)); + monthMap.put("AUG", new Integer(7)); + monthMap.put("SEP", new Integer(8)); + monthMap.put("OCT", new Integer(9)); + monthMap.put("NOV", new Integer(10)); + monthMap.put("DEC", new Integer(11)); + + dayMap.put("SUN", new Integer(1)); + dayMap.put("MON", new Integer(2)); + dayMap.put("TUE", new Integer(3)); + dayMap.put("WED", new Integer(4)); + dayMap.put("THU", new Integer(5)); + dayMap.put("FRI", new Integer(6)); + dayMap.put("SAT", new Integer(7)); + } + + protected transient TreeSet seconds; + protected transient TreeSet minutes; + protected transient TreeSet hours; + protected transient TreeSet daysOfMonth; + protected transient TreeSet months; + protected transient TreeSet daysOfWeek; + protected transient TreeSet years; + protected transient boolean lastdayOfWeek = false; + protected transient int nthdayOfWeek = 0; + protected transient boolean lastdayOfMonth = false; + protected transient boolean nearestWeekday = false; + protected transient boolean expressionParsed = false; + private String cronExpression = null; + private TimeZone timeZone = null; + + /** + * Constructs a new CronExpression based on the specified parameter. 
+ * + * @param cronExpression String representation of the cron expression the new object should + * represent + * @throws java.text.ParseException if the string expression cannot be parsed into a valid + * CronExpression + */ + public CronExpression(String cronExpression) throws ParseException { + if (cronExpression == null) { + throw new IllegalArgumentException("cronExpression cannot be null"); } - protected transient TreeSet seconds; - protected transient TreeSet minutes; - protected transient TreeSet hours; - protected transient TreeSet daysOfMonth; - protected transient TreeSet months; - protected transient TreeSet daysOfWeek; - protected transient TreeSet years; - protected transient boolean lastdayOfWeek = false; - protected transient int nthdayOfWeek = 0; - protected transient boolean lastdayOfMonth = false; - protected transient boolean nearestWeekday = false; - protected transient boolean expressionParsed = false; - private String cronExpression = null; - private TimeZone timeZone = null; - - /** - * Constructs a new CronExpression based on the specified parameter. 
- * - * @param cronExpression String representation of the cron expression the new object should - * represent - * @throws java.text.ParseException if the string expression cannot be parsed into a valid - * - * CronExpression - */ - public CronExpression(String cronExpression) throws ParseException { - if (cronExpression == null) { - throw new IllegalArgumentException("cronExpression cannot be null"); - } - - this.cronExpression = cronExpression.toUpperCase(Locale.US); + this.cronExpression = cronExpression.toUpperCase(Locale.US); - buildExpression(this.cronExpression); - } + buildExpression(this.cronExpression); + } - /** - * Indicates whether the specified cron expression can be parsed into a valid cron expression - * - * @param cronExpression the expression to evaluate - * @return a boolean indicating whether the given expression is a valid cron expression - */ - public static boolean isValidExpression(String cronExpression) { - - try { - new CronExpression(cronExpression); - } catch (ParseException pe) { - return false; - } + /** + * Indicates whether the specified cron expression can be parsed into a valid cron expression + * + * @param cronExpression the expression to evaluate + * @return a boolean indicating whether the given expression is a valid cron expression + */ + public static boolean isValidExpression(String cronExpression) { - return true; + try { + new CronExpression(cronExpression); + } catch (ParseException pe) { + return false; } - /** - * Indicates whether the given date satisfies the cron expression. Note that milliseconds are - * ignored, so two Dates falling on different milliseconds of the same second will always have - * the same result here. 
- * - * @param date the date to evaluate - * @return a boolean indicating whether the given date satisfies the cron expression - */ - public boolean isSatisfiedBy(Date date) { - Calendar testDateCal = Calendar.getInstance(getTimeZone()); - testDateCal.setTime(date); - testDateCal.set(Calendar.MILLISECOND, 0); - Date originalDate = testDateCal.getTime(); - - testDateCal.add(Calendar.SECOND, -1); - - Date timeAfter = getTimeAfter(testDateCal.getTime()); - - return ((timeAfter != null) && (timeAfter.equals(originalDate))); + return true; + } + + /** + * Indicates whether the given date satisfies the cron expression. Note that milliseconds are + * ignored, so two Dates falling on different milliseconds of the same second will always have the + * same result here. + * + * @param date the date to evaluate + * @return a boolean indicating whether the given date satisfies the cron expression + */ + public boolean isSatisfiedBy(Date date) { + Calendar testDateCal = Calendar.getInstance(getTimeZone()); + testDateCal.setTime(date); + testDateCal.set(Calendar.MILLISECOND, 0); + Date originalDate = testDateCal.getTime(); + + testDateCal.add(Calendar.SECOND, -1); + + Date timeAfter = getTimeAfter(testDateCal.getTime()); + + return ((timeAfter != null) && (timeAfter.equals(originalDate))); + } + + /** + * Returns the next date/time after the given date/time which satisfies the cron + * expression. 
+ * + * @param date the date/time at which to begin the search for the next valid date/time + * @return the next valid date/time + */ + public Date getNextValidTimeAfter(Date date) { + return getTimeAfter(date); + } + + /** + * Returns the next date/time after the given date/time which does not satisfy the + * expression + * + * @param date the date/time at which to begin the search for the next invalid date/time + * @return the next valid date/time + */ + public Date getNextInvalidTimeAfter(Date date) { + long difference = 1000; + + // move back to the nearest second so differences will be accurate + Calendar adjustCal = Calendar.getInstance(getTimeZone()); + adjustCal.setTime(date); + adjustCal.set(Calendar.MILLISECOND, 0); + Date lastDate = adjustCal.getTime(); + + Date newDate = null; + + // TODO: (QUARTZ-481) IMPROVE THIS! The following is a BAD solution to this problem. + // Performance + // will be very bad here, depending on the cron expression. It is, however A solution. + + // keep getting the next included time until it's farther than one second + // apart. At that point, lastDate is the last valid fire time. We return + // the second immediately following it. + while (difference == 1000) { + newDate = getTimeAfter(lastDate); + + difference = newDate.getTime() - lastDate.getTime(); + + if (difference == 1000) { + lastDate = newDate; + } } - /** - * Returns the next date/time after the given date/time which satisfies the cron - * expression. - * - * @param date the date/time at which to begin the search for the next valid date/time - * @return the next valid date/time - */ - public Date getNextValidTimeAfter(Date date) { - return getTimeAfter(date); + return new Date(lastDate.getTime() + 1000); + } + + /** Returns the time zone for which this CronExpression will be resolved. 
*/ + public TimeZone getTimeZone() { + if (timeZone == null) { + timeZone = TimeZone.getDefault(); } - /** - * Returns the next date/time after the given date/time which does not satisfy the - * expression - * - * @param date the date/time at which to begin the search for the next invalid date/time - * @return the next valid date/time - */ - public Date getNextInvalidTimeAfter(Date date) { - long difference = 1000; - - // move back to the nearest second so differences will be accurate - Calendar adjustCal = Calendar.getInstance(getTimeZone()); - adjustCal.setTime(date); - adjustCal.set(Calendar.MILLISECOND, 0); - Date lastDate = adjustCal.getTime(); - - Date newDate = null; - - // TODO: (QUARTZ-481) IMPROVE THIS! The following is a BAD solution to this problem. - // Performance - // will be very bad here, depending on the cron expression. It is, however A solution. - - // keep getting the next included time until it's farther than one second - // apart. At that point, lastDate is the last valid fire time. We return - // the second immediately following it. - while (difference == 1000) { - newDate = getTimeAfter(lastDate); - - difference = newDate.getTime() - lastDate.getTime(); - - if (difference == 1000) { - lastDate = newDate; - } + return timeZone; + } + + /** Sets the time zone for which this CronExpression will be resolved. 
*/ + public void setTimeZone(TimeZone timeZone) { + this.timeZone = timeZone; + } + + /** + * Returns the string representation of the CronExpression + * + * @return a string representation of the CronExpression + */ + public String toString() { + return cronExpression; + } + + //////////////////////////////////////////////////////////////////////////// + // + // Expression Parsing Functions + // + //////////////////////////////////////////////////////////////////////////// + + protected void buildExpression(String expression) throws ParseException { + expressionParsed = true; + + try { + + if (seconds == null) { + seconds = new TreeSet(); + } + if (minutes == null) { + minutes = new TreeSet(); + } + if (hours == null) { + hours = new TreeSet(); + } + if (daysOfMonth == null) { + daysOfMonth = new TreeSet(); + } + if (months == null) { + months = new TreeSet(); + } + if (daysOfWeek == null) { + daysOfWeek = new TreeSet(); + } + if (years == null) { + years = new TreeSet(); + } + + int exprOn = SECOND; + + StringTokenizer exprsTok = new StringTokenizer(expression, " \t", false); + + while (exprsTok.hasMoreTokens() && exprOn <= YEAR) { + String expr = exprsTok.nextToken().trim(); + + // throw an exception if L is used with other days of the month + if (exprOn == DAY_OF_MONTH + && expr.indexOf('L') != -1 + && expr.length() > 1 + && expr.indexOf(",") >= 0) { + throw new ParseException( + "Support for specifying 'L' and 'LW' with other days of the month is" + + " not implemented", + -1); + } + // throw an exception if L is used with other days of the week + if (exprOn == DAY_OF_WEEK + && expr.indexOf('L') != -1 + && expr.length() > 1 + && expr.indexOf(",") >= 0) { + throw new ParseException( + "Support for specifying 'L' with other days of the week is not" + " implemented", -1); } - return new Date(lastDate.getTime() + 1000); - } - - /** Returns the time zone for which this CronExpression will be resolved. 
*/ - public TimeZone getTimeZone() { - if (timeZone == null) { - timeZone = TimeZone.getDefault(); + StringTokenizer vTok = new StringTokenizer(expr, ","); + while (vTok.hasMoreTokens()) { + String v = vTok.nextToken(); + storeExpressionVals(0, v, exprOn); } - return timeZone; + exprOn++; + } + + if (exprOn <= DAY_OF_WEEK) { + throw new ParseException("Unexpected end of expression.", expression.length()); + } + + if (exprOn <= YEAR) { + storeExpressionVals(0, "*", YEAR); + } + + TreeSet dow = getSet(DAY_OF_WEEK); + TreeSet dom = getSet(DAY_OF_MONTH); + + // Copying the logic from the UnsupportedOperationException below + boolean dayOfMSpec = !dom.contains(NO_SPEC); + boolean dayOfWSpec = !dow.contains(NO_SPEC); + + if (dayOfMSpec && !dayOfWSpec) { + // skip + } else if (dayOfWSpec && !dayOfMSpec) { + // skip + } else { + throw new ParseException( + "Support for specifying both a day-of-week AND a day-of-month parameter is" + + " not implemented.", + 0); + } + } catch (ParseException pe) { + throw pe; + } catch (Exception e) { + throw new ParseException("Illegal cron expression format (" + e.toString() + ")", 0); } + } - /** Sets the time zone for which this CronExpression will be resolved. 
*/ - public void setTimeZone(TimeZone timeZone) { - this.timeZone = timeZone; - } + protected int storeExpressionVals(int pos, String s, int type) throws ParseException { - /** - * Returns the string representation of the CronExpression - * - * @return a string representation of the CronExpression - */ - public String toString() { - return cronExpression; + int incr = 0; + int i = skipWhiteSpace(pos, s); + if (i >= s.length()) { + return i; } - - //////////////////////////////////////////////////////////////////////////// - // - // Expression Parsing Functions - // - //////////////////////////////////////////////////////////////////////////// - - protected void buildExpression(String expression) throws ParseException { - expressionParsed = true; - - try { - - if (seconds == null) { - seconds = new TreeSet(); - } - if (minutes == null) { - minutes = new TreeSet(); - } - if (hours == null) { - hours = new TreeSet(); - } - if (daysOfMonth == null) { - daysOfMonth = new TreeSet(); - } - if (months == null) { - months = new TreeSet(); - } - if (daysOfWeek == null) { - daysOfWeek = new TreeSet(); - } - if (years == null) { - years = new TreeSet(); - } - - int exprOn = SECOND; - - StringTokenizer exprsTok = new StringTokenizer(expression, " \t", false); - - while (exprsTok.hasMoreTokens() && exprOn <= YEAR) { - String expr = exprsTok.nextToken().trim(); - - // throw an exception if L is used with other days of the month - if (exprOn == DAY_OF_MONTH - && expr.indexOf('L') != -1 - && expr.length() > 1 - && expr.indexOf(",") >= 0) { - throw new ParseException( - "Support for specifying 'L' and 'LW' with other days of the month is" - + " not implemented", - -1); - } - // throw an exception if L is used with other days of the week - if (exprOn == DAY_OF_WEEK - && expr.indexOf('L') != -1 - && expr.length() > 1 - && expr.indexOf(",") >= 0) { - throw new ParseException( - "Support for specifying 'L' with other days of the week is not" - + " implemented", - -1); - } - - 
StringTokenizer vTok = new StringTokenizer(expr, ","); - while (vTok.hasMoreTokens()) { - String v = vTok.nextToken(); - storeExpressionVals(0, v, exprOn); - } - - exprOn++; - } - - if (exprOn <= DAY_OF_WEEK) { - throw new ParseException("Unexpected end of expression.", expression.length()); - } - - if (exprOn <= YEAR) { - storeExpressionVals(0, "*", YEAR); - } - - TreeSet dow = getSet(DAY_OF_WEEK); - TreeSet dom = getSet(DAY_OF_MONTH); - - // Copying the logic from the UnsupportedOperationException below - boolean dayOfMSpec = !dom.contains(NO_SPEC); - boolean dayOfWSpec = !dow.contains(NO_SPEC); - - if (dayOfMSpec && !dayOfWSpec) { - // skip - } else if (dayOfWSpec && !dayOfMSpec) { - // skip - } else { - throw new ParseException( - "Support for specifying both a day-of-week AND a day-of-month parameter is" - + " not implemented.", - 0); - } - } catch (ParseException pe) { - throw pe; - } catch (Exception e) { - throw new ParseException("Illegal cron expression format (" + e.toString() + ")", 0); + char c = s.charAt(i); + if ((c >= 'A') && (c <= 'Z') && (!s.equals("L")) && (!s.equals("LW"))) { + String sub = s.substring(i, i + 3); + int sval = -1; + int eval = -1; + if (type == MONTH) { + sval = getMonthNumber(sub) + 1; + if (sval <= 0) { + throw new ParseException("Invalid Month value: '" + sub + "'", i); } - } - - protected int storeExpressionVals(int pos, String s, int type) throws ParseException { - - int incr = 0; - int i = skipWhiteSpace(pos, s); - if (i >= s.length()) { - return i; + if (s.length() > i + 3) { + c = s.charAt(i + 3); + if (c == '-') { + i += 4; + sub = s.substring(i, i + 3); + eval = getMonthNumber(sub) + 1; + if (eval <= 0) { + throw new ParseException("Invalid Month value: '" + sub + "'", i); + } + } } - char c = s.charAt(i); - if ((c >= 'A') && (c <= 'Z') && (!s.equals("L")) && (!s.equals("LW"))) { - String sub = s.substring(i, i + 3); - int sval = -1; - int eval = -1; - if (type == MONTH) { - sval = getMonthNumber(sub) + 1; - if (sval <= 
0) { - throw new ParseException("Invalid Month value: '" + sub + "'", i); - } - if (s.length() > i + 3) { - c = s.charAt(i + 3); - if (c == '-') { - i += 4; - sub = s.substring(i, i + 3); - eval = getMonthNumber(sub) + 1; - if (eval <= 0) { - throw new ParseException("Invalid Month value: '" + sub + "'", i); - } - } - } - } else if (type == DAY_OF_WEEK) { - sval = getDayOfWeekNumber(sub); - if (sval < 0) { - throw new ParseException("Invalid Day-of-Week value: '" + sub + "'", i); - } - if (s.length() > i + 3) { - c = s.charAt(i + 3); - if (c == '-') { - i += 4; - sub = s.substring(i, i + 3); - eval = getDayOfWeekNumber(sub); - if (eval < 0) { - throw new ParseException("Invalid Day-of-Week value: '" + sub + "'", i); - } - } else if (c == '#') { - try { - i += 4; - nthdayOfWeek = Integer.parseInt(s.substring(i)); - if (nthdayOfWeek < 1 || nthdayOfWeek > 5) { - throw new Exception(); - } - } catch (Exception e) { - throw new ParseException( - "A numeric value between 1 and 5 must follow the '#' option", - i); - } - } else if (c == 'L') { - lastdayOfWeek = true; - i++; - } - } - - } else { - throw new ParseException("Illegal characters for this position: '" + sub + "'", i); - } - if (eval != -1) { - incr = 1; - } - addToSet(sval, eval, incr, type); - return (i + 3); + } else if (type == DAY_OF_WEEK) { + sval = getDayOfWeekNumber(sub); + if (sval < 0) { + throw new ParseException("Invalid Day-of-Week value: '" + sub + "'", i); } - - if (c == '?') { - i++; - if ((i + 1) < s.length() && (s.charAt(i) != ' ' && s.charAt(i + 1) != '\t')) { - throw new ParseException("Illegal character after '?': " + s.charAt(i), i); - } - if (type != DAY_OF_WEEK && type != DAY_OF_MONTH) { - throw new ParseException( - "'?' can only be specfied for Day-of-Month or Day-of-Week.", i); - } - if (type == DAY_OF_WEEK && !lastdayOfMonth) { - int val = ((Integer) daysOfMonth.last()).intValue(); - if (val == NO_SPEC_INT) { - throw new ParseException( - "'?' 
can only be specfied for Day-of-Month -OR- Day-of-Week.", i); - } - } - - addToSet(NO_SPEC_INT, -1, 0, type); - return i; - } - - if (c == '*' || c == '/') { - if (c == '*' && (i + 1) >= s.length()) { - addToSet(ALL_SPEC_INT, -1, incr, type); - return i + 1; - } else if (c == '/' - && ((i + 1) >= s.length() - || s.charAt(i + 1) == ' ' - || s.charAt(i + 1) == '\t')) { - throw new ParseException("'/' must be followed by an integer.", i); - } else if (c == '*') { - i++; - } - c = s.charAt(i); - if (c == '/') { // is an increment specified? - i++; - if (i >= s.length()) { - throw new ParseException("Unexpected end of string.", i); - } - - incr = getNumericValue(s, i); - - i++; - if (incr > 10) { - i++; - } - if (incr > 59 && (type == SECOND || type == MINUTE)) { - throw new ParseException("Increment > 60 : " + incr, i); - } else if (incr > 23 && (type == HOUR)) { - throw new ParseException("Increment > 24 : " + incr, i); - } else if (incr > 31 && (type == DAY_OF_MONTH)) { - throw new ParseException("Increment > 31 : " + incr, i); - } else if (incr > 7 && (type == DAY_OF_WEEK)) { - throw new ParseException("Increment > 7 : " + incr, i); - } else if (incr > 12 && (type == MONTH)) { - throw new ParseException("Increment > 12 : " + incr, i); - } - } else { - incr = 1; - } - - addToSet(ALL_SPEC_INT, -1, incr, type); - return i; - } else if (c == 'L') { - i++; - if (type == DAY_OF_MONTH) { - lastdayOfMonth = true; - } - if (type == DAY_OF_WEEK) { - addToSet(7, 7, 0, type); - } - if (type == DAY_OF_MONTH && s.length() > i) { - c = s.charAt(i); - if (c == 'W') { - nearestWeekday = true; - i++; - } + if (s.length() > i + 3) { + c = s.charAt(i + 3); + if (c == '-') { + i += 4; + sub = s.substring(i, i + 3); + eval = getDayOfWeekNumber(sub); + if (eval < 0) { + throw new ParseException("Invalid Day-of-Week value: '" + sub + "'", i); + } + } else if (c == '#') { + try { + i += 4; + nthdayOfWeek = Integer.parseInt(s.substring(i)); + if (nthdayOfWeek < 1 || nthdayOfWeek > 5) { + 
throw new Exception(); + } + } catch (Exception e) { + throw new ParseException( + "A numeric value between 1 and 5 must follow the '#' option", i); } - return i; - } else if (c >= '0' && c <= '9') { - int val = Integer.parseInt(String.valueOf(c)); + } else if (c == 'L') { + lastdayOfWeek = true; i++; - if (i >= s.length()) { - addToSet(val, -1, -1, type); - } else { - c = s.charAt(i); - if (c >= '0' && c <= '9') { - ValueSet vs = getValue(val, s, i); - val = vs.value; - i = vs.pos; - } - i = checkNext(i, s, val, type); - return i; - } - } else { - throw new ParseException("Unexpected character: " + c, i); + } } - return i; + } else { + throw new ParseException("Illegal characters for this position: '" + sub + "'", i); + } + if (eval != -1) { + incr = 1; + } + addToSet(sval, eval, incr, type); + return (i + 3); } - protected int checkNext(int pos, String s, int val, int type) throws ParseException { + if (c == '?') { + i++; + if ((i + 1) < s.length() && (s.charAt(i) != ' ' && s.charAt(i + 1) != '\t')) { + throw new ParseException("Illegal character after '?': " + s.charAt(i), i); + } + if (type != DAY_OF_WEEK && type != DAY_OF_MONTH) { + throw new ParseException("'?' can only be specfied for Day-of-Month or Day-of-Week.", i); + } + if (type == DAY_OF_WEEK && !lastdayOfMonth) { + int val = ((Integer) daysOfMonth.last()).intValue(); + if (val == NO_SPEC_INT) { + throw new ParseException( + "'?' can only be specfied for Day-of-Month -OR- Day-of-Week.", i); + } + } - int end = -1; - int i = pos; + addToSet(NO_SPEC_INT, -1, 0, type); + return i; + } + if (c == '*' || c == '/') { + if (c == '*' && (i + 1) >= s.length()) { + addToSet(ALL_SPEC_INT, -1, incr, type); + return i + 1; + } else if (c == '/' + && ((i + 1) >= s.length() || s.charAt(i + 1) == ' ' || s.charAt(i + 1) == '\t')) { + throw new ParseException("'/' must be followed by an integer.", i); + } else if (c == '*') { + i++; + } + c = s.charAt(i); + if (c == '/') { // is an increment specified? 
+ i++; if (i >= s.length()) { - addToSet(val, end, -1, type); - return i; + throw new ParseException("Unexpected end of string.", i); } - char c = s.charAt(pos); + incr = getNumericValue(s, i); - if (c == 'L') { - if (type == DAY_OF_WEEK) { - if (val < 1 || val > 7) - throw new ParseException("Day-of-Week values must be between 1 and 7", -1); - lastdayOfWeek = true; - } else { - throw new ParseException("'L' option is not valid here. (pos=" + i + ")", i); - } - TreeSet set = getSet(type); - set.add(new Integer(val)); - i++; - return i; - } - - if (c == 'W') { - if (type == DAY_OF_MONTH) { - nearestWeekday = true; - } else { - throw new ParseException("'W' option is not valid here. (pos=" + i + ")", i); - } - TreeSet set = getSet(type); - set.add(new Integer(val)); - i++; - return i; + i++; + if (incr > 10) { + i++; } - - if (c == '#') { - if (type != DAY_OF_WEEK) { - throw new ParseException("'#' option is not valid here. (pos=" + i + ")", i); - } - i++; - try { - nthdayOfWeek = Integer.parseInt(s.substring(i)); - if (nthdayOfWeek < 1 || nthdayOfWeek > 5) { - throw new Exception(); - } - } catch (Exception e) { - throw new ParseException( - "A numeric value between 1 and 5 must follow the '#' option", i); - } - - TreeSet set = getSet(type); - set.add(new Integer(val)); - i++; - return i; + if (incr > 59 && (type == SECOND || type == MINUTE)) { + throw new ParseException("Increment > 60 : " + incr, i); + } else if (incr > 23 && (type == HOUR)) { + throw new ParseException("Increment > 24 : " + incr, i); + } else if (incr > 31 && (type == DAY_OF_MONTH)) { + throw new ParseException("Increment > 31 : " + incr, i); + } else if (incr > 7 && (type == DAY_OF_WEEK)) { + throw new ParseException("Increment > 7 : " + incr, i); + } else if (incr > 12 && (type == MONTH)) { + throw new ParseException("Increment > 12 : " + incr, i); } - - if (c == '-') { - i++; - c = s.charAt(i); - int v = Integer.parseInt(String.valueOf(c)); - end = v; - i++; - if (i >= s.length()) { - 
addToSet(val, end, 1, type); - return i; - } - c = s.charAt(i); - if (c >= '0' && c <= '9') { - ValueSet vs = getValue(v, s, i); - int v1 = vs.value; - end = v1; - i = vs.pos; - } - if (i < s.length() && ((c = s.charAt(i)) == '/')) { - i++; - c = s.charAt(i); - int v2 = Integer.parseInt(String.valueOf(c)); - i++; - if (i >= s.length()) { - addToSet(val, end, v2, type); - return i; - } - c = s.charAt(i); - if (c >= '0' && c <= '9') { - ValueSet vs = getValue(v2, s, i); - int v3 = vs.value; - addToSet(val, end, v3, type); - i = vs.pos; - return i; - } else { - addToSet(val, end, v2, type); - return i; - } - } else { - addToSet(val, end, 1, type); - return i; - } + } else { + incr = 1; + } + + addToSet(ALL_SPEC_INT, -1, incr, type); + return i; + } else if (c == 'L') { + i++; + if (type == DAY_OF_MONTH) { + lastdayOfMonth = true; + } + if (type == DAY_OF_WEEK) { + addToSet(7, 7, 0, type); + } + if (type == DAY_OF_MONTH && s.length() > i) { + c = s.charAt(i); + if (c == 'W') { + nearestWeekday = true; + i++; } - - if (c == '/') { - i++; - c = s.charAt(i); - int v2 = Integer.parseInt(String.valueOf(c)); - i++; - if (i >= s.length()) { - addToSet(val, end, v2, type); - return i; - } - c = s.charAt(i); - if (c >= '0' && c <= '9') { - ValueSet vs = getValue(v2, s, i); - int v3 = vs.value; - addToSet(val, end, v3, type); - i = vs.pos; - return i; - } else { - throw new ParseException("Unexpected character '" + c + "' after '/'", i); - } + } + return i; + } else if (c >= '0' && c <= '9') { + int val = Integer.parseInt(String.valueOf(c)); + i++; + if (i >= s.length()) { + addToSet(val, -1, -1, type); + } else { + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(val, s, i); + val = vs.value; + i = vs.pos; } - - addToSet(val, end, 0, type); - i++; + i = checkNext(i, s, val, type); return i; + } + } else { + throw new ParseException("Unexpected character: " + c, i); } - public String getCronExpression() { - return cronExpression; - } + return i; + } - 
public String getExpressionSummary() { - StringBuffer buf = new StringBuffer(); - - buf.append("seconds: "); - buf.append(getExpressionSetSummary(seconds)); - buf.append("\n"); - buf.append("minutes: "); - buf.append(getExpressionSetSummary(minutes)); - buf.append("\n"); - buf.append("hours: "); - buf.append(getExpressionSetSummary(hours)); - buf.append("\n"); - buf.append("daysOfMonth: "); - buf.append(getExpressionSetSummary(daysOfMonth)); - buf.append("\n"); - buf.append("months: "); - buf.append(getExpressionSetSummary(months)); - buf.append("\n"); - buf.append("daysOfWeek: "); - buf.append(getExpressionSetSummary(daysOfWeek)); - buf.append("\n"); - buf.append("lastdayOfWeek: "); - buf.append(lastdayOfWeek); - buf.append("\n"); - buf.append("nearestWeekday: "); - buf.append(nearestWeekday); - buf.append("\n"); - buf.append("NthDayOfWeek: "); - buf.append(nthdayOfWeek); - buf.append("\n"); - buf.append("lastdayOfMonth: "); - buf.append(lastdayOfMonth); - buf.append("\n"); - buf.append("years: "); - buf.append(getExpressionSetSummary(years)); - buf.append("\n"); - - return buf.toString(); - } + protected int checkNext(int pos, String s, int val, int type) throws ParseException { - protected String getExpressionSetSummary(java.util.Set set) { + int end = -1; + int i = pos; - if (set.contains(NO_SPEC)) { - return "?"; - } - if (set.contains(ALL_SPEC)) { - return "*"; - } - - StringBuffer buf = new StringBuffer(); - - Iterator itr = set.iterator(); - boolean first = true; - while (itr.hasNext()) { - Integer iVal = (Integer) itr.next(); - String val = iVal.toString(); - if (!first) { - buf.append(","); - } - buf.append(val); - first = false; - } + if (i >= s.length()) { + addToSet(val, end, -1, type); + return i; + } - return buf.toString(); + char c = s.charAt(pos); + + if (c == 'L') { + if (type == DAY_OF_WEEK) { + if (val < 1 || val > 7) + throw new ParseException("Day-of-Week values must be between 1 and 7", -1); + lastdayOfWeek = true; + } else { + throw new 
ParseException("'L' option is not valid here. (pos=" + i + ")", i); + } + TreeSet set = getSet(type); + set.add(new Integer(val)); + i++; + return i; } - protected String getExpressionSetSummary(java.util.ArrayList list) { + if (c == 'W') { + if (type == DAY_OF_MONTH) { + nearestWeekday = true; + } else { + throw new ParseException("'W' option is not valid here. (pos=" + i + ")", i); + } + TreeSet set = getSet(type); + set.add(new Integer(val)); + i++; + return i; + } - if (list.contains(NO_SPEC)) { - return "?"; - } - if (list.contains(ALL_SPEC)) { - return "*"; + if (c == '#') { + if (type != DAY_OF_WEEK) { + throw new ParseException("'#' option is not valid here. (pos=" + i + ")", i); + } + i++; + try { + nthdayOfWeek = Integer.parseInt(s.substring(i)); + if (nthdayOfWeek < 1 || nthdayOfWeek > 5) { + throw new Exception(); } - - StringBuffer buf = new StringBuffer(); - - Iterator itr = list.iterator(); - boolean first = true; - while (itr.hasNext()) { - Integer iVal = (Integer) itr.next(); - String val = iVal.toString(); - if (!first) { - buf.append(","); - } - buf.append(val); - first = false; - } - - return buf.toString(); + } catch (Exception e) { + throw new ParseException("A numeric value between 1 and 5 must follow the '#' option", i); + } + + TreeSet set = getSet(type); + set.add(new Integer(val)); + i++; + return i; } - protected int skipWhiteSpace(int i, String s) { - for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) { - ; + if (c == '-') { + i++; + c = s.charAt(i); + int v = Integer.parseInt(String.valueOf(c)); + end = v; + i++; + if (i >= s.length()) { + addToSet(val, end, 1, type); + return i; + } + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(v, s, i); + int v1 = vs.value; + end = v1; + i = vs.pos; + } + if (i < s.length() && ((c = s.charAt(i)) == '/')) { + i++; + c = s.charAt(i); + int v2 = Integer.parseInt(String.valueOf(c)); + i++; + if (i >= s.length()) { + addToSet(val, end, v2, type); + 
return i; } - + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(v2, s, i); + int v3 = vs.value; + addToSet(val, end, v3, type); + i = vs.pos; + return i; + } else { + addToSet(val, end, v2, type); + return i; + } + } else { + addToSet(val, end, 1, type); return i; + } } - protected int findNextWhiteSpace(int i, String s) { - for (; i < s.length() && (s.charAt(i) != ' ' || s.charAt(i) != '\t'); i++) { - ; - } - + if (c == '/') { + i++; + c = s.charAt(i); + int v2 = Integer.parseInt(String.valueOf(c)); + i++; + if (i >= s.length()) { + addToSet(val, end, v2, type); + return i; + } + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(v2, s, i); + int v3 = vs.value; + addToSet(val, end, v3, type); + i = vs.pos; return i; + } else { + throw new ParseException("Unexpected character '" + c + "' after '/'", i); + } } - protected void addToSet(int val, int end, int incr, int type) throws ParseException { - - TreeSet set = getSet(type); - - if (type == SECOND || type == MINUTE) { - if ((val < 0 || val > 59 || end > 59) && (val != ALL_SPEC_INT)) { - throw new ParseException("Minute and Second values must be between 0 and 59", -1); - } - } else if (type == HOUR) { - if ((val < 0 || val > 23 || end > 23) && (val != ALL_SPEC_INT)) { - throw new ParseException("Hour values must be between 0 and 23", -1); - } - } else if (type == DAY_OF_MONTH) { - if ((val < 1 || val > 31 || end > 31) - && (val != ALL_SPEC_INT) - && (val != NO_SPEC_INT)) { - throw new ParseException("Day of month values must be between 1 and 31", -1); - } - } else if (type == MONTH) { - if ((val < 1 || val > 12 || end > 12) && (val != ALL_SPEC_INT)) { - throw new ParseException("Month values must be between 1 and 12", -1); - } - } else if (type == DAY_OF_WEEK) { - if ((val == 0 || val > 7 || end > 7) && (val != ALL_SPEC_INT) && (val != NO_SPEC_INT)) { - throw new ParseException("Day-of-Week values must be between 1 and 7", -1); - } - } + addToSet(val, end, 0, type); + 
i++; + return i; + } + + public String getCronExpression() { + return cronExpression; + } + + public String getExpressionSummary() { + StringBuffer buf = new StringBuffer(); + + buf.append("seconds: "); + buf.append(getExpressionSetSummary(seconds)); + buf.append("\n"); + buf.append("minutes: "); + buf.append(getExpressionSetSummary(minutes)); + buf.append("\n"); + buf.append("hours: "); + buf.append(getExpressionSetSummary(hours)); + buf.append("\n"); + buf.append("daysOfMonth: "); + buf.append(getExpressionSetSummary(daysOfMonth)); + buf.append("\n"); + buf.append("months: "); + buf.append(getExpressionSetSummary(months)); + buf.append("\n"); + buf.append("daysOfWeek: "); + buf.append(getExpressionSetSummary(daysOfWeek)); + buf.append("\n"); + buf.append("lastdayOfWeek: "); + buf.append(lastdayOfWeek); + buf.append("\n"); + buf.append("nearestWeekday: "); + buf.append(nearestWeekday); + buf.append("\n"); + buf.append("NthDayOfWeek: "); + buf.append(nthdayOfWeek); + buf.append("\n"); + buf.append("lastdayOfMonth: "); + buf.append(lastdayOfMonth); + buf.append("\n"); + buf.append("years: "); + buf.append(getExpressionSetSummary(years)); + buf.append("\n"); + + return buf.toString(); + } + + protected String getExpressionSetSummary(java.util.Set set) { + + if (set.contains(NO_SPEC)) { + return "?"; + } + if (set.contains(ALL_SPEC)) { + return "*"; + } - if ((incr == 0 || incr == -1) && val != ALL_SPEC_INT) { - if (val != -1) { - set.add(new Integer(val)); - } else { - set.add(NO_SPEC); - } + StringBuffer buf = new StringBuffer(); + + Iterator itr = set.iterator(); + boolean first = true; + while (itr.hasNext()) { + Integer iVal = (Integer) itr.next(); + String val = iVal.toString(); + if (!first) { + buf.append(","); + } + buf.append(val); + first = false; + } - return; - } + return buf.toString(); + } - int startAt = val; - int stopAt = end; + protected String getExpressionSetSummary(java.util.ArrayList list) { - if (val == ALL_SPEC_INT && incr <= 0) { - incr = 1; 
- set.add(ALL_SPEC); // put in a marker, but also fill values - } + if (list.contains(NO_SPEC)) { + return "?"; + } + if (list.contains(ALL_SPEC)) { + return "*"; + } - if (type == SECOND || type == MINUTE) { - if (stopAt == -1) { - stopAt = 59; - } - if (startAt == -1 || startAt == ALL_SPEC_INT) { - startAt = 0; - } - } else if (type == HOUR) { - if (stopAt == -1) { - stopAt = 23; - } - if (startAt == -1 || startAt == ALL_SPEC_INT) { - startAt = 0; - } - } else if (type == DAY_OF_MONTH) { - if (stopAt == -1) { - stopAt = 31; - } - if (startAt == -1 || startAt == ALL_SPEC_INT) { - startAt = 1; - } - } else if (type == MONTH) { - if (stopAt == -1) { - stopAt = 12; - } - if (startAt == -1 || startAt == ALL_SPEC_INT) { - startAt = 1; - } - } else if (type == DAY_OF_WEEK) { - if (stopAt == -1) { - stopAt = 7; - } - if (startAt == -1 || startAt == ALL_SPEC_INT) { - startAt = 1; - } - } else if (type == YEAR) { - if (stopAt == -1) { - stopAt = 2299; - } - if (startAt == -1 || startAt == ALL_SPEC_INT) { - startAt = 1970; - } - } + StringBuffer buf = new StringBuffer(); + + Iterator itr = list.iterator(); + boolean first = true; + while (itr.hasNext()) { + Integer iVal = (Integer) itr.next(); + String val = iVal.toString(); + if (!first) { + buf.append(","); + } + buf.append(val); + first = false; + } - // if the end of the range is before the start, then we need to overflow into - // the next day, month etc. This is done by adding the maximum amount for that - // type, and using modulus max to determine the value being added. 
- int max = -1; - if (stopAt < startAt) { - switch (type) { - case SECOND: - max = 60; - break; - case MINUTE: - max = 60; - break; - case HOUR: - max = 24; - break; - case MONTH: - max = 12; - break; - case DAY_OF_WEEK: - max = 7; - break; - case DAY_OF_MONTH: - max = 31; - break; - case YEAR: - throw new IllegalArgumentException("Start year must be less than stop year"); - default: - throw new IllegalArgumentException("Unexpected type encountered"); - } - stopAt += max; - } + return buf.toString(); + } - for (int i = startAt; i <= stopAt; i += incr) { - if (max == -1) { - // ie: there's no max to overflow over - set.add(new Integer(i)); - } else { - // take the modulus to get the real value - int i2 = i % max; + protected int skipWhiteSpace(int i, String s) { + for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) { + ; + } - // 1-indexed ranges should not include 0, and should include their max - if (i2 == 0 && (type == MONTH || type == DAY_OF_WEEK || type == DAY_OF_MONTH)) { - i2 = max; - } + return i; + } - set.add(new Integer(i2)); - } - } + protected int findNextWhiteSpace(int i, String s) { + for (; i < s.length() && (s.charAt(i) != ' ' || s.charAt(i) != '\t'); i++) { + ; } - protected TreeSet getSet(int type) { - switch (type) { - case SECOND: - return seconds; - case MINUTE: - return minutes; - case HOUR: - return hours; - case DAY_OF_MONTH: - return daysOfMonth; - case MONTH: - return months; - case DAY_OF_WEEK: - return daysOfWeek; - case YEAR: - return years; - default: - return null; - } + return i; + } + + protected void addToSet(int val, int end, int incr, int type) throws ParseException { + + TreeSet set = getSet(type); + + if (type == SECOND || type == MINUTE) { + if ((val < 0 || val > 59 || end > 59) && (val != ALL_SPEC_INT)) { + throw new ParseException("Minute and Second values must be between 0 and 59", -1); + } + } else if (type == HOUR) { + if ((val < 0 || val > 23 || end > 23) && (val != ALL_SPEC_INT)) { + throw new 
ParseException("Hour values must be between 0 and 23", -1); + } + } else if (type == DAY_OF_MONTH) { + if ((val < 1 || val > 31 || end > 31) && (val != ALL_SPEC_INT) && (val != NO_SPEC_INT)) { + throw new ParseException("Day of month values must be between 1 and 31", -1); + } + } else if (type == MONTH) { + if ((val < 1 || val > 12 || end > 12) && (val != ALL_SPEC_INT)) { + throw new ParseException("Month values must be between 1 and 12", -1); + } + } else if (type == DAY_OF_WEEK) { + if ((val == 0 || val > 7 || end > 7) && (val != ALL_SPEC_INT) && (val != NO_SPEC_INT)) { + throw new ParseException("Day-of-Week values must be between 1 and 7", -1); + } } - protected ValueSet getValue(int v, String s, int i) { - char c = s.charAt(i); - String s1 = String.valueOf(v); - while (c >= '0' && c <= '9') { - s1 += c; - i++; - if (i >= s.length()) { - break; - } - c = s.charAt(i); - } - ValueSet val = new ValueSet(); + if ((incr == 0 || incr == -1) && val != ALL_SPEC_INT) { + if (val != -1) { + set.add(new Integer(val)); + } else { + set.add(NO_SPEC); + } - val.pos = (i < s.length()) ? 
i : i + 1; - val.value = Integer.parseInt(s1); - return val; + return; } - protected int getNumericValue(String s, int i) { - int endOfVal = findNextWhiteSpace(i, s); - String val = s.substring(i, endOfVal); - return Integer.parseInt(val); - } - - protected int getMonthNumber(String s) { - Integer integer = (Integer) monthMap.get(s); + int startAt = val; + int stopAt = end; - if (integer == null) { - return -1; - } + if (val == ALL_SPEC_INT && incr <= 0) { + incr = 1; + set.add(ALL_SPEC); // put in a marker, but also fill values + } - return integer.intValue(); + if (type == SECOND || type == MINUTE) { + if (stopAt == -1) { + stopAt = 59; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 0; + } + } else if (type == HOUR) { + if (stopAt == -1) { + stopAt = 23; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 0; + } + } else if (type == DAY_OF_MONTH) { + if (stopAt == -1) { + stopAt = 31; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1; + } + } else if (type == MONTH) { + if (stopAt == -1) { + stopAt = 12; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1; + } + } else if (type == DAY_OF_WEEK) { + if (stopAt == -1) { + stopAt = 7; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1; + } + } else if (type == YEAR) { + if (stopAt == -1) { + stopAt = 2299; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1970; + } } - protected int getDayOfWeekNumber(String s) { - Integer integer = (Integer) dayMap.get(s); + // if the end of the range is before the start, then we need to overflow into + // the next day, month etc. This is done by adding the maximum amount for that + // type, and using modulus max to determine the value being added. 
+ int max = -1; + if (stopAt < startAt) { + switch (type) { + case SECOND: + max = 60; + break; + case MINUTE: + max = 60; + break; + case HOUR: + max = 24; + break; + case MONTH: + max = 12; + break; + case DAY_OF_WEEK: + max = 7; + break; + case DAY_OF_MONTH: + max = 31; + break; + case YEAR: + throw new IllegalArgumentException("Start year must be less than stop year"); + default: + throw new IllegalArgumentException("Unexpected type encountered"); + } + stopAt += max; + } - if (integer == null) { - return -1; + for (int i = startAt; i <= stopAt; i += incr) { + if (max == -1) { + // ie: there's no max to overflow over + set.add(new Integer(i)); + } else { + // take the modulus to get the real value + int i2 = i % max; + + // 1-indexed ranges should not include 0, and should include their max + if (i2 == 0 && (type == MONTH || type == DAY_OF_WEEK || type == DAY_OF_MONTH)) { + i2 = max; } - return integer.intValue(); + set.add(new Integer(i2)); + } } + } + + protected TreeSet getSet(int type) { + switch (type) { + case SECOND: + return seconds; + case MINUTE: + return minutes; + case HOUR: + return hours; + case DAY_OF_MONTH: + return daysOfMonth; + case MONTH: + return months; + case DAY_OF_WEEK: + return daysOfWeek; + case YEAR: + return years; + default: + return null; + } + } + + protected ValueSet getValue(int v, String s, int i) { + char c = s.charAt(i); + String s1 = String.valueOf(v); + while (c >= '0' && c <= '9') { + s1 += c; + i++; + if (i >= s.length()) { + break; + } + c = s.charAt(i); + } + ValueSet val = new ValueSet(); - //////////////////////////////////////////////////////////////////////////// - // - // Computation Functions - // - //////////////////////////////////////////////////////////////////////////// - - protected Date getTimeAfter(Date afterTime) { - - // Computation is based on Gregorian year only. 
- Calendar cl = new java.util.GregorianCalendar(getTimeZone()); - - // move ahead one second, since we're computing the time *after* the - // given time - afterTime = new Date(afterTime.getTime() + 1000); - // CronTrigger does not deal with milliseconds - cl.setTime(afterTime); - cl.set(Calendar.MILLISECOND, 0); - - boolean gotOne = false; - // loop until we've computed the next time, or we've past the endTime - while (!gotOne) { + val.pos = (i < s.length()) ? i : i + 1; + val.value = Integer.parseInt(s1); + return val; + } - // if (endTime != null && cl.getTime().after(endTime)) return null; - if (cl.get(Calendar.YEAR) > 2999) { // prevent endless loop... - return null; - } + protected int getNumericValue(String s, int i) { + int endOfVal = findNextWhiteSpace(i, s); + String val = s.substring(i, endOfVal); + return Integer.parseInt(val); + } - SortedSet st = null; - int t = 0; + protected int getMonthNumber(String s) { + Integer integer = (Integer) monthMap.get(s); - int sec = cl.get(Calendar.SECOND); - int min = cl.get(Calendar.MINUTE); + if (integer == null) { + return -1; + } - // get second................................................. - st = seconds.tailSet(new Integer(sec)); - if (st != null && st.size() != 0) { - sec = ((Integer) st.first()).intValue(); - } else { - sec = ((Integer) seconds.first()).intValue(); - min++; - cl.set(Calendar.MINUTE, min); - } - cl.set(Calendar.SECOND, sec); - - min = cl.get(Calendar.MINUTE); - int hr = cl.get(Calendar.HOUR_OF_DAY); - t = -1; - - // get minute................................................. 
- st = minutes.tailSet(new Integer(min)); - if (st != null && st.size() != 0) { - t = min; - min = ((Integer) st.first()).intValue(); - } else { - min = ((Integer) minutes.first()).intValue(); - hr++; - } - if (min != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, min); - setCalendarHour(cl, hr); - continue; - } - cl.set(Calendar.MINUTE, min); - - hr = cl.get(Calendar.HOUR_OF_DAY); - int day = cl.get(Calendar.DAY_OF_MONTH); - t = -1; - - // get hour................................................... - st = hours.tailSet(new Integer(hr)); - if (st != null && st.size() != 0) { - t = hr; - hr = ((Integer) st.first()).intValue(); - } else { - hr = ((Integer) hours.first()).intValue(); - day++; - } - if (hr != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.DAY_OF_MONTH, day); - setCalendarHour(cl, hr); - continue; - } - cl.set(Calendar.HOUR_OF_DAY, hr); - - day = cl.get(Calendar.DAY_OF_MONTH); - int mon = cl.get(Calendar.MONTH) + 1; - // '+ 1' because calendar is 0-based for this field, and we are - // 1-based - t = -1; - int tmon = mon; - - // get day................................................... 
- boolean dayOfMSpec = !daysOfMonth.contains(NO_SPEC); - boolean dayOfWSpec = !daysOfWeek.contains(NO_SPEC); - if (dayOfMSpec && !dayOfWSpec) { // get day by day of month rule - st = daysOfMonth.tailSet(new Integer(day)); - if (lastdayOfMonth) { - if (!nearestWeekday) { - t = day; - day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - } else { - t = day; - day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - - java.util.Calendar tcal = java.util.Calendar.getInstance(getTimeZone()); - tcal.set(Calendar.SECOND, 0); - tcal.set(Calendar.MINUTE, 0); - tcal.set(Calendar.HOUR_OF_DAY, 0); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); - - int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - int dow = tcal.get(Calendar.DAY_OF_WEEK); - - if (dow == Calendar.SATURDAY && day == 1) { - day += 2; - } else if (dow == Calendar.SATURDAY) { - day -= 1; - } else if (dow == Calendar.SUNDAY && day == ldom) { - day -= 2; - } else if (dow == Calendar.SUNDAY) { - day += 1; - } - - tcal.set(Calendar.SECOND, sec); - tcal.set(Calendar.MINUTE, min); - tcal.set(Calendar.HOUR_OF_DAY, hr); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - Date nTime = tcal.getTime(); - if (nTime.before(afterTime)) { - day = 1; - mon++; - } - } - } else if (nearestWeekday) { - t = day; - day = ((Integer) daysOfMonth.first()).intValue(); - - java.util.Calendar tcal = java.util.Calendar.getInstance(getTimeZone()); - tcal.set(Calendar.SECOND, 0); - tcal.set(Calendar.MINUTE, 0); - tcal.set(Calendar.HOUR_OF_DAY, 0); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); - - int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - int dow = tcal.get(Calendar.DAY_OF_WEEK); - - if (dow == Calendar.SATURDAY && day == 1) { - day += 2; - } else if (dow == Calendar.SATURDAY) { - day -= 1; - } else if (dow == Calendar.SUNDAY && day == 
ldom) { - day -= 2; - } else if (dow == Calendar.SUNDAY) { - day += 1; - } - - tcal.set(Calendar.SECOND, sec); - tcal.set(Calendar.MINUTE, min); - tcal.set(Calendar.HOUR_OF_DAY, hr); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - Date nTime = tcal.getTime(); - if (nTime.before(afterTime)) { - day = ((Integer) daysOfMonth.first()).intValue(); - mon++; - } - } else if (st != null && st.size() != 0) { - t = day; - day = ((Integer) st.first()).intValue(); - // make sure we don't over-run a short month, such as february - int lastDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - if (day > lastDay) { - day = ((Integer) daysOfMonth.first()).intValue(); - mon++; - } - } else { - day = ((Integer) daysOfMonth.first()).intValue(); - mon++; - } - - if (day != t || mon != tmon) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, day); - cl.set(Calendar.MONTH, mon - 1); - // '- 1' because calendar is 0-based for this field, and we - // are 1-based - continue; - } - } else if (dayOfWSpec && !dayOfMSpec) { // get day by day of week rule - if (lastdayOfWeek) { // are we looking for the last XXX day of - // the month? - int dow = ((Integer) daysOfWeek.first()).intValue(); // desired - // d-o-w - int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w - int daysToAdd = 0; - if (cDow < dow) { - daysToAdd = dow - cDow; - } - if (cDow > dow) { - daysToAdd = dow + (7 - cDow); - } - - int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - - if (day + daysToAdd > lDay) { // did we already miss the - // last one? - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, mon); - // no '- 1' here because we are promoting the month - continue; - } - - // find date of last occurance of this day in this month... 
- while ((day + daysToAdd + 7) <= lDay) { - daysToAdd += 7; - } - - day += daysToAdd; - - if (daysToAdd > 0) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, day); - cl.set(Calendar.MONTH, mon - 1); - // '- 1' here because we are not promoting the month - continue; - } - - } else if (nthdayOfWeek != 0) { - // are we looking for the Nth XXX day in the month? - int dow = ((Integer) daysOfWeek.first()).intValue(); // desired - // d-o-w - int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w - int daysToAdd = 0; - if (cDow < dow) { - daysToAdd = dow - cDow; - } else if (cDow > dow) { - daysToAdd = dow + (7 - cDow); - } - - boolean dayShifted = false; - if (daysToAdd > 0) { - dayShifted = true; - } - - day += daysToAdd; - int weekOfMonth = day / 7; - if (day % 7 > 0) { - weekOfMonth++; - } - - daysToAdd = (nthdayOfWeek - weekOfMonth) * 7; - day += daysToAdd; - if (daysToAdd < 0 || day > getLastDayOfMonth(mon, cl.get(Calendar.YEAR))) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, mon); - // no '- 1' here because we are promoting the month - continue; - } else if (daysToAdd > 0 || dayShifted) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, day); - cl.set(Calendar.MONTH, mon - 1); - // '- 1' here because we are NOT promoting the month - continue; - } - } else { - int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w - int dow = ((Integer) daysOfWeek.first()).intValue(); // desired - // d-o-w - st = daysOfWeek.tailSet(new Integer(cDow)); - if (st != null && st.size() > 0) { - dow = ((Integer) st.first()).intValue(); - } - - int daysToAdd = 0; - if (cDow < dow) { - daysToAdd = dow - cDow; - } - if (cDow > dow) { - daysToAdd = dow + (7 - cDow); - } - - int lDay = getLastDayOfMonth(mon, 
cl.get(Calendar.YEAR)); - - if (day + daysToAdd > lDay) { // will we pass the end of - // the month? - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, mon); - // no '- 1' here because we are promoting the month - continue; - } else if (daysToAdd > 0) { // are we swithing days? - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, day + daysToAdd); - cl.set(Calendar.MONTH, mon - 1); - // '- 1' because calendar is 0-based for this field, - // and we are 1-based - continue; - } - } - } else { // dayOfWSpec && !dayOfMSpec - throw new UnsupportedOperationException( - "Support for specifying both a day-of-week AND a day-of-month parameter is" - + " not implemented."); - // TODO: - } - cl.set(Calendar.DAY_OF_MONTH, day); + return integer.intValue(); + } - mon = cl.get(Calendar.MONTH) + 1; - // '+ 1' because calendar is 0-based for this field, and we are - // 1-based - int year = cl.get(Calendar.YEAR); - t = -1; + protected int getDayOfWeekNumber(String s) { + Integer integer = (Integer) dayMap.get(s); - // test for expressions that never generate a valid fire date, - // but keep looping... - if (year > 2299) { - return null; - } + if (integer == null) { + return -1; + } - // get month................................................... 
- st = months.tailSet(new Integer(mon)); - if (st != null && st.size() != 0) { - t = mon; - mon = ((Integer) st.first()).intValue(); - } else { - mon = ((Integer) months.first()).intValue(); - year++; - } - if (mon != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, mon - 1); - // '- 1' because calendar is 0-based for this field, and we are - // 1-based - cl.set(Calendar.YEAR, year); - continue; - } - cl.set(Calendar.MONTH, mon - 1); - // '- 1' because calendar is 0-based for this field, and we are - // 1-based - - year = cl.get(Calendar.YEAR); - t = -1; - - // get year................................................... - st = years.tailSet(new Integer(year)); - if (st != null && st.size() != 0) { - t = year; - year = ((Integer) st.first()).intValue(); - } else { - return null; // ran out of years... - } + return integer.intValue(); + } - if (year != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, 0); - // '- 1' because calendar is 0-based for this field, and we are - // 1-based - cl.set(Calendar.YEAR, year); - continue; - } - cl.set(Calendar.YEAR, year); + //////////////////////////////////////////////////////////////////////////// + // + // Computation Functions + // + //////////////////////////////////////////////////////////////////////////// - gotOne = true; - } // while( !done ) + protected Date getTimeAfter(Date afterTime) { - return cl.getTime(); - } + // Computation is based on Gregorian year only. + Calendar cl = new java.util.GregorianCalendar(getTimeZone()); - /** - * Advance the calendar to the particular hour paying particular attention to daylight saving - * problems. 
- * - * @param cal - * @param hour - */ - protected void setCalendarHour(Calendar cal, int hour) { - cal.set(java.util.Calendar.HOUR_OF_DAY, hour); - if (cal.get(java.util.Calendar.HOUR_OF_DAY) != hour && hour != 24) { - cal.set(java.util.Calendar.HOUR_OF_DAY, hour + 1); - } - } + // move ahead one second, since we're computing the time *after* the + // given time + afterTime = new Date(afterTime.getTime() + 1000); + // CronTrigger does not deal with milliseconds + cl.setTime(afterTime); + cl.set(Calendar.MILLISECOND, 0); - /** - * NOT YET IMPLEMENTED: Returns the time before the given time that the CronExpression - * matches. - */ - protected Date getTimeBefore(Date endTime) { - // TODO: implement QUARTZ-423 - return null; - } + boolean gotOne = false; + // loop until we've computed the next time, or we've past the endTime + while (!gotOne) { - /** - * NOT YET IMPLEMENTED: Returns the final time that the CronExpression will match. - */ - public Date getFinalFireTime() { - // TODO: implement QUARTZ-423 + // if (endTime != null && cl.getTime().after(endTime)) return null; + if (cl.get(Calendar.YEAR) > 2999) { // prevent endless loop... return null; - } - - protected boolean isLeapYear(int year) { - return ((year % 4 == 0 && year % 100 != 0) || (year % 400 == 0)); - } + } + + SortedSet st = null; + int t = 0; + + int sec = cl.get(Calendar.SECOND); + int min = cl.get(Calendar.MINUTE); + + // get second................................................. + st = seconds.tailSet(new Integer(sec)); + if (st != null && st.size() != 0) { + sec = ((Integer) st.first()).intValue(); + } else { + sec = ((Integer) seconds.first()).intValue(); + min++; + cl.set(Calendar.MINUTE, min); + } + cl.set(Calendar.SECOND, sec); + + min = cl.get(Calendar.MINUTE); + int hr = cl.get(Calendar.HOUR_OF_DAY); + t = -1; + + // get minute................................................. 
+ st = minutes.tailSet(new Integer(min)); + if (st != null && st.size() != 0) { + t = min; + min = ((Integer) st.first()).intValue(); + } else { + min = ((Integer) minutes.first()).intValue(); + hr++; + } + if (min != t) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, min); + setCalendarHour(cl, hr); + continue; + } + cl.set(Calendar.MINUTE, min); + + hr = cl.get(Calendar.HOUR_OF_DAY); + int day = cl.get(Calendar.DAY_OF_MONTH); + t = -1; + + // get hour................................................... + st = hours.tailSet(new Integer(hr)); + if (st != null && st.size() != 0) { + t = hr; + hr = ((Integer) st.first()).intValue(); + } else { + hr = ((Integer) hours.first()).intValue(); + day++; + } + if (hr != t) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + setCalendarHour(cl, hr); + continue; + } + cl.set(Calendar.HOUR_OF_DAY, hr); + + day = cl.get(Calendar.DAY_OF_MONTH); + int mon = cl.get(Calendar.MONTH) + 1; + // '+ 1' because calendar is 0-based for this field, and we are + // 1-based + t = -1; + int tmon = mon; + + // get day................................................... 
+ boolean dayOfMSpec = !daysOfMonth.contains(NO_SPEC); + boolean dayOfWSpec = !daysOfWeek.contains(NO_SPEC); + if (dayOfMSpec && !dayOfWSpec) { // get day by day of month rule + st = daysOfMonth.tailSet(new Integer(day)); + if (lastdayOfMonth) { + if (!nearestWeekday) { + t = day; + day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + } else { + t = day; + day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + + java.util.Calendar tcal = java.util.Calendar.getInstance(getTimeZone()); + tcal.set(Calendar.SECOND, 0); + tcal.set(Calendar.MINUTE, 0); + tcal.set(Calendar.HOUR_OF_DAY, 0); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); + + int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + int dow = tcal.get(Calendar.DAY_OF_WEEK); + + if (dow == Calendar.SATURDAY && day == 1) { + day += 2; + } else if (dow == Calendar.SATURDAY) { + day -= 1; + } else if (dow == Calendar.SUNDAY && day == ldom) { + day -= 2; + } else if (dow == Calendar.SUNDAY) { + day += 1; + } + + tcal.set(Calendar.SECOND, sec); + tcal.set(Calendar.MINUTE, min); + tcal.set(Calendar.HOUR_OF_DAY, hr); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + Date nTime = tcal.getTime(); + if (nTime.before(afterTime)) { + day = 1; + mon++; + } + } + } else if (nearestWeekday) { + t = day; + day = ((Integer) daysOfMonth.first()).intValue(); + + java.util.Calendar tcal = java.util.Calendar.getInstance(getTimeZone()); + tcal.set(Calendar.SECOND, 0); + tcal.set(Calendar.MINUTE, 0); + tcal.set(Calendar.HOUR_OF_DAY, 0); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); + + int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + int dow = tcal.get(Calendar.DAY_OF_WEEK); + + if (dow == Calendar.SATURDAY && day == 1) { + day += 2; + } else if (dow == Calendar.SATURDAY) { + day -= 1; + } else if (dow == Calendar.SUNDAY && day == 
ldom) { + day -= 2; + } else if (dow == Calendar.SUNDAY) { + day += 1; + } + + tcal.set(Calendar.SECOND, sec); + tcal.set(Calendar.MINUTE, min); + tcal.set(Calendar.HOUR_OF_DAY, hr); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + Date nTime = tcal.getTime(); + if (nTime.before(afterTime)) { + day = ((Integer) daysOfMonth.first()).intValue(); + mon++; + } + } else if (st != null && st.size() != 0) { + t = day; + day = ((Integer) st.first()).intValue(); + // make sure we don't over-run a short month, such as february + int lastDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + if (day > lastDay) { + day = ((Integer) daysOfMonth.first()).intValue(); + mon++; + } + } else { + day = ((Integer) daysOfMonth.first()).intValue(); + mon++; + } - protected int getLastDayOfMonth(int monthNum, int year) { - - switch (monthNum) { - case 1: - return 31; - case 2: - return (isLeapYear(year)) ? 29 : 28; - case 3: - return 31; - case 4: - return 30; - case 5: - return 31; - case 6: - return 30; - case 7: - return 31; - case 8: - return 31; - case 9: - return 30; - case 10: - return 31; - case 11: - return 30; - case 12: - return 31; - default: - throw new IllegalArgumentException("Illegal month number: " + monthNum); + if (day != t || mon != tmon) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' because calendar is 0-based for this field, and we + // are 1-based + continue; } + } else if (dayOfWSpec && !dayOfMSpec) { // get day by day of week rule + if (lastdayOfWeek) { // are we looking for the last XXX day of + // the month? 
+ int dow = ((Integer) daysOfWeek.first()).intValue(); // desired + // d-o-w + int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w + int daysToAdd = 0; + if (cDow < dow) { + daysToAdd = dow - cDow; + } + if (cDow > dow) { + daysToAdd = dow + (7 - cDow); + } + + int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + + if (day + daysToAdd > lDay) { // did we already miss the + // last one? + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, mon); + // no '- 1' here because we are promoting the month + continue; + } + + // find date of last occurance of this day in this month... + while ((day + daysToAdd + 7) <= lDay) { + daysToAdd += 7; + } + + day += daysToAdd; + + if (daysToAdd > 0) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' here because we are not promoting the month + continue; + } + + } else if (nthdayOfWeek != 0) { + // are we looking for the Nth XXX day in the month? 
+ int dow = ((Integer) daysOfWeek.first()).intValue(); // desired + // d-o-w + int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w + int daysToAdd = 0; + if (cDow < dow) { + daysToAdd = dow - cDow; + } else if (cDow > dow) { + daysToAdd = dow + (7 - cDow); + } + + boolean dayShifted = false; + if (daysToAdd > 0) { + dayShifted = true; + } + + day += daysToAdd; + int weekOfMonth = day / 7; + if (day % 7 > 0) { + weekOfMonth++; + } + + daysToAdd = (nthdayOfWeek - weekOfMonth) * 7; + day += daysToAdd; + if (daysToAdd < 0 || day > getLastDayOfMonth(mon, cl.get(Calendar.YEAR))) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, mon); + // no '- 1' here because we are promoting the month + continue; + } else if (daysToAdd > 0 || dayShifted) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' here because we are NOT promoting the month + continue; + } + } else { + int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w + int dow = ((Integer) daysOfWeek.first()).intValue(); // desired + // d-o-w + st = daysOfWeek.tailSet(new Integer(cDow)); + if (st != null && st.size() > 0) { + dow = ((Integer) st.first()).intValue(); + } + + int daysToAdd = 0; + if (cDow < dow) { + daysToAdd = dow - cDow; + } + if (cDow > dow) { + daysToAdd = dow + (7 - cDow); + } + + int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + + if (day + daysToAdd > lDay) { // will we pass the end of + // the month? + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, mon); + // no '- 1' here because we are promoting the month + continue; + } else if (daysToAdd > 0) { // are we swithing days? 
+ cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day + daysToAdd); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' because calendar is 0-based for this field, + // and we are 1-based + continue; + } + } + } else { // dayOfWSpec && !dayOfMSpec + throw new UnsupportedOperationException( + "Support for specifying both a day-of-week AND a day-of-month parameter is" + + " not implemented."); + // TODO: + } + cl.set(Calendar.DAY_OF_MONTH, day); + + mon = cl.get(Calendar.MONTH) + 1; + // '+ 1' because calendar is 0-based for this field, and we are + // 1-based + int year = cl.get(Calendar.YEAR); + t = -1; + + // test for expressions that never generate a valid fire date, + // but keep looping... + if (year > 2299) { + return null; + } + + // get month................................................... + st = months.tailSet(new Integer(mon)); + if (st != null && st.size() != 0) { + t = mon; + mon = ((Integer) st.first()).intValue(); + } else { + mon = ((Integer) months.first()).intValue(); + year++; + } + if (mon != t) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' because calendar is 0-based for this field, and we are + // 1-based + cl.set(Calendar.YEAR, year); + continue; + } + cl.set(Calendar.MONTH, mon - 1); + // '- 1' because calendar is 0-based for this field, and we are + // 1-based + + year = cl.get(Calendar.YEAR); + t = -1; + + // get year................................................... + st = years.tailSet(new Integer(year)); + if (st != null && st.size() != 0) { + t = year; + year = ((Integer) st.first()).intValue(); + } else { + return null; // ran out of years... 
+ } + + if (year != t) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, 0); + // '- 1' because calendar is 0-based for this field, and we are + // 1-based + cl.set(Calendar.YEAR, year); + continue; + } + cl.set(Calendar.YEAR, year); + + gotOne = true; + } // while( !done ) + + return cl.getTime(); + } + + /** + * Advance the calendar to the particular hour paying particular attention to daylight saving + * problems. + * + * @param cal + * @param hour + */ + protected void setCalendarHour(Calendar cal, int hour) { + cal.set(java.util.Calendar.HOUR_OF_DAY, hour); + if (cal.get(java.util.Calendar.HOUR_OF_DAY) != hour && hour != 24) { + cal.set(java.util.Calendar.HOUR_OF_DAY, hour + 1); } - - private void readObject(java.io.ObjectInputStream stream) - throws java.io.IOException, ClassNotFoundException { - - stream.defaultReadObject(); - try { - buildExpression(cronExpression); - } catch (Exception ignore) { - } // never happens + } + + /** + * NOT YET IMPLEMENTED: Returns the time before the given time that the CronExpression + * matches. + */ + protected Date getTimeBefore(Date endTime) { + // TODO: implement QUARTZ-423 + return null; + } + + /** + * NOT YET IMPLEMENTED: Returns the final time that the CronExpression will match. + */ + public Date getFinalFireTime() { + // TODO: implement QUARTZ-423 + return null; + } + + protected boolean isLeapYear(int year) { + return ((year % 4 == 0 && year % 100 != 0) || (year % 400 == 0)); + } + + protected int getLastDayOfMonth(int monthNum, int year) { + + switch (monthNum) { + case 1: + return 31; + case 2: + return (isLeapYear(year)) ? 
29 : 28; + case 3: + return 31; + case 4: + return 30; + case 5: + return 31; + case 6: + return 30; + case 7: + return 31; + case 8: + return 31; + case 9: + return 30; + case 10: + return 31; + case 11: + return 30; + case 12: + return 31; + default: + throw new IllegalArgumentException("Illegal month number: " + monthNum); } - - public Object clone() { - CronExpression copy = null; - try { - copy = new CronExpression(getCronExpression()); - if (getTimeZone() != null) copy.setTimeZone((TimeZone) getTimeZone().clone()); - } catch (ParseException ex) { // never happens since the source is valid... - throw new IncompatibleClassChangeError("Not Cloneable."); - } - return copy; + } + + private void readObject(java.io.ObjectInputStream stream) + throws java.io.IOException, ClassNotFoundException { + + stream.defaultReadObject(); + try { + buildExpression(cronExpression); + } catch (Exception ignore) { + } // never happens + } + + public Object clone() { + CronExpression copy = null; + try { + copy = new CronExpression(getCronExpression()); + if (getTimeZone() != null) copy.setTimeZone((TimeZone) getTimeZone().clone()); + } catch (ParseException ex) { // never happens since the source is valid... 
+ throw new IncompatibleClassChangeError("Not Cloneable."); } + return copy; + } } class ValueSet { - public int value; + public int value; - public int pos; + public int pos; } diff --git a/backend/src/main/java/sparqles/schedule/Scheduler.java b/backend/src/main/java/sparqles/schedule/Scheduler.java index 1d7f3dd4..a33693dd 100644 --- a/backend/src/main/java/sparqles/schedule/Scheduler.java +++ b/backend/src/main/java/sparqles/schedule/Scheduler.java @@ -2,13 +2,9 @@ import static sparqles.core.CONSTANTS.*; +import java.security.SecureRandom; import java.text.ParseException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -29,250 +25,290 @@ import sparqles.utils.MongoDBManager; public class Scheduler { - - public static final String CRON_EVERY_HOUR = "0 0 0/1 1/1 * ? *"; - public static final String CRON_EVERY_ONETEN = "0 30 1 1/1 * ? *"; - private static final Logger log = LoggerFactory.getLogger(Scheduler.class); - private static final String CRON_EVERY_DAY_AT_715 = "0 15 7 1/1 * ? *"; - private static final String CRON_EVERY_DAY_AT_215 = "0 15 2 1/1 * ? *"; - private static final String CRON_EVERY_MON_WED_FRI_SUN_THU_AT_410 = "0 10 4 ? * WED,THU *"; - - private static final String CRON_EVERY_SUN_AT_310 = "0 10 3 ? * SUN *"; - private static final String CRON_EVERY_SUN_AT_2330 = "0 30 23 ? * SUN *"; - private static final String CRON_EVERY_SAT_AT_310 = "0 10 3 ? * SAT *"; - private static final String CRON_FIRST_SAT_AT_MONTH_AT_TWO = "0 0 2 ? 1/1 SAT#1 *"; - private static final String CRON_EVERY_FIVE_MINUTES = "0 0/5 * 1/1 * ? 
*"; - - /** The default schedules for various tasks http://www.cronmaker.com/ */ - private static final Map taskSchedule = new HashMap(); - - static { - // availability - taskSchedule.put(ATASK, CRON_EVERY_HOUR); - // performance - taskSchedule.put(PTASK, CRON_EVERY_ONETEN); - // interoperability - taskSchedule.put(FTASK, CRON_EVERY_SUN_AT_310); - // discoverability - taskSchedule.put(DTASK, CRON_EVERY_SAT_AT_310); - // index - taskSchedule.put(ITASK, CRON_EVERY_DAY_AT_715); - // datahub refresh - taskSchedule.put(ETASK, CRON_EVERY_DAY_AT_215); + private static final Logger log = LoggerFactory.getLogger(Scheduler.class); + + @Deprecated public static final String CRON_EVERY_HOUR = "0 0 0/1 1/1 * ? *"; + @Deprecated public static final String CRON_EVERY_ONETEN = "0 30 1 1/1 * ? *"; + private static final String CRON_EVERY_DAY_AT_715 = "0 15 7 1/1 * ? *"; + private static final String CRON_EVERY_DAY_AT_215 = "0 15 2 1/1 * ? *"; + private static final String CRON_EVERY_MON_WED_FRI_SUN_THU_AT_410 = "0 10 4 ? * WED,THU *"; + + private static final String CRON_EVERY_SUN_AT_310 = "0 10 3 ? * SUN *"; + private static final String CRON_EVERY_SUN_AT_2330 = "0 30 23 ? * SUN *"; + private static final String CRON_EVERY_SAT_AT_310 = "0 10 3 ? * SAT *"; + private static final String CRON_FIRST_SAT_AT_MONTH_AT_TWO = "0 0 2 ? 1/1 SAT#1 *"; + private static final String CRON_EVERY_FIVE_MINUTES = "0 0/5 * 1/1 * ? 
*"; + + /** The default schedules for various tasks http://www.cronmaker.com/ */ + // private static final Map taskSchedule = new HashMap(); + private static final Random random = new SecureRandom(); + + // + // static { + // // availability + //// taskSchedule.put(ATASK, CRON_EVERY_HOUR); + // // performance + //// taskSchedule.put(PTASK, CRON_EVERY_ONETEN); + // // interoperability + // taskSchedule.put(FTASK, CRON_EVERY_SUN_AT_310); + // // discoverability + // taskSchedule.put(DTASK, CRON_EVERY_SAT_AT_310); + // taskSchedule.put(CTASK, CRON_EVERY_SUN_AT_2330); + // // index + // taskSchedule.put(ITASK, CRON_EVERY_DAY_AT_715); + // // datahub refresh + // taskSchedule.put(ETASK, CRON_EVERY_DAY_AT_215); + // } + + private final ScheduledExecutorService SERVICE, ASERVICE; + private FileManager _fm; + private MongoDBManager _dbm; + + private SchedulerMonitor _monitor; + + public Scheduler() { + this(SPARQLESProperties.getTASK_THREADS()); + } + + public Scheduler(int threads) { + int athreads = (int) (threads * 0.3); + int tthreads = threads - athreads; + // TODO: use virtual threads after JDK21 + // SERVICE = Executors.newScheduledThreadPool(tthreads, Thread.ofVirtual().factory()); + // ASERVICE = Executors.newScheduledThreadPool(athreads, Thread.ofVirtual().factory()); + SERVICE = Executors.newScheduledThreadPool(tthreads); + ASERVICE = Executors.newScheduledThreadPool(athreads); + + _monitor = new SchedulerMonitor(); + _monitor.start(); + log.info("INIT Scheduler with {} athreads and {} threads", athreads, tthreads); + } + + /** + * Creates for all endpoints in the DB a default schedule + * + * @param dbm + * @return + */ + public static Collection createDefaultSchedule(MongoDBManager dbm) { + List l = new ArrayList(); + Collection eps = dbm.get(Endpoint.class, Endpoint.SCHEMA$); + for (Endpoint ep : eps) { + Schedule s = defaultSchedule(ep); + l.add(s); } - private final ScheduledExecutorService SERVICE, ASERVICE; - private FileManager _fm; - private 
MongoDBManager _dbm; - - private SchedulerMonitor _monitor; - - public Scheduler() { - this(SPARQLESProperties.getTASK_THREADS()); + // add the analytics schedules for + Schedule s = new Schedule(); + s.setEndpoint(SPARQLESProperties.getSparqlesEndpoint()); + s.setITask(cronForITask(s.getEndpoint())); + s.setETask(cronForETask(s.getEndpoint())); + l.add(s); + + return l; + } + + /** + * Returns the default schedule element for the endpoints + * + * @param ep + * @return + */ + public static Schedule defaultSchedule(Endpoint ep) { + Schedule s = new Schedule(); + s.setEndpoint(ep); + + s.setATask(randomATaskCron(ep)); + s.setPTask(randomPTaskCron(ep)); + s.setFTask(cronForFTask(ep)); + s.setDTask(cronForDTask(ep)); + s.setCTask(cronForCTask(ep)); + s.setITask(cronForITask(ep)); + + return s; + } + + /** Check endpoint availability once an hour, spread out evenly */ + private static CharSequence randomATaskCron(Endpoint ep) { + return String.format("0 %d/60 * ? * * *", random.nextInt(60)); + } + + /** Avoid hitting SPARQL endpoints with expensive queries during "business" hours */ + private static CharSequence randomPTaskCron(Endpoint ep) { + var randHours = random.nextInt(19, 24 + 5) % 24; + var randMinutes = random.nextInt(60); + + return String.format("0 %d %d ? * * *", randMinutes, randHours); + } + + private static CharSequence cronForDTask(Endpoint ep) { + return CRON_EVERY_SAT_AT_310; + } + + private static CharSequence cronForFTask(Endpoint ep) { + return CRON_EVERY_SUN_AT_310; + } + + private static CharSequence cronForCTask(Endpoint ep) { + return CRON_EVERY_SUN_AT_2330; + } + + private static CharSequence cronForITask(Endpoint ep) { + return CRON_EVERY_DAY_AT_715; + } + + private static CharSequence cronForETask(Endpoint etask) { + // return CRON_EVERY_DAY_AT_215; + return null; // old.datahub is not updated any more, disable the task for now + } + + /** + * Initial the scheduler with the schedules from the underlying DB. 
+ * + * @param db + */ + public void init(MongoDBManager db) { + + Collection schedules = db.get(Schedule.class, Schedule.SCHEMA$); + log.info("Scheduling tasks for {} endpoints", schedules.size()); + + for (Schedule sd : schedules) { + initSchedule(sd); } - - public Scheduler(int threads) { - int athreads = (int) (threads * 0.3); - int tthreads = threads - athreads; - SERVICE = Executors.newScheduledThreadPool(tthreads); - ASERVICE = Executors.newScheduledThreadPool(athreads); - - _monitor = new SchedulerMonitor(); - _monitor.start(); - log.info("INIT Scheduler with {} athreads and {} threads", athreads, tthreads); + } + + public void initSchedule(Schedule sd) { + Endpoint ep = sd.getEndpoint(); + + try { + if (sd.getATask() != null) { + schedule( + TaskFactory.create(ATASK, ep, _dbm, _fm), + new CronBasedIterator(sd.getATask().toString())); + } + if (sd.getPTask() != null) { + schedule( + TaskFactory.create(PTASK, ep, _dbm, _fm), + new CronBasedIterator(sd.getPTask().toString())); + } + if (sd.getFTask() != null) { + schedule( + TaskFactory.create(FTASK, ep, _dbm, _fm), + new CronBasedIterator(sd.getFTask().toString())); + } + if (sd.getDTask() != null) { + schedule( + TaskFactory.create(DTASK, ep, _dbm, _fm), + new CronBasedIterator(sd.getDTask().toString())); + } + if (sd.getCTask() != null) { + schedule( + TaskFactory.create(CTASK, ep, _dbm, _fm), + new CronBasedIterator(sd.getCTask().toString())); + } + if (sd.getITask() != null) { + schedule( + TaskFactory.create(ITASK, ep, _dbm, _fm), + new CronBasedIterator(sd.getITask().toString())); + } + if (sd.getETask() != null) { + RefreshDataHubTask task = (RefreshDataHubTask) TaskFactory.create(ETASK, ep, _dbm, _fm); + task.setScheduler(this); + schedule(task, new CronBasedIterator(sd.getITask().toString())); + } + } catch (ParseException e) { + log.warn( + "EXEC ParseException: {} for {}", ep.getUri(), ExceptionHandler.logAndtoString(e, true)); } + } - /** - * Creates for all endpoints in the DB a default schedule 
- * - * @param dbm - * @return - */ - public static Collection createDefaultSchedule(MongoDBManager dbm) { - List l = new ArrayList(); - Collection eps = dbm.get(Endpoint.class, Endpoint.SCHEMA$); - for (Endpoint ep : eps) { - Schedule s = defaultSchedule(ep); - l.add(s); - } - - // add the analytics schedules for - Schedule s = new Schedule(); - s.setEndpoint(SPARQLES); - s.setITask(taskSchedule.get(ITASK)); - s.setETask(taskSchedule.get(ETASK)); - l.add(s); - - return l; - } + public void schedule(Task task, ScheduleIterator iter) { + Date time = iter.next(); + schedule(task, iter, time); + } - /** - * Returns the default schedule element for the endpoints - * - * @param ep - * @return - */ - public static Schedule defaultSchedule(Endpoint ep) { - Schedule s = new Schedule(); - s.setEndpoint(ep); - - s.setATask(taskSchedule.get(ATASK)); - s.setPTask(taskSchedule.get(PTASK)); - s.setFTask(taskSchedule.get(FTASK)); - s.setDTask(taskSchedule.get(DTASK)); - s.setITask(taskSchedule.get(ITASK)); - - return s; - } + public void schedule(Task task, ScheduleIterator iter, Date time) { + long startTime = time.getTime() - System.currentTimeMillis(); - /** - * Initial the scheduler with the schedules from the underlying DB. 
- * - * @param db - */ - public void init(MongoDBManager db) { + SchedulerTimerTask t = new SchedulerTimerTask(task, iter); - Collection schedules = db.get(Schedule.class, Schedule.SCHEMA$); - log.info("Scheduling tasks for {} endpoints", schedules.size()); + if (task instanceof ATask) + _monitor.submitA(ASERVICE.schedule(t, startTime, TimeUnit.MILLISECONDS)); + else _monitor.submit(SERVICE.schedule(t, startTime, TimeUnit.MILLISECONDS)); - for (Schedule sd : schedules) { - initSchedule(sd); - } - } + // log.info("SCHEDULED {} next:'{}' ",task, time); + log.debug("SCHEDULED {} next:'{}' policy:'{}'", task, time, iter); + } - public void initSchedule(Schedule sd) { - Endpoint ep = sd.getEndpoint(); - - try { - if (sd.getATask() != null) { - schedule( - TaskFactory.create(ATASK, ep, _dbm, _fm), - new CronBasedIterator(sd.getATask().toString())); - } - if (sd.getPTask() != null) { - schedule( - TaskFactory.create(PTASK, ep, _dbm, _fm), - new CronBasedIterator(sd.getPTask().toString())); - } - if (sd.getFTask() != null) { - schedule( - TaskFactory.create(FTASK, ep, _dbm, _fm), - new CronBasedIterator(sd.getFTask().toString())); - } - if (sd.getDTask() != null) { - schedule( - TaskFactory.create(DTASK, ep, _dbm, _fm), - new CronBasedIterator(sd.getDTask().toString())); - } - if (sd.getITask() != null) { - schedule( - TaskFactory.create(ITASK, ep, _dbm, _fm), - new CronBasedIterator(sd.getITask().toString())); - } - if (sd.getETask() != null) { - RefreshDataHubTask task = - (RefreshDataHubTask) TaskFactory.create(ETASK, ep, _dbm, _fm); - task.setScheduler(this); - schedule(task, new CronBasedIterator(sd.getITask().toString())); - } - } catch (ParseException e) { - log.warn( - "EXEC ParseException: {} for {}", - ep.getUri(), - ExceptionHandler.logAndtoString(e, true)); - } - } + public void shutdown() { + SERVICE.shutdown(); + } - public void schedule(Task task, ScheduleIterator iter) { - Date time = iter.next(); - schedule(task, iter, time); + private void reschedule(Task 
task, ScheduleIterator iter) { + Date time = iter.next(); + if (time.getTime() < System.currentTimeMillis()) { + log.error("PAST stop scheduling task, next date is in the past!"); + return; } - - public void schedule(Task task, ScheduleIterator iter, Date time) { - long startTime = time.getTime() - System.currentTimeMillis(); - - SchedulerTimerTask t = new SchedulerTimerTask(task, iter); - - if (task instanceof ATask) - _monitor.submitA(ASERVICE.schedule(t, startTime, TimeUnit.MILLISECONDS)); - else _monitor.submit(SERVICE.schedule(t, startTime, TimeUnit.MILLISECONDS)); - - // log.info("SCHEDULED {} next:'{}' ",task, time); - log.debug("SCHEDULED {} next:'{}' policy:'{}'", task, time, iter); + if (task instanceof EndpointTask) { + EndpointTask t = (EndpointTask) task; + Endpoint ep = _dbm.getEndpoint(t.getEndpoint()); + if (ep == null) { + log.warn("Endpoint {} was removed from DB, stop schedulingl", ep); + return; + } + t.setEndpoint(ep); } - - public void shutdown() { - SERVICE.shutdown(); + schedule(task, iter, time); + + Object[] s = {task, time, iter}; + log.debug("RESCHEDULED {} next:'{}' policy:'{}'", s); + } + + public void close() { + log.info("Shutting down scheduler service"); + List tasks = SERVICE.shutdownNow(); + log.info("{} Tasks were scheduled after the shutdown command", tasks.size()); + _monitor.halt(); + if (_dbm != null) { + _dbm.close(); } + } - private void reschedule(Task task, ScheduleIterator iter) { - Date time = iter.next(); - if (time.getTime() < System.currentTimeMillis()) { - log.error("PAST stop scheduling task, next date is in the past!"); - return; - } - if (task instanceof EndpointTask) { - EndpointTask t = (EndpointTask) task; - Endpoint ep = _dbm.getEndpoint(t.getEndpoint()); - if (ep == null) { - log.warn("Endpoint {} was removed from DB, stop schedulingl", ep); - return; - } - t.setEndpoint(ep); - } - schedule(task, iter, time); - - Object[] s = {task, time, iter}; - log.debug("RESCHEDULED {} next:'{}' policy:'{}'", s); - } + 
public void useDB(MongoDBManager dbm) { + _dbm = dbm; + } - public void close() { - log.info("Shutting down scheduler service"); - List tasks = SERVICE.shutdownNow(); - log.info("{} Tasks were scheduled after the shutdown command", tasks.size()); - _monitor.halt(); - if (_dbm != null) { - _dbm.close(); - } - } + public void useFileManager(FileManager fm) { + _fm = fm; + } - public void useDB(MongoDBManager dbm) { - _dbm = dbm; - } + public void delSchedule(Endpoint ep) { + // TODO Auto-generated method stub - public void useFileManager(FileManager fm) { - _fm = fm; - } + } - public void delSchedule(Endpoint ep) { - // TODO Auto-generated method stub + /** + * A timer task which executes the assigned task and automatically reschedules the data + * + * @author umbrichj + */ + class SchedulerTimerTask implements Runnable { + private ScheduleIterator iterator; + private Task schedulerTask; + public SchedulerTimerTask(Task schedulerTask, ScheduleIterator iterator) { + this.schedulerTask = schedulerTask; + this.iterator = iterator; } - /** - * A timer task which executes the assigned task and automatically reschedules the data - * - * @author umbrichj - */ - class SchedulerTimerTask implements Runnable { - private ScheduleIterator iterator; - private Task schedulerTask; - - public SchedulerTimerTask(Task schedulerTask, ScheduleIterator iterator) { - this.schedulerTask = schedulerTask; - this.iterator = iterator; - } - - public void run() { - try { - schedulerTask.call(); - reschedule(schedulerTask, iterator); - } catch (Exception e) { - log.error( - "Exception: {} {}", - schedulerTask, - ExceptionHandler.logAndtoString(e, true)); - } - } + public void run() { + try { + schedulerTask.call(); + reschedule(schedulerTask, iterator); + } catch (Exception e) { + log.error("Exception: {} {}", schedulerTask, ExceptionHandler.logAndtoString(e, true)); + } } + } } diff --git a/backend/src/main/java/sparqles/schedule/SchedulerMonitor.java 
b/backend/src/main/java/sparqles/schedule/SchedulerMonitor.java index bada55df..605f7e6e 100644 --- a/backend/src/main/java/sparqles/schedule/SchedulerMonitor.java +++ b/backend/src/main/java/sparqles/schedule/SchedulerMonitor.java @@ -8,60 +8,60 @@ public class SchedulerMonitor extends Thread { - private static final Logger log = LoggerFactory.getLogger(SchedulerMonitor.class); - private final Long SLEEP_TIME = 10 * 60 * 1000L; - private final ConcurrentLinkedQueue future = - new ConcurrentLinkedQueue(); - private final ConcurrentLinkedQueue afuture = - new ConcurrentLinkedQueue(); - private boolean run = true; + private static final Logger log = LoggerFactory.getLogger(SchedulerMonitor.class); + private final Long SLEEP_TIME = 15 * 1000L; + private final ConcurrentLinkedQueue future = + new ConcurrentLinkedQueue(); + private final ConcurrentLinkedQueue afuture = + new ConcurrentLinkedQueue(); + private boolean run = true; - public void run() { + public void run() { - while (run) { - log.info("CHECK queue: {} availability, {} others", afuture.size(), future.size()); - int tasks = future.size(), atasks = future.size(); + while (run) { + log.info("CHECK queue: {} availability, {} others", afuture.size(), future.size()); + int tasks = future.size(), atasks = future.size(); - int tasksDone = 0, atasksDone = 0; - for (ScheduledFuture f : future) { - if (f.isDone()) { - future.remove(f); - tasksDone++; - } - } - - for (ScheduledFuture f : afuture) { - if (f.isDone()) { - afuture.remove(f); - atasksDone++; - } - } + int tasksDone = 0, atasksDone = 0; + for (ScheduledFuture f : future) { + if (f.isDone()) { + future.remove(f); + tasksDone++; + } + } - log.info( - "STATS availability: {} done, {} scheduled; other: {} done, {} scheduled", - atasksDone, - afuture.size(), - tasksDone, - future.size()); - try { - Thread.sleep(SLEEP_TIME); - } catch (InterruptedException e) { - log.error("{}", ExceptionHandler.logAndtoString(e, true)); - } + for (ScheduledFuture f : afuture) { 
+ if (f.isDone()) { + afuture.remove(f); + atasksDone++; } - } + } - public void halt() { - run = false; + log.info( + "STATS availability: {} done, {} scheduled; other: {} done, {} scheduled", + atasksDone, + afuture.size(), + tasksDone, + future.size()); + try { + Thread.sleep(SLEEP_TIME); + } catch (InterruptedException e) { + log.error("{}", ExceptionHandler.logAndtoString(e, true)); + } } + } - public void submit(ScheduledFuture schedule) { - log.debug("Received new future object: {}", schedule); - future.add(schedule); - } + public void halt() { + run = false; + } - public void submitA(ScheduledFuture schedule) { - log.debug("Received new future object [A]: {}", schedule); - afuture.add(schedule); - } + public void submit(ScheduledFuture schedule) { + log.debug("Received new future object: {}", schedule); + future.add(schedule); + } + + public void submitA(ScheduledFuture schedule) { + log.debug("Received new future object [A]: {}", schedule); + afuture.add(schedule); + } } diff --git a/backend/src/main/java/sparqles/schedule/iter/CronBasedIterator.java b/backend/src/main/java/sparqles/schedule/iter/CronBasedIterator.java index 5aca05ed..44d81bb6 100644 --- a/backend/src/main/java/sparqles/schedule/iter/CronBasedIterator.java +++ b/backend/src/main/java/sparqles/schedule/iter/CronBasedIterator.java @@ -5,23 +5,23 @@ import sparqles.schedule.CronExpression; public class CronBasedIterator implements ScheduleIterator { - private final CronExpression _cron; + private final CronExpression _cron; - private Date next; + private Date next; - public CronBasedIterator(String cronExpression) throws ParseException { - _cron = new CronExpression(cronExpression); - next = _cron.getNextValidTimeAfter(new Date()); - } + public CronBasedIterator(String cronExpression) throws ParseException { + _cron = new CronExpression(cronExpression); + next = _cron.getNextValidTimeAfter(new Date()); + } - public Date next() { - Date res = next; - next = _cron.getNextValidTimeAfter(res); - 
return res; - } + public Date next() { + Date res = next; + next = _cron.getNextValidTimeAfter(res); + return res; + } - @Override - public String toString() { - return "" + this.getClass().getSimpleName() + "(" + _cron.getCronExpression() + ")"; - } + @Override + public String toString() { + return "" + this.getClass().getSimpleName() + "(" + _cron.getCronExpression() + ")"; + } } diff --git a/backend/src/main/java/sparqles/schedule/iter/DailyIterator.java b/backend/src/main/java/sparqles/schedule/iter/DailyIterator.java index 08fd412b..364f1135 100644 --- a/backend/src/main/java/sparqles/schedule/iter/DailyIterator.java +++ b/backend/src/main/java/sparqles/schedule/iter/DailyIterator.java @@ -4,29 +4,29 @@ import java.util.Date; public class DailyIterator implements ScheduleIterator { - private final int hourOfDay, minute, second; - private final Calendar calendar = Calendar.getInstance(); + private final int hourOfDay, minute, second; + private final Calendar calendar = Calendar.getInstance(); - public DailyIterator(int hourOfDay, int minute, int second) { - this(hourOfDay, minute, second, new Date()); - } + public DailyIterator(int hourOfDay, int minute, int second) { + this(hourOfDay, minute, second, new Date()); + } - public DailyIterator(int hourOfDay, int minute, int second, Date date) { - this.hourOfDay = hourOfDay; - this.minute = minute; - this.second = second; - calendar.setTime(date); - calendar.set(Calendar.HOUR_OF_DAY, hourOfDay); - calendar.set(Calendar.MINUTE, minute); - calendar.set(Calendar.SECOND, second); - calendar.set(Calendar.MILLISECOND, 0); - if (!calendar.getTime().before(date)) { - calendar.add(Calendar.DATE, -1); - } + public DailyIterator(int hourOfDay, int minute, int second, Date date) { + this.hourOfDay = hourOfDay; + this.minute = minute; + this.second = second; + calendar.setTime(date); + calendar.set(Calendar.HOUR_OF_DAY, hourOfDay); + calendar.set(Calendar.MINUTE, minute); + calendar.set(Calendar.SECOND, second); + 
calendar.set(Calendar.MILLISECOND, 0); + if (!calendar.getTime().before(date)) { + calendar.add(Calendar.DATE, -1); } + } - public Date next() { - calendar.add(Calendar.DATE, 1); - return calendar.getTime(); - } + public Date next() { + calendar.add(Calendar.DATE, 1); + return calendar.getTime(); + } } diff --git a/backend/src/main/java/sparqles/schedule/iter/HourlyIterator.java b/backend/src/main/java/sparqles/schedule/iter/HourlyIterator.java index e68a8c43..31e89c5d 100644 --- a/backend/src/main/java/sparqles/schedule/iter/HourlyIterator.java +++ b/backend/src/main/java/sparqles/schedule/iter/HourlyIterator.java @@ -5,27 +5,27 @@ public class HourlyIterator implements ScheduleIterator { - private final int minute, second; - private final Calendar calendar = Calendar.getInstance(); + private final int minute, second; + private final Calendar calendar = Calendar.getInstance(); - public HourlyIterator(int minute, int second) { - this(minute, second, new Date()); - } + public HourlyIterator(int minute, int second) { + this(minute, second, new Date()); + } - public HourlyIterator(int minute, int second, Date date) { - this.minute = minute; - this.second = second; - calendar.setTime(date); - calendar.set(Calendar.MINUTE, minute); - calendar.set(Calendar.SECOND, second); - calendar.set(Calendar.MILLISECOND, 0); - if (!calendar.getTime().before(date)) { - calendar.add(Calendar.HOUR, -1); - } + public HourlyIterator(int minute, int second, Date date) { + this.minute = minute; + this.second = second; + calendar.setTime(date); + calendar.set(Calendar.MINUTE, minute); + calendar.set(Calendar.SECOND, second); + calendar.set(Calendar.MILLISECOND, 0); + if (!calendar.getTime().before(date)) { + calendar.add(Calendar.HOUR, -1); } + } - public Date next() { - calendar.add(Calendar.HOUR, 1); - return calendar.getTime(); - } + public Date next() { + calendar.add(Calendar.HOUR, 1); + return calendar.getTime(); + } } diff --git 
a/backend/src/main/java/sparqles/schedule/iter/ScheduleIterator.java b/backend/src/main/java/sparqles/schedule/iter/ScheduleIterator.java index 2f04e429..d9678d03 100644 --- a/backend/src/main/java/sparqles/schedule/iter/ScheduleIterator.java +++ b/backend/src/main/java/sparqles/schedule/iter/ScheduleIterator.java @@ -4,5 +4,5 @@ public interface ScheduleIterator { - Date next(); + Date next(); } diff --git a/backend/src/main/java/sparqles/schedule/iter/SecondlyIterator.java b/backend/src/main/java/sparqles/schedule/iter/SecondlyIterator.java index 70c5f9aa..8ef4a276 100644 --- a/backend/src/main/java/sparqles/schedule/iter/SecondlyIterator.java +++ b/backend/src/main/java/sparqles/schedule/iter/SecondlyIterator.java @@ -5,25 +5,25 @@ public class SecondlyIterator implements ScheduleIterator { - private final int second; - private final Calendar calendar = Calendar.getInstance(); + private final int second; + private final Calendar calendar = Calendar.getInstance(); - public SecondlyIterator(int second) { - this(second, new Date()); - } + public SecondlyIterator(int second) { + this(second, new Date()); + } - public SecondlyIterator(int second, Date date) { - this.second = second; - calendar.setTime(date); - calendar.set(Calendar.SECOND, second); - calendar.set(Calendar.MILLISECOND, 0); - if (!calendar.getTime().before(date)) { - calendar.add(Calendar.SECOND, -second); - } + public SecondlyIterator(int second, Date date) { + this.second = second; + calendar.setTime(date); + calendar.set(Calendar.SECOND, second); + calendar.set(Calendar.MILLISECOND, 0); + if (!calendar.getTime().before(date)) { + calendar.add(Calendar.SECOND, -second); } + } - public Date next() { - calendar.add(Calendar.SECOND, second); - return calendar.getTime(); - } + public Date next() { + calendar.add(Calendar.SECOND, second); + return calendar.getTime(); + } } diff --git a/backend/src/main/java/sparqles/utils/AvroUtils.java b/backend/src/main/java/sparqles/utils/AvroUtils.java index 
0247faac..53a979e5 100644 --- a/backend/src/main/java/sparqles/utils/AvroUtils.java +++ b/backend/src/main/java/sparqles/utils/AvroUtils.java @@ -10,62 +10,62 @@ public class AvroUtils { - private static Map schemas = new HashMap(); + private static Map schemas = new HashMap(); - private AvroUtils() {} + private AvroUtils() {} - public static void addSchema(String name, Schema schema) { - schemas.put(name, schema); - } + public static void addSchema(String name, Schema schema) { + schemas.put(name, schema); + } - public static Schema getSchema(String name) { - return schemas.get(name); - } + public static Schema getSchema(String name) { + return schemas.get(name); + } - public static String resolveSchema(String sc) { + public static String resolveSchema(String sc) { - String result = sc; - for (Map.Entry entry : schemas.entrySet()) - result = replace(result, entry.getKey(), entry.getValue().toString()); - return result; - } + String result = sc; + for (Map.Entry entry : schemas.entrySet()) + result = replace(result, entry.getKey(), entry.getValue().toString()); + return result; + } - static String replace(String str, String pattern, String replace) { - - int s = 0; - int e = 0; - StringBuffer result = new StringBuffer(); - while ((e = str.indexOf(pattern, s)) >= 0) { - result.append(str.substring(s, e)); - result.append(replace); - s = e + pattern.length(); - } - result.append(str.substring(s)); - return result.toString(); + static String replace(String str, String pattern, String replace) { + + int s = 0; + int e = 0; + StringBuffer result = new StringBuffer(); + while ((e = str.indexOf(pattern, s)) >= 0) { + result.append(str.substring(s, e)); + result.append(replace); + s = e + pattern.length(); } + result.append(str.substring(s)); + return result.toString(); + } - public static Schema parseSchema(String schemaString) { + public static Schema parseSchema(String schemaString) { - String completeSchema = resolveSchema(schemaString); - Schema schema = 
Schema.parse(completeSchema); - String name = schema.getFullName(); - schemas.put(name, schema); - return schema; - } + String completeSchema = resolveSchema(schemaString); + Schema schema = Schema.parse(completeSchema); + String name = schema.getFullName(); + schemas.put(name, schema); + return schema; + } - public static Schema parseSchema(InputStream in) throws IOException { + public static Schema parseSchema(InputStream in) throws IOException { - StringBuffer out = new StringBuffer(); - byte[] b = new byte[4096]; - for (int n; (n = in.read(b)) != -1; ) { - out.append(new String(b, 0, n)); - } - return parseSchema(out.toString()); + StringBuffer out = new StringBuffer(); + byte[] b = new byte[4096]; + for (int n; (n = in.read(b)) != -1; ) { + out.append(new String(b, 0, n)); } + return parseSchema(out.toString()); + } - public static Schema parseSchema(File file) throws IOException { + public static Schema parseSchema(File file) throws IOException { - FileInputStream fis = new FileInputStream(file); - return parseSchema(fis); - } + FileInputStream fis = new FileInputStream(file); + return parseSchema(fis); + } } diff --git a/backend/src/main/java/sparqles/utils/ConnectionManager.java b/backend/src/main/java/sparqles/utils/ConnectionManager.java index 3f45024f..e8f1498e 100644 --- a/backend/src/main/java/sparqles/utils/ConnectionManager.java +++ b/backend/src/main/java/sparqles/utils/ConnectionManager.java @@ -27,96 +27,97 @@ public class ConnectionManager { - private DefaultHttpClient _client; - private IdleConnectionMonitorThread _monitor; - - public ConnectionManager( - String proxyHost, int proxyPort, String puser, String ppassword, int connections) { - // general setup - SchemeRegistry supportedSchemes = new SchemeRegistry(); - - // Register the "http" and "https" protocol schemes, they are - // required by the default operator to look up socket factories. 
- supportedSchemes.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80)); - supportedSchemes.register(new Scheme("https", SSLSocketFactory.getSocketFactory(), 443)); - - // prepare parameters - HttpParams params = new BasicHttpParams(); - HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1); - HttpProtocolParams.setContentCharset(params, "UTF-8"); - HttpProtocolParams.setUseExpectContinue(params, true); - - HttpClientParams.setRedirecting(params, true); - - // connection params - params.setParameter(CoreConnectionPNames.SO_TIMEOUT, CONSTANTS.SOCKET_TIMEOUT); - // params.setParameter(CoreConnectionPNames.TCP_NODELAY, true); - params.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, CONSTANTS.CONNECTION_TIMEOUT); - - ConnManagerParams.setMaxTotalConnections(params, connections); - ClientConnectionManager cm = new ThreadSafeClientConnManager(params, supportedSchemes); - - _client = new DefaultHttpClient(cm, params); - - // check if we have a proxy - if (proxyHost != null) { - HttpHost proxy = new HttpHost(proxyHost, proxyPort, "http"); - _client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy); - - if (puser != null) { - _client.getCredentialsProvider() - .setCredentials( - new AuthScope(proxyHost, proxyPort), - new UsernamePasswordCredentials(puser, new String(ppassword))); - } - } - _monitor = new IdleConnectionMonitorThread(cm); - _monitor.start(); + private DefaultHttpClient _client; + private IdleConnectionMonitorThread _monitor; + + public ConnectionManager( + String proxyHost, int proxyPort, String puser, String ppassword, int connections) { + // general setup + SchemeRegistry supportedSchemes = new SchemeRegistry(); + + // Register the "http" and "https" protocol schemes, they are + // required by the default operator to look up socket factories. 
+ supportedSchemes.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80)); + supportedSchemes.register(new Scheme("https", SSLSocketFactory.getSocketFactory(), 443)); + + // prepare parameters + HttpParams params = new BasicHttpParams(); + HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1); + HttpProtocolParams.setContentCharset(params, "UTF-8"); + HttpProtocolParams.setUseExpectContinue(params, true); + + HttpClientParams.setRedirecting(params, true); + + // connection params + params.setParameter(CoreConnectionPNames.SO_TIMEOUT, CONSTANTS.SOCKET_TIMEOUT); + // params.setParameter(CoreConnectionPNames.TCP_NODELAY, true); + params.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, CONSTANTS.CONNECTION_TIMEOUT); + + ConnManagerParams.setMaxTotalConnections(params, connections); + ClientConnectionManager cm = new ThreadSafeClientConnManager(params, supportedSchemes); + + _client = new DefaultHttpClient(cm, params); + + // check if we have a proxy + if (proxyHost != null) { + HttpHost proxy = new HttpHost(proxyHost, proxyPort, "http"); + _client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy); + + if (puser != null) { + _client + .getCredentialsProvider() + .setCredentials( + new AuthScope(proxyHost, proxyPort), + new UsernamePasswordCredentials(puser, new String(ppassword))); + } } + _monitor = new IdleConnectionMonitorThread(cm); + _monitor.start(); + } - public void shutdown() { - _client.getConnectionManager().shutdown(); - } + public void shutdown() { + _client.getConnectionManager().shutdown(); + } - public HttpResponse connect(HttpGet get) throws ClientProtocolException, IOException { - return _client.execute(get); - } + public HttpResponse connect(HttpGet get) throws ClientProtocolException, IOException { + return _client.execute(get); + } - static class IdleConnectionMonitorThread extends Thread { + static class IdleConnectionMonitorThread extends Thread { - private final ClientConnectionManager connMgr; - private 
volatile boolean shutdown; + private final ClientConnectionManager connMgr; + private volatile boolean shutdown; - public IdleConnectionMonitorThread(ClientConnectionManager connMgr) { - super(); - this.connMgr = connMgr; - } + public IdleConnectionMonitorThread(ClientConnectionManager connMgr) { + super(); + this.connMgr = connMgr; + } - @Override - public void run() { - try { - while (!shutdown) { - synchronized (this) { - wait(60000); - // Close expired connections - connMgr.closeExpiredConnections(); - // Optionally, close connections - // that have been idle longer than 30 sec - connMgr.closeIdleConnections(30, TimeUnit.SECONDS); - // Log.info("Cleaning up expired and idle - // connections"); - } - } - } catch (InterruptedException ex) { - // terminate - } + @Override + public void run() { + try { + while (!shutdown) { + synchronized (this) { + wait(60000); + // Close expired connections + connMgr.closeExpiredConnections(); + // Optionally, close connections + // that have been idle longer than 30 sec + connMgr.closeIdleConnections(30, TimeUnit.SECONDS); + // Log.info("Cleaning up expired and idle + // connections"); + } } + } catch (InterruptedException ex) { + // terminate + } + } - public void shutdown() { - shutdown = true; - synchronized (this) { - notifyAll(); - } - } + public void shutdown() { + shutdown = true; + synchronized (this) { + notifyAll(); + } } + } } diff --git a/backend/src/main/java/sparqles/utils/DatahubAccess.java b/backend/src/main/java/sparqles/utils/DatahubAccess.java index f855eaf2..7e71c2f6 100644 --- a/backend/src/main/java/sparqles/utils/DatahubAccess.java +++ b/backend/src/main/java/sparqles/utils/DatahubAccess.java @@ -33,153 +33,152 @@ import org.slf4j.LoggerFactory; import sparqles.avro.Dataset; import sparqles.avro.Endpoint; -import sparqles.core.CONSTANTS; import sparqles.core.EndpointFactory; +import sparqles.core.SPARQLESProperties; // http://datahub.io/api/2/search/resource?format=api/sparql&all_fields=1&limit=1000 public 
class DatahubAccess { - private static final Logger log = LoggerFactory.getLogger(DatahubAccess.class); - - /** This class fetch the SPARQL endpoint list from datahub using the datahub API */ - public static Collection checkEndpointList() { - Map results = new HashMap(); - try { - // Do not do this in production!!! - HostnameVerifier hostnameVerifier = - org.apache.http.conn.ssl.SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; - - DefaultHttpClient client = new DefaultHttpClient(); - - SchemeRegistry registry = new SchemeRegistry(); - SSLSocketFactory socketFactory = SSLSocketFactory.getSocketFactory(); - socketFactory.setHostnameVerifier((X509HostnameVerifier) hostnameVerifier); - registry.register(new Scheme("https", socketFactory, 443)); - SingleClientConnManager mgr = new SingleClientConnManager(client.getParams(), registry); - DefaultHttpClient httpclient = new DefaultHttpClient(mgr, client.getParams()); - - // Set verifier - HttpsURLConnection.setDefaultHostnameVerifier(hostnameVerifier); - String apiURL = - "https://old.datahub.io/api/2/search/resource?format=api/sparql&all_fields=1&limit=1000"; - apiURL = "https://old.datahub.io/api/3/action/resource_search?query=format:api/sparql"; - HttpGet getRequest = new HttpGet(apiURL); - - getRequest.addHeader("User-Agent", CONSTANTS.USER_AGENT); - - HttpResponse response = httpclient.execute(getRequest); - - if (response.getStatusLine().getStatusCode() != 200) { - throw new RuntimeException( - "Failed : HTTP error code : " + response.getStatusLine().getStatusCode()); - } - HttpEntity entity = response.getEntity(); - String respString = - EntityUtils.toString(entity, ContentType.getOrDefault(entity).getCharset()); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - JsonNode rootNode = mapper.readTree(respString); - - var resNode = rootNode.get("result"); - var res = (ArrayNode) resNode.get("results"); - log.info("We found {} datasets", res.size()); - Iterator iter = 
res.elements(); - int c = 1; - - Map> map = new HashMap>(); - while (iter.hasNext()) { - JsonNode node = iter.next(); - String endpointURL = node.findPath("url").asText().trim(); - String datasetId = node.findPath("package_id").asText().trim(); - - Set s = map.get(endpointURL); - if (s == null) { - s = new HashSet(); - map.put(endpointURL, s); - } - s.add(datasetId); - } - for (Entry> ent : map.entrySet()) { - String endpointURL = ent.getKey(); - - if (endpointURL.length() == 0) continue; - - Endpoint ep = results.get(endpointURL); - if (ep == null) { - try { - ep = EndpointFactory.newEndpoint(new URI(endpointURL)); - List l = new ArrayList(); - ep.setDatasets(l); - results.put(endpointURL, ep); - } catch (URISyntaxException e) { - log.warn("URISyntaxException:{}", e.getMessage()); - } - } - if (ent.getValue().size() != 0) { - for (String ds : ent.getValue()) { - ep = checkForDataset(ep, ds, httpclient); - log.info("Found dataset information for {}", ep); - } - } else { - System.err.println("This should not happend for ep" + ep); - } - log.info("[GET] [{}] {}", c++, ep); - } - // httpClient.getConnectionManager().shutdown(); - } catch (Exception e2) { - log.warn("[EXEC] {}", e2); - e2.printStackTrace(); + private static final Logger log = LoggerFactory.getLogger(DatahubAccess.class); + + /** This class fetch the SPARQL endpoint list from datahub using the datahub API */ + public static Collection checkEndpointList() { + Map results = new HashMap(); + try { + // Do not do this in production!!! 
+ HostnameVerifier hostnameVerifier = + org.apache.http.conn.ssl.SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; + + DefaultHttpClient client = new DefaultHttpClient(); + + SchemeRegistry registry = new SchemeRegistry(); + SSLSocketFactory socketFactory = SSLSocketFactory.getSocketFactory(); + socketFactory.setHostnameVerifier((X509HostnameVerifier) hostnameVerifier); + registry.register(new Scheme("https", socketFactory, 443)); + SingleClientConnManager mgr = new SingleClientConnManager(client.getParams(), registry); + DefaultHttpClient httpclient = new DefaultHttpClient(mgr, client.getParams()); + + // Set verifier + HttpsURLConnection.setDefaultHostnameVerifier(hostnameVerifier); + String apiURL = + "https://old.datahub.io/api/2/search/resource?format=api/sparql&all_fields=1&limit=1000"; + apiURL = "https://old.datahub.io/api/3/action/resource_search?query=format:api/sparql"; + HttpGet getRequest = new HttpGet(apiURL); + + getRequest.addHeader("User-Agent", SPARQLESProperties.getUserAgent()); + + HttpResponse response = httpclient.execute(getRequest); + + if (response.getStatusLine().getStatusCode() != 200) { + throw new RuntimeException( + "Failed : HTTP error code : " + response.getStatusLine().getStatusCode()); + } + HttpEntity entity = response.getEntity(); + String respString = + EntityUtils.toString(entity, ContentType.getOrDefault(entity).getCharset()); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + JsonNode rootNode = mapper.readTree(respString); + + var resNode = rootNode.get("result"); + var res = (ArrayNode) resNode.get("results"); + log.info("We found {} datasets", res.size()); + Iterator iter = res.elements(); + int c = 1; + + Map> map = new HashMap>(); + while (iter.hasNext()) { + JsonNode node = iter.next(); + String endpointURL = node.findPath("url").asText().trim(); + String datasetId = node.findPath("package_id").asText().trim(); + + Set s = map.get(endpointURL); + if (s == null) { + s = new 
HashSet(); + map.put(endpointURL, s); } - log.info("Found {} endpoints", results.size()); - - return results.values(); + s.add(datasetId); + } + for (Entry> ent : map.entrySet()) { + String endpointURL = ent.getKey(); + + if (endpointURL.length() == 0) continue; + + Endpoint ep = results.get(endpointURL); + if (ep == null) { + try { + ep = EndpointFactory.newEndpoint(new URI(endpointURL)); + List l = new ArrayList(); + ep.setDatasets(l); + results.put(endpointURL, ep); + } catch (URISyntaxException e) { + log.warn("URISyntaxException:{}", e.getMessage()); + } + } + if (ent.getValue().size() != 0) { + for (String ds : ent.getValue()) { + ep = checkForDataset(ep, ds, httpclient); + log.info("Found dataset information for {}", ep); + } + } else { + System.err.println("This should not happend for ep" + ep); + } + log.info("[GET] [{}] {}", c++, ep); + } + // httpClient.getConnectionManager().shutdown(); + } catch (Exception e2) { + log.warn("[EXEC] {}", e2); + e2.printStackTrace(); } - - private static Endpoint checkForDataset(Endpoint ep, String datasetId, HttpClient httpClient) { - log.debug("[GET] dataset info for {} and {}", datasetId, ep); - HttpGet getRequest = null; - try { - getRequest = - new HttpGet("https://old.datahub.io/api/3/action/package_show?id=" + datasetId); - getRequest.addHeader("User-Agent", CONSTANTS.USER_AGENT); - System.out.println(getRequest); - HttpResponse response = httpClient.execute(getRequest); - if (response.getStatusLine().getStatusCode() != 200) { - throw new RuntimeException( - "Failed : HTTP error code : " + response.getStatusLine().getStatusCode()); - } - String respString = EntityUtils.toString(response.getEntity()); - // response.close(); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - JsonNode rootNode = mapper.readTree(respString); - JsonNode res = rootNode.get("result"); - - // System.out.println(rootNode); - String ckan_url = res.findPath("url").asText(); - List titles = 
res.findValues("title"); - String title = null; - for (JsonNode s : titles) { - // System.out.println(s); - if (!s.toString().contains("Linking Open")) title = s.asText(); - if (title != null && title.length() > 200) { - title = title.substring(0, 200); - } - } - - Dataset d = new Dataset(); - d.setLabel(title); - d.setUri(ckan_url); - List l = ep.getDatasets(); - l.add(d); - - return ep; - - } catch (Exception e) { - log.warn("[EXEC] " + ep, e); + log.info("Found {} endpoints", results.size()); + + return results.values(); + } + + private static Endpoint checkForDataset(Endpoint ep, String datasetId, HttpClient httpClient) { + log.debug("[GET] dataset info for {} and {}", datasetId, ep); + HttpGet getRequest = null; + try { + getRequest = new HttpGet("https://old.datahub.io/api/3/action/package_show?id=" + datasetId); + getRequest.addHeader("User-Agent", SPARQLESProperties.getUserAgent()); + System.out.println(getRequest); + HttpResponse response = httpClient.execute(getRequest); + if (response.getStatusLine().getStatusCode() != 200) { + throw new RuntimeException( + "Failed : HTTP error code : " + response.getStatusLine().getStatusCode()); + } + String respString = EntityUtils.toString(response.getEntity()); + // response.close(); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + JsonNode rootNode = mapper.readTree(respString); + JsonNode res = rootNode.get("result"); + + // System.out.println(rootNode); + String ckan_url = res.findPath("url").asText(); + List titles = res.findValues("title"); + String title = null; + for (JsonNode s : titles) { + // System.out.println(s); + if (!s.toString().contains("Linking Open")) title = s.asText(); + if (title != null && title.length() > 200) { + title = title.substring(0, 200); } - return ep; + } + + Dataset d = new Dataset(); + d.setLabel(title); + d.setUri(ckan_url); + List l = ep.getDatasets(); + l.add(d); + + return ep; + + } catch (Exception e) { + log.warn("[EXEC] " + 
ep, e); } + return ep; + } } diff --git a/backend/src/main/java/sparqles/utils/DateFormater.java b/backend/src/main/java/sparqles/utils/DateFormater.java index 803870f1..0dbc64a4 100644 --- a/backend/src/main/java/sparqles/utils/DateFormater.java +++ b/backend/src/main/java/sparqles/utils/DateFormater.java @@ -8,31 +8,31 @@ public class DateFormater { - public static final String YYYYMMDD = "yyyy-MM-dd"; - public static final String YYYYMMDDHHMM = "yyyy-MM-dd#HH:mm"; - public static final String ISO8601 = "yyyy-MM-dd'T'HH:mm'Z'"; - static final Calendar CAL = GregorianCalendar.getInstance(); + public static final String YYYYMMDD = "yyyy-MM-dd"; + public static final String YYYYMMDDHHMM = "yyyy-MM-dd#HH:mm"; + public static final String ISO8601 = "yyyy-MM-dd'T'HH:mm'Z'"; + static final Calendar CAL = GregorianCalendar.getInstance(); - public static String getDataAsString(String pattern) { - return getDataAsString(pattern, CAL.getTime()); - } + public static String getDataAsString(String pattern) { + return getDataAsString(pattern, CAL.getTime()); + } - public static String getDataAsString(String pattern, Date date) { - return new SimpleDateFormat(pattern).format(date); - } + public static String getDataAsString(String pattern, Date date) { + return new SimpleDateFormat(pattern).format(date); + } - public static String formatInterval(final long l) { - final long hr = TimeUnit.MILLISECONDS.toHours(l); - final long min = TimeUnit.MILLISECONDS.toMinutes(l - TimeUnit.HOURS.toMillis(hr)); - final long sec = - TimeUnit.MILLISECONDS.toSeconds( - l - TimeUnit.HOURS.toMillis(hr) - TimeUnit.MINUTES.toMillis(min)); - final long ms = - TimeUnit.MILLISECONDS.toMillis( - l - - TimeUnit.HOURS.toMillis(hr) - - TimeUnit.MINUTES.toMillis(min) - - TimeUnit.SECONDS.toMillis(sec)); - return String.format("%02d:%02d:%02d.%03d", hr, min, sec, ms); - } + public static String formatInterval(final long l) { + final long hr = TimeUnit.MILLISECONDS.toHours(l); + final long min = 
TimeUnit.MILLISECONDS.toMinutes(l - TimeUnit.HOURS.toMillis(hr)); + final long sec = + TimeUnit.MILLISECONDS.toSeconds( + l - TimeUnit.HOURS.toMillis(hr) - TimeUnit.MINUTES.toMillis(min)); + final long ms = + TimeUnit.MILLISECONDS.toMillis( + l + - TimeUnit.HOURS.toMillis(hr) + - TimeUnit.MINUTES.toMillis(min) + - TimeUnit.SECONDS.toMillis(sec)); + return String.format("%02d:%02d:%02d.%03d", hr, min, sec, ms); + } } diff --git a/backend/src/main/java/sparqles/utils/ExceptionHandler.java b/backend/src/main/java/sparqles/utils/ExceptionHandler.java index e53ba069..deae2dc1 100644 --- a/backend/src/main/java/sparqles/utils/ExceptionHandler.java +++ b/backend/src/main/java/sparqles/utils/ExceptionHandler.java @@ -12,85 +12,97 @@ public class ExceptionHandler { - private static final Logger log = LoggerFactory.getLogger(ExceptionHandler.class); - private static final AtomicInteger excID = new AtomicInteger(); - private static final AtomicLong excCounter = new AtomicLong(); + private static final Logger log = LoggerFactory.getLogger(ExceptionHandler.class); + private static final AtomicInteger excID = new AtomicInteger(); + private static final AtomicLong excCounter = new AtomicLong(); - private static final HashMap exceptionID = new HashMap(); + private static final HashMap exceptionID = new HashMap(); - public static String logAndtoString(Exception e) { - return logAndtoString(e, false); - } + public static String logAndtoString(Exception e) { + return logAndtoString(e, false); + } + + public static String logAndtoString(Exception e, boolean withExceptionID) { - public static String logAndtoString(Exception e, boolean withExceptionID) { - - String id = ExceptionHandler.getExceptionID(e); - ExceptionHandler.log(id, e); - - // if(ExceptionUtils.indexOfThrowable(e, Error.class) != -1) { - if (ExceptionUtils.indexOfThrowable(e, VirtualMachineError.class) != -1) { - // if(ExceptionUtils.indexOfThrowable(e, OutOfMemoryError.class) != -1) { - // 1) no point going further - // 
2) no clue who/how a descendant of java.lang.Error was caught and not rethrown - // REVISIT: consider terminating on Error.class - // TODO: remove this crutch and ensure no part of the system does something that - // requires this hack - try { - System.exit(1); - } catch (Throwable e2) { // last resort - e2.printStackTrace(); // last resort - Runtime.getRuntime().exit(255); // last resort - } - } - - StringBuilder sb = new StringBuilder(); - if (withExceptionID) { - sb.append(id).append("> "); - } - sb.append(e.getClass().getSimpleName()) - .append(" msg:") - .append(e.getMessage()) - .append(" cause:") - .append(e.getCause()) - .toString(); - - return sb.toString(); + String id = ExceptionHandler.getExceptionID(e); + ExceptionHandler.log(id, e); + + // if(ExceptionUtils.indexOfThrowable(e, Error.class) != -1) { + if (ExceptionUtils.indexOfThrowable(e, VirtualMachineError.class) != -1) { + // if(ExceptionUtils.indexOfThrowable(e, OutOfMemoryError.class) != -1) { + // 1) no point going further + // 2) no clue who/how a descendant of java.lang.Error was caught and not rethrown + // REVISIT: consider terminating on Error.class + // TODO: remove this crutch and ensure no part of the system does something that + // requires this hack + try { + System.exit(1); + } catch (Throwable e2) { // last resort + e2.printStackTrace(); // last resort + Runtime.getRuntime().exit(255); // last resort + } } - public static String toFullString(Exception e) { - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - e.printStackTrace(pw); - return sw.toString(); + StringBuilder sb = new StringBuilder(); + if (withExceptionID) { + sb.append(id).append("> "); } + sb.append(e.getClass().getSimpleName()) + .append(" msg:") + .append(e.getMessage()) + .append(" cause:") + .append(e.getCause()) + .toString(); + + return sb.toString(); + } - private static String getExceptionID(Exception e) { - // System.out.println(exceptionID); + public static String 
toFullString(Exception e) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + return sw.toString(); + } - Integer id = exceptionID.get(e.getClass()); - if (id == null) { - id = excID.getAndIncrement(); - exceptionID.put(e.getClass(), id); - log.info("New Exception ID: {} -> {}", id, e.getClass()); - } - return "EXC@" + id + "#" + excCounter.getAndIncrement(); + public static String toFullCause(Exception e) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + Throwable cause = e; + for (int i = 1; ; i++) { + pw.printf("%d: %s\n", i, cause.getClass().getCanonicalName()); + cause = e.getCause(); + if (cause == null) break; } + return sw.toString(); + } - private static void log(String id, Exception e) { - log.info(id, e); + private static String getExceptionID(Exception e) { + // System.out.println(exceptionID); + + Integer id = exceptionID.get(e.getClass()); + if (id == null) { + id = excID.getAndIncrement(); + exceptionID.put(e.getClass(), id); + log.info("New Exception ID: {} -> {}", id, e.getClass()); } + return "EXC@" + id + "#" + excCounter.getAndIncrement(); + } + + private static void log(String id, Exception e) { + log.info(id, e); + } - public static String getExceptionSummary(String message) { - if (StringUtils.isBlank(message)) { - return "N/A"; - } - int cutoff = message.indexOf('\n'); - if (cutoff == -1) { - cutoff = message.length(); - } - if (cutoff > 160) { - cutoff = 160; - } - return message.substring(0, cutoff); + public static String getExceptionSummary(String message) { + if (StringUtils.isBlank(message)) { + return "N/A"; + } + int cutoff = message.indexOf('\n'); + if (cutoff == -1) { + cutoff = message.length(); + } + if (cutoff > 160) { + cutoff = 160; } + return message.substring(0, cutoff); + } } diff --git a/backend/src/main/java/sparqles/utils/FaultDiagnostic.java b/backend/src/main/java/sparqles/utils/FaultDiagnostic.java new file mode 100644 index 
00000000..2c3f1333 --- /dev/null +++ b/backend/src/main/java/sparqles/utils/FaultDiagnostic.java @@ -0,0 +1,149 @@ +package sparqles.utils; + +import java.io.EOFException; +import java.net.ConnectException; +import java.net.SocketException; +import java.net.UnknownHostException; +import java.net.http.HttpConnectTimeoutException; +import javax.net.ssl.SSLHandshakeException; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.http.HttpException; +import org.apache.http.conn.ConnectTimeoutException; +import org.apache.jena.query.QueryException; +import org.apache.jena.sparql.engine.http.QueryExceptionHTTP; +import org.apache.jena.sparql.resultset.ResultSetException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class FaultDiagnostic { + private static final Logger log = LoggerFactory.getLogger(FaultDiagnostic.class); + + public static FaultKind faultKindForJenaQuery(Exception e) { + log.trace("Diagnosing Jena query fault", e); + if (e == null) { + throw new NullPointerException("Exception shall not be null"); + } + if (e instanceof QueryExceptionHTTP qe) { + if (log.isTraceEnabled()) { + log.trace(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); + log.trace("QueryExceptionHTTP message={}", qe.getMessage()); + log.trace("QueryExceptionHTTP responseMessage={}", qe.getResponseMessage()); + log.trace("QueryExceptionHTTP statusCode={}", qe.getStatusCode()); + log.trace("QueryExceptionHTTP statusLine={}", qe.getStatusLine()); + log.trace("QueryExceptionHTTP cause", qe.getCause()); + log.trace("QueryExceptionHTTP response={}", StringUtils.trunc(qe.getResponse())); + log.trace("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"); + } + if (e.getCause() instanceof UnknownHostException) { + return FaultKind.DOWN_HOST_NOT_FOUND; + } + if (e.getCause() instanceof ConnectTimeoutException + || e.getCause() instanceof ConnectException + || e.getCause() instanceof HttpConnectTimeoutException) { + 
return FaultKind.DOWN_TIMEOUT; + } + if (e.getCause() instanceof SSLHandshakeException) { + return FaultKind.DOWN_TLS_CONFIGURATION_ERROR; + } else if (e.getCause() instanceof HttpException || qe.getStatusCode() >= 400) { + return faultKindForApacheHttpException(qe.getStatusCode()); + } else if (ExceptionUtils.indexOfThrowable(e, SocketException.class) != -1 + || ExceptionUtils.indexOfThrowable(e, EOFException.class) != -1) { + return FaultKind.DOWN_ENDPOINT; + } + } else if (e instanceof ResultSetException) { + if (e.getMessage().contains("Not a result set syntax:")) { + // (potentially) RDF but not a suitable for this SPARQL query response + return FaultKind.BAD_RESPONSE; + } + log.debug("Unknown fault", e); + return FaultKind.UNKNOWN; + } else if (e instanceof HttpException) { + return faultKindForApacheHttpException(e); + } else if (e instanceof ConnectTimeoutException + || e instanceof ConnectException + || e instanceof HttpConnectTimeoutException) { + return FaultKind.DOWN_TIMEOUT; + } else if (e instanceof UnknownHostException) { + return FaultKind.DOWN_HOST_NOT_FOUND; + } else if (ExceptionUtils.indexOfThrowable(e, SocketException.class) != -1) { + return FaultKind.DOWN_ENDPOINT; + } else if (e instanceof QueryException) { + if (e.getMessage().contains("Endpoint returned Content-Type:")) { + return FaultKind.BAD_RESPONSE; + } + log.debug("Unknown fault", e); + return FaultKind.UNKNOWN; + } else { + if (e.getMessage() != null) + if (e.getMessage().contains("401 Authorization Required")) return FaultKind.AUTH_401; + } + + log.debug("Unknown fault", e); + return FaultKind.UNKNOWN; + } + + public static FaultKind faultKindForApacheHttpException(Throwable e) { + if (e == null) { + throw new NullPointerException("Exception shall not be null"); + } + if (e.getMessage().contains("400") || e.getMessage().contains("501")) { + return FaultKind.BAD_REQUEST; + } else if (e.getMessage().contains("401")) { + return FaultKind.AUTH_401; + } else if 
(e.getMessage().contains("403")) { + return FaultKind.AUTH_403; + } else if (e.getMessage().contains("500")) { + return FaultKind.BAD_SERVER_ERROR; + } else if (e.getMessage().contains("502")) { + return FaultKind.DOWN_BAD_GATEWAY; + } else if (e.getMessage().contains("503")) { + return FaultKind.DOWN_ENDPOINT; + } else if (e.getMessage().contains("504")) { + return FaultKind.DOWN_TIMEOUT; + } + + log.debug("Unknown fault", e); + return FaultKind.UNKNOWN; + } + + public static FaultKind faultKindForApacheHttpException(int code) { + if (code == 400 || code == 501) { + return FaultKind.BAD_REQUEST; + } else if (code == 401) { + return FaultKind.AUTH_401; + } else if (code == 403) { + return FaultKind.AUTH_403; + } else if (code == 404) { + return FaultKind.DOWN_404_NOT_FOUND; + } else if (code == 500) { + return FaultKind.BAD_SERVER_ERROR; + } else if (code == 502) { + return FaultKind.DOWN_BAD_GATEWAY; + } else if (code == 503) { + return FaultKind.DOWN_ENDPOINT; + } else if (code == 504) { + return FaultKind.DOWN_TIMEOUT; + } + + log.debug("Unknown fault code={}", code); + return FaultKind.UNKNOWN; + } + + public static String interpretFault(FaultKind faultKind) { + return switch (faultKind) { + case UNKNOWN -> "Unknown fault"; + case DOWN_HOST_NOT_FOUND -> "🕳️ host not found"; + case DOWN_404_NOT_FOUND -> "🕳️ 404 endpoint not found"; + case DOWN_TLS_CONFIGURATION_ERROR -> "🔧 TLS misconfiguration (failed handshake)"; + case DOWN_TIMEOUT -> "🐌 connection timeout"; + case DOWN_BAD_GATEWAY -> "🔧 bad gateway"; + case DOWN_GONE_410 -> "💨 410 gone"; + case DOWN_ENDPOINT -> "🕳 endpoint down"; + case AUTH_401 -> "🛡️ server requires authentication"; + case AUTH_403 -> "🛡️ server denied access"; + case BAD_REQUEST -> "👾 host did not like our request (400)"; + case BAD_RESPONSE -> "🗑️ malformed response"; + case BAD_SERVER_ERROR -> "🔧 server error"; + }; + } +} diff --git a/backend/src/main/java/sparqles/utils/FaultKind.java 
b/backend/src/main/java/sparqles/utils/FaultKind.java new file mode 100644 index 00000000..777d44a3 --- /dev/null +++ b/backend/src/main/java/sparqles/utils/FaultKind.java @@ -0,0 +1,20 @@ +package sparqles.utils; + +public enum FaultKind { + UNKNOWN, + + DOWN_HOST_NOT_FOUND, + DOWN_404_NOT_FOUND, + DOWN_TLS_CONFIGURATION_ERROR, + DOWN_TIMEOUT, + DOWN_BAD_GATEWAY, + DOWN_GONE_410, + DOWN_ENDPOINT, + + AUTH_401, + AUTH_403, + + BAD_REQUEST, + BAD_RESPONSE, + BAD_SERVER_ERROR, +} diff --git a/backend/src/main/java/sparqles/utils/FileManager.java b/backend/src/main/java/sparqles/utils/FileManager.java index 0868e865..ba5b842f 100644 --- a/backend/src/main/java/sparqles/utils/FileManager.java +++ b/backend/src/main/java/sparqles/utils/FileManager.java @@ -43,264 +43,251 @@ * @author Juergen Umbrich */ public class FileManager { - private static final Logger log = LoggerFactory.getLogger(FileManager.class); - private final File rootFolder; - private final File avroFolder; - private final File resultsFolder; - private HashMap> eptask; - - public FileManager() { - String folder = SPARQLESProperties.getDATA_DIR(); - if (folder.startsWith("file:")) { - folder = folder.replace("file:", ""); - } - rootFolder = new File(folder); - avroFolder = new File(rootFolder, "avro"); - resultsFolder = new File(rootFolder, "results"); - - init(); + private static final Logger log = LoggerFactory.getLogger(FileManager.class); + private final File rootFolder; + private final File avroFolder; + private final File resultsFolder; + private HashMap> eptask; + + public FileManager() { + String folder = SPARQLESProperties.getDATA_DIR(); + if (folder.startsWith("file:")) { + folder = folder.replace("file:", ""); } + rootFolder = new File(folder); + avroFolder = new File(rootFolder, "avro"); + resultsFolder = new File(rootFolder, "results"); - private void init() { - eptask = new HashMap>(); - - if (!rootFolder.exists()) rootFolder.mkdirs(); - if (!avroFolder.exists()) avroFolder.mkdirs(); - if 
(!resultsFolder.exists()) resultsFolder.mkdirs(); - - if (rootFolder.isFile()) { - log.warn("The specified folder {} is not a directory", rootFolder); - return; - } - for (File f : avroFolder.listFiles()) { - String name = f.getName().replace(".avro", ""); - try { - String ep = URLDecoder.decode(name.substring(0, name.lastIndexOf(".")), "UTF-8"); - String task = name.substring(name.lastIndexOf(".") + 1); - - put(ep, task, f); - } catch (UnsupportedEncodingException e) { - log.warn("UnsupportedEncodingException: {} for {}", e.getMessage(), f); - } - } - } + init(); + } - private void put(String ep, String task, File f) { - Map map = eptask.get(ep); - if (map == null) { - map = new HashMap(); - eptask.put(ep, map); - } - if (map.containsKey(task)) log.warn("Duplicate entry for {}", f); - map.put(task, f); - } + private void init() { + eptask = new HashMap>(); - public List getResults(String endpointURI, Class cls) { - try { - return getResults(EndpointFactory.newEndpoint(new URI(endpointURI)), cls); - } catch (URISyntaxException e) { - Object[] t = { - e.getClass().getSimpleName(), e.getMessage(), cls.getSimpleName(), endpointURI - }; - log.error("{}:{} during deserialisation of {} results for {}", t); - return new ArrayList(); - } - } + if (!rootFolder.exists()) rootFolder.mkdirs(); + if (!avroFolder.exists()) avroFolder.mkdirs(); + if (!resultsFolder.exists()) resultsFolder.mkdirs(); - public List getResults(Endpoint ep, Class cls) { - List l = new ArrayList(); - File f = getFile(ep, cls.getSimpleName()); - DatumReader reader = new SpecificDatumReader(cls); - try { - DataFileReader dfr = new DataFileReader(f, reader); - while (dfr.hasNext()) { - l.add(dfr.next()); - } - } catch (IOException e) { - Object[] t = { - e.getClass().getSimpleName(), - e.getMessage(), - cls.getSimpleName(), - ep.getUri().toString() - }; - log.error("{}:{} during deserialisation of {} results for {}", t); - } - Object[] t = {l.size(), cls.getSimpleName(), ep.getUri().toString()}; - 
log.info("Deserialised {} {} results for {}", t); - return l; + if (rootFolder.isFile()) { + log.warn("The specified folder {} is not a directory", rootFolder); + return; } - - public boolean writeResult(V res) { - if (res instanceof DResult) return writeResult((DResult) res); - else if (res instanceof AResult) return writeResult((AResult) res); - else if (res instanceof PResult) return writeResult((PResult) res); - else if (res instanceof FResult) return writeResult((FResult) res); - - return true; + for (File f : avroFolder.listFiles()) { + String name = f.getName().replace(".avro", ""); + try { + String ep = URLDecoder.decode(name.substring(0, name.lastIndexOf(".")), "UTF-8"); + String task = name.substring(name.lastIndexOf(".") + 1); + + put(ep, task, f); + } catch (UnsupportedEncodingException e) { + log.warn("UnsupportedEncodingException: {} for {}", e.getMessage(), f); + } } + } - public boolean writeResult(FResult res) { - return writeResult( - res.getEndpointResult().getEndpoint(), - res.getClass().getSimpleName(), - (SpecificRecordBase) res); + private void put(String ep, String task, File f) { + Map map = eptask.get(ep); + if (map == null) { + map = new HashMap(); + eptask.put(ep, map); } - - public boolean writeResult(DResult res) { - return writeResult( - res.getEndpointResult().getEndpoint(), - res.getClass().getSimpleName(), - (SpecificRecordBase) res); + if (map.containsKey(task)) log.warn("Duplicate entry for {}", f); + map.put(task, f); + } + + public List getResults(String endpointURI, Class cls) { + try { + return getResults(EndpointFactory.newEndpoint(new URI(endpointURI)), cls); + } catch (URISyntaxException e) { + Object[] t = {e.getClass().getSimpleName(), e.getMessage(), cls.getSimpleName(), endpointURI}; + log.error("{}:{} during deserialisation of {} results for {}", t); + return new ArrayList(); } - - public boolean writeResult(AResult res) { - return writeResult( - res.getEndpointResult().getEndpoint(), - res.getClass().getSimpleName(), 
- (SpecificRecordBase) res); + } + + public List getResults(Endpoint ep, Class cls) { + List l = new ArrayList(); + File f = getFile(ep, cls.getSimpleName()); + DatumReader reader = new SpecificDatumReader(cls); + try { + DataFileReader dfr = new DataFileReader(f, reader); + while (dfr.hasNext()) { + l.add(dfr.next()); + } + } catch (IOException e) { + Object[] t = { + e.getClass().getSimpleName(), e.getMessage(), cls.getSimpleName(), ep.getUri().toString() + }; + log.error("{}:{} during deserialisation of {} results for {}", t); } - - public boolean writeResult(PResult res) { - return writeResult( - res.getEndpointResult().getEndpoint(), - res.getClass().getSimpleName(), - (SpecificRecordBase) res); + Object[] t = {l.size(), cls.getSimpleName(), ep.getUri().toString()}; + log.info("Deserialised {} {} results for {}", t); + return l; + } + + public boolean writeResult(V res) { + if (res instanceof DResult) return writeResult((DResult) res); + else if (res instanceof AResult) return writeResult((AResult) res); + else if (res instanceof PResult) return writeResult((PResult) res); + else if (res instanceof FResult) return writeResult((FResult) res); + + return true; + } + + public boolean writeResult(FResult res) { + return writeResult( + res.getEndpointResult().getEndpoint(), + res.getClass().getSimpleName(), + (SpecificRecordBase) res); + } + + public boolean writeResult(DResult res) { + return writeResult( + res.getEndpointResult().getEndpoint(), + res.getClass().getSimpleName(), + (SpecificRecordBase) res); + } + + public boolean writeResult(AResult res) { + return writeResult( + res.getEndpointResult().getEndpoint(), + res.getClass().getSimpleName(), + (SpecificRecordBase) res); + } + + public boolean writeResult(PResult res) { + return writeResult( + res.getEndpointResult().getEndpoint(), + res.getClass().getSimpleName(), + (SpecificRecordBase) res); + } + + public boolean writeResult(Endpoint ep, String task, V result) { + log.debug("[STORE] {}", result); + + 
try { + DatumWriter d = new SpecificDatumWriter((Class) result.getClass()); + DataFileWriter dfw = new DataFileWriter(d); + + File f = getFile(ep, task); + + if (f == null) { + f = createAVROFile(ep, task); + put(ep.getUri().toString(), task, f); + dfw.create(result.getSchema(), f); + } else { + dfw = dfw.appendTo(f); + } + dfw.append(result); + dfw.close(); + log.debug("[STORED] {}", result); + return true; + } catch (Exception e) { + // e.printStackTrace(); + log.warn("[STORE] {}", e); + return false; } - - public boolean writeResult(Endpoint ep, String task, V result) { - log.debug("[STORE] {}", result); - - try { - DatumWriter d = new SpecificDatumWriter((Class) result.getClass()); - DataFileWriter dfw = new DataFileWriter(d); - - File f = getFile(ep, task); - - if (f == null) { - f = createAVROFile(ep, task); - put(ep.getUri().toString(), task, f); - dfw.create(result.getSchema(), f); - } else { - dfw = dfw.appendTo(f); - } - dfw.append(result); - dfw.close(); - log.debug("[STORED] {}", result); - return true; - } catch (Exception e) { - // e.printStackTrace(); - log.warn("[STORE] {}", e); - return false; - } + } + + private File createAVROFile(Endpoint ep, String task) { + try { + return new File( + avroFolder, URLEncoder.encode(ep.getUri().toString(), "UTF-8") + "." + task + ".avro"); + } catch (UnsupportedEncodingException e) { + log.warn("UnsupportedEncodingException: {} for {}", e.getMessage(), ep.getUri().toString()); } - - private File createAVROFile(Endpoint ep, String task) { - try { - return new File( - avroFolder, - URLEncoder.encode(ep.getUri().toString(), "UTF-8") + "." 
+ task + ".avro"); - } catch (UnsupportedEncodingException e) { - log.warn( - "UnsupportedEncodingException: {} for {}", - e.getMessage(), - ep.getUri().toString()); - } - return null; - } - - private File createResultFile(Endpoint ep, String query, Long date) { - try { - File folder = - new File(resultsFolder, URLEncoder.encode(ep.getUri().toString(), "UTF-8")); - folder.mkdir(); - - folder = new File(folder, DateFormater.getDataAsString(DateFormater.YYYYMMDD)); - folder.mkdir(); - return new File(folder, query.replaceAll("/", "-") + "_" + date + ".results.gz"); - } catch (UnsupportedEncodingException e) { - log.warn( - "UnsupportedEncodingException: {} for {}", - e.getMessage(), - ep.getUri().toString()); - } - return null; + return null; + } + + private File createResultFile(Endpoint ep, String query, Long date) { + try { + File folder = new File(resultsFolder, URLEncoder.encode(ep.getUri().toString(), "UTF-8")); + folder.mkdir(); + + folder = new File(folder, DateFormater.getDataAsString(DateFormater.YYYYMMDD)); + folder.mkdir(); + return new File(folder, query.replaceAll("/", "-") + "_" + date + ".results.gz"); + } catch (UnsupportedEncodingException e) { + log.warn("UnsupportedEncodingException: {} for {}", e.getMessage(), ep.getUri().toString()); } + return null; + } - private File getFile(Endpoint ep, String task) { - Map map = eptask.get(ep.getUri().toString()); - if (map == null) { - return null; - } - return map.get(task); + private File getFile(Endpoint ep, String task) { + Map map = eptask.get(ep.getUri().toString()); + if (map == null) { + return null; } - - public int writeSPARQLResults(ResultSet results, String queryFile, Endpoint ep, Long start) { - - PrintWriter out = null; - try { - out = getPARQLResultPrintStream(ep, queryFile, start); - int sols = 0; - while (results.hasNext()) { - QuerySolution qs = results.nextSolution(); - out.println(toString(qs, sols == 0)); - sols++; - } - out.close(); - return sols; - } catch (FileNotFoundException e) { 
- e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } finally { - if (out != null) out.close(); - } - return -10; + return map.get(task); + } + + public int writeSPARQLResults(ResultSet results, String queryFile, Endpoint ep, Long start) { + + PrintWriter out = null; + try { + out = getPARQLResultPrintStream(ep, queryFile, start); + int sols = 0; + while (results.hasNext()) { + QuerySolution qs = results.nextSolution(); + out.println(toString(qs, sols == 0)); + sols++; + } + out.close(); + return sols; + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } finally { + if (out != null) out.close(); } - - public int writeSPARQLResults( - Iterator triples, String queryFile, Endpoint ep, Long start) { - PrintWriter out = null; - try { - out = getPARQLResultPrintStream(ep, queryFile, start); - int sols = 0; - while (triples.hasNext()) { - out.println(triples.next()); - sols++; - } - out.close(); - return sols; - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } finally { - if (out != null) out.close(); - } - return -10; + return -10; + } + + public int writeSPARQLResults( + Iterator triples, String queryFile, Endpoint ep, Long start) { + PrintWriter out = null; + try { + out = getPARQLResultPrintStream(ep, queryFile, start); + int sols = 0; + while (triples.hasNext()) { + out.println(triples.next()); + sols++; + } + out.close(); + return sols; + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } finally { + if (out != null) out.close(); } - - private PrintWriter getPARQLResultPrintStream(Endpoint ep, String queryFile, Long start) - throws FileNotFoundException, IOException { - - File f = createResultFile(ep, queryFile, start); - PrintWriter pw = null; - pw = new PrintWriter(new GZIPOutputStream(new FileOutputStream(f))); - return pw; + return -10; + } + + 
private PrintWriter getPARQLResultPrintStream(Endpoint ep, String queryFile, Long start) + throws FileNotFoundException, IOException { + + File f = createResultFile(ep, queryFile, start); + PrintWriter pw = null; + pw = new PrintWriter(new GZIPOutputStream(new FileOutputStream(f))); + return pw; + } + + private String toString(QuerySolution qs, boolean first) { + StringBuffer vars = new StringBuffer(); + StringBuffer sb = new StringBuffer(); + Iterator varns = qs.varNames(); + while (varns.hasNext()) { + String varName = varns.next(); + if (first) { + vars.append(varName + "\t"); + } + sb.append(FmtUtils.stringForObject(qs.get(varName)) + "\t"); } - private String toString(QuerySolution qs, boolean first) { - StringBuffer vars = new StringBuffer(); - StringBuffer sb = new StringBuffer(); - Iterator varns = qs.varNames(); - while (varns.hasNext()) { - String varName = varns.next(); - if (first) { - vars.append(varName + "\t"); - } - sb.append(FmtUtils.stringForObject(qs.get(varName)) + "\t"); - } - - if (first) return vars.toString() + "\n" + sb.toString(); - return sb.toString(); - } + if (first) return vars.toString() + "\n" + sb.toString(); + return sb.toString(); + } } diff --git a/backend/src/main/java/sparqles/utils/MongoDBManager.java b/backend/src/main/java/sparqles/utils/MongoDBManager.java index 1b510cee..6ffcc37f 100644 --- a/backend/src/main/java/sparqles/utils/MongoDBManager.java +++ b/backend/src/main/java/sparqles/utils/MongoDBManager.java @@ -34,12 +34,14 @@ import org.slf4j.LoggerFactory; import sparqles.avro.Endpoint; import sparqles.avro.analytics.AvailabilityView; +import sparqles.avro.analytics.CalculationView; import sparqles.avro.analytics.DiscoverabilityView; import sparqles.avro.analytics.EPView; import sparqles.avro.analytics.Index; import sparqles.avro.analytics.InteroperabilityView; import sparqles.avro.analytics.PerformanceView; import sparqles.avro.availability.AResult; +import sparqles.avro.calculation.CResult; import 
sparqles.avro.core.Robots; import sparqles.avro.discovery.DResult; import sparqles.avro.features.FResult; @@ -48,573 +50,562 @@ import sparqles.core.SPARQLESProperties; public class MongoDBManager { - public static final String COLL_SCHED = "schedule"; - public static final String COLL_ROBOTS = "robots"; - public static final String COLL_AVAIL = "atasks"; - public static final String COLL_PERF = "ptasks"; - public static final String COLL_DISC = "dtasks"; - public static final String COLL_FEAT = "ftasks"; - public static final String COLL_ENDS = "endpoints"; - public static final String COLL_INDEX = "index"; - public static final String COLL_AMONTHS = "amonths"; - public static final String COLL_AVAIL_AGG = "atasks_agg"; - public static final String COLL_PERF_AGG = "ptasks_agg"; - public static final String COLL_DISC_AGG = "dtasks_agg"; - public static final String COLL_FEAT_AGG = "ftasks_agg"; - public static final String COLL_EP_VIEW = "epview"; - private static final Logger log = LoggerFactory.getLogger(MongoDBManager.class); - private static final String RESULT_KEY = "endpointResult.endpoint.uri"; - private static final String VIEW_KEY = "endpoint.uri"; - private static final String EP_KEY = "uri"; - private static Map obj2col = new HashMap(); - - static { - obj2col.put(DResult.class, new String[] {COLL_DISC, RESULT_KEY}); - obj2col.put(AResult.class, new String[] {COLL_AVAIL, RESULT_KEY}); - obj2col.put(PResult.class, new String[] {COLL_PERF, RESULT_KEY}); - obj2col.put(FResult.class, new String[] {COLL_FEAT, RESULT_KEY}); - obj2col.put(Endpoint.class, new String[] {COLL_ENDS, EP_KEY}); - obj2col.put(Robots.class, new String[] {COLL_ROBOTS, VIEW_KEY}); - obj2col.put(Schedule.class, new String[] {COLL_SCHED, RESULT_KEY}); - - obj2col.put(AvailabilityView.class, new String[] {COLL_AVAIL_AGG, VIEW_KEY}); - obj2col.put(EPView.class, new String[] {COLL_EP_VIEW, VIEW_KEY}); - obj2col.put(Index.class, new String[] {COLL_INDEX, VIEW_KEY}); - 
obj2col.put(PerformanceView.class, new String[] {COLL_PERF_AGG, VIEW_KEY}); - obj2col.put(InteroperabilityView.class, new String[] {COLL_FEAT_AGG, VIEW_KEY}); - obj2col.put(DiscoverabilityView.class, new String[] {COLL_DISC_AGG, VIEW_KEY}); + private static final Logger log = LoggerFactory.getLogger(MongoDBManager.class); + + private MongoClient client; + private DB db; + + private static final String RESULT_KEY = "endpointResult.endpoint.uri"; + private static final String VIEW_KEY = "endpoint.uri"; + private static final String EP_KEY = "uri"; + + public static final String COLL_SCHED = "schedule"; + + public static final String COLL_ROBOTS = "robots"; + public static final String COLL_AVAIL = "atasks"; + public static final String COLL_PERF = "ptasks"; + public static final String COLL_DISC = "dtasks"; + public static final String COLL_FEAT = "ftasks"; + public static final String COLL_CALC = "ctasks"; + public static final String COLL_ENDS = "endpoints"; + public static final String COLL_INDEX = "index"; + public static final String COLL_AMONTHS = "amonths"; + + public static final String COLL_AVAIL_AGG = "atasks_agg"; + public static final String COLL_PERF_AGG = "ptasks_agg"; + public static final String COLL_DISC_AGG = "dtasks_agg"; + public static final String COLL_CALC_AGG = "ctasks_agg"; + public static final String COLL_FEAT_AGG = "ftasks_agg"; + public static final String COLL_EP_VIEW = "epview"; + + private static Map obj2col = new HashMap(); + + static { + obj2col.put(DResult.class, new String[] {COLL_DISC, RESULT_KEY}); + obj2col.put(AResult.class, new String[] {COLL_AVAIL, RESULT_KEY}); + obj2col.put(PResult.class, new String[] {COLL_PERF, RESULT_KEY}); + obj2col.put(FResult.class, new String[] {COLL_FEAT, RESULT_KEY}); + obj2col.put(CResult.class, new String[] {COLL_CALC, RESULT_KEY}); + obj2col.put(Endpoint.class, new String[] {COLL_ENDS, EP_KEY}); + obj2col.put(Robots.class, new String[] {COLL_ROBOTS, VIEW_KEY}); + obj2col.put(Schedule.class, new 
String[] {COLL_SCHED, RESULT_KEY}); + + obj2col.put(AvailabilityView.class, new String[] {COLL_AVAIL_AGG, VIEW_KEY}); + obj2col.put(EPView.class, new String[] {COLL_EP_VIEW, VIEW_KEY}); + obj2col.put(Index.class, new String[] {COLL_INDEX, VIEW_KEY}); + obj2col.put(PerformanceView.class, new String[] {COLL_PERF_AGG, VIEW_KEY}); + obj2col.put(InteroperabilityView.class, new String[] {COLL_FEAT_AGG, VIEW_KEY}); + obj2col.put(DiscoverabilityView.class, new String[] {COLL_DISC_AGG, VIEW_KEY}); + obj2col.put(CalculationView.class, new String[] {COLL_CALC_AGG, VIEW_KEY}); + } + + public MongoDBManager() { + setup(); + } + + public boolean isRunning() { + if (client == null || db == null) return false; + return true; + } + + public void setup() { + try { + client = new MongoClient(SPARQLESProperties.getDB_HOST(), SPARQLESProperties.getDB_PORT()); + + log.info("[INIT] MongoDB {} ", client); + db = client.getDB(SPARQLESProperties.getDB_NAME()); + + } catch (UnknownHostException e) { + log.error( + "Could not connect to MongoDB instance, {}", ExceptionHandler.logAndtoString(e, true)); } - - private MongoClient client; - private DB db; - - public MongoDBManager() { - setup(); + try { + String[] cols = { + COLL_AVAIL_AGG, + COLL_PERF_AGG, + COLL_DISC_AGG, + COLL_CALC_AGG, + COLL_FEAT_AGG, + COLL_FEAT_AGG, + COLL_EP_VIEW, + COLL_INDEX, + COLL_SCHED + }; + for (String col : cols) { + DBCollection c = db.getCollection(col); + if (c.getIndexInfo().size() == 0) + c.ensureIndex(new BasicDBObject("endpoint.uri", 1), new BasicDBObject("unique", true)); + } + // + DBCollection c = db.getCollection(COLL_ENDS); + DBObject d = new BasicDBObject("uri", 1); + if (c.getIndexInfo().size() == 0) c.ensureIndex(d, new BasicDBObject("unique", true)); + } catch (Exception e) { + log.error( + "Exception while creating indices for MongoDB collections, {}", + ExceptionHandler.logAndtoString(e, true)); } - - public boolean isRunning() { - if (client == null || db == null) return false; - return true; - 
} - - public void setup() { - try { - client = - new MongoClient( - SPARQLESProperties.getDB_HOST(), SPARQLESProperties.getDB_PORT()); - - log.info("[INIT] MongoDB {} ", client); - db = client.getDB(SPARQLESProperties.getDB_NAME()); - - } catch (UnknownHostException e) { - log.error( - "Coulld not connect to MongoDB instance, {}", - ExceptionHandler.logAndtoString(e, true)); - } - try { - String[] cols = { - COLL_AVAIL_AGG, - COLL_PERF_AGG, - COLL_DISC_AGG, - COLL_FEAT_AGG, - COLL_FEAT_AGG, - COLL_EP_VIEW, - COLL_INDEX, - COLL_SCHED - }; - for (String col : cols) { - DBCollection c = db.getCollection(col); - if (c.getIndexInfo().size() == 0) - c.ensureIndex( - new BasicDBObject("endpoint.uri", 1), - new BasicDBObject("unique", true)); - } - // - DBCollection c = db.getCollection(COLL_ENDS); - DBObject d = new BasicDBObject("uri", 1); - if (c.getIndexInfo().size() == 0) c.ensureIndex(d, new BasicDBObject("unique", true)); - } catch (Exception e) { - log.error( - "Exception while creating indices for MongoDB collections, {}", - ExceptionHandler.logAndtoString(e, true)); - } + } + + public void initEndpointCollection() { + DBCollection c = db.getCollection(COLL_ENDS); + c.drop(); + } + + public void initScheduleCollection() { + DBCollection c = db.getCollection(COLL_SCHED); + c.drop(); + log.warn("Collection '{}' dropped", COLL_SCHED); + } + + public void initAggregateCollections() { + String[] cols = { + COLL_AVAIL_AGG, COLL_PERF_AGG, COLL_DISC_AGG, COLL_CALC_AGG, COLL_FEAT_AGG, COLL_FEAT_AGG + }; + for (String col : cols) { + DBCollection c = db.getCollection(col); + c.drop(); + c.ensureIndex(new BasicDBObject("endpoint.uri", 1), new BasicDBObject("unique", true)); } + } - public void initEndpointCollection() { - DBCollection c = db.getCollection(COLL_ENDS); - c.drop(); - } + public boolean insert(Collection results) { + boolean res = true; - public void initScheduleCollection() { - DBCollection c = db.getCollection(COLL_SCHED); - c.drop(); + for (V v : results) { + 
res = res && insert(v); } - - public void initAggregateCollections() { - String[] cols = { - COLL_AVAIL_AGG, COLL_PERF_AGG, COLL_DISC_AGG, COLL_FEAT_AGG, COLL_FEAT_AGG - }; - for (String col : cols) { - DBCollection c = db.getCollection(col); - c.drop(); - c.ensureIndex(new BasicDBObject("endpoint.uri", 1), new BasicDBObject("unique", true)); - } + return res; + } + + public boolean insert(V res) { + String[] v = obj2col.get(res.getClass()); + if (v != null && v[0] != null) return insert(v[0], res, res.getSchema()); + else { + log.error("Collection for {} unknown", res.getClass()); } - - public boolean insert(Collection results) { - boolean res = true; - - for (V v : results) { - res = res && insert(v); - } - return res; - } - - public boolean insert(V res) { - String[] v = obj2col.get(res.getClass()); - if (v != null && v[0] != null) return insert(v[0], res, res.getSchema()); - else { - log.error("Collection for {} unknown", res.getClass()); - } + return false; + } + + private boolean insert(String collName, Object e, Schema schema) { + DBCollection c = db.getCollection(collName); + String s = e.toString(); + if (s.contains("uri") && s.contains("datasets")) + s = + s.substring(s.indexOf("uri"), s.indexOf("datasets")) + .replaceAll("\"", "") + .trim() + .replaceAll("\\\\/", "/"); + try { + + DBObject dbObject = getObject(e, schema); + WriteResult wr = c.insert(dbObject, WriteConcern.ACKNOWLEDGED); + if (wr.getError() != null) { + log.warn("INSERT ERROR {}:{} #>{}", collName, s, wr.getError()); + log.debug("INSERT ERROR {}:{} #>{}", collName, e.toString(), wr.getError()); return false; + } else { + log.debug("INSERT SUCCESS {}:{}", collName, s); + log.trace("INSERT SUCCESS {}:{}", collName, e.toString()); + } + return true; + } catch (DuplicateKey ex) { + log.error( + "INSERT DUPLICATE uri key for {} ({})", e, ExceptionHandler.logAndtoString(ex, true)); + return true; + } catch (MongoException ex) { + log.error("INSERT MongoDB Exception: {}: {}", e, 
ExceptionHandler.logAndtoString(ex, true)); + } catch (Exception ex) { + log.error("INSERT Exception: {} {}", e, ExceptionHandler.logAndtoString(ex, true)); } - - private boolean insert(String collName, Object e, Schema schema) { - DBCollection c = db.getCollection(collName); - String s = e.toString(); - if (s.contains("uri") && s.contains("datasets")) - s = - s.substring(s.indexOf("uri"), s.indexOf("datasets")) - .replaceAll("\"", "") - .trim() - .replaceAll("\\\\/", "/"); - try { - - DBObject dbObject = getObject(e, schema); - WriteResult wr = c.insert(dbObject, WriteConcern.ACKNOWLEDGED); - if (wr.getError() != null) { - log.info("INSERT ERROR {}:{} #>{}", collName, s, wr.getError()); - log.debug("INSERT ERROR {}:{} #>{}", collName, e.toString(), wr.getError()); - return false; - } else { - log.info("INSERT SUCCESS {}:{}", collName, s); - log.debug("INSERT SUCCESS {}:{}", collName, e.toString()); - } - return true; - } catch (DuplicateKey ex) { - log.error( - "INSERT DUPLICATE uri key for {} ({})", - e, - ExceptionHandler.logAndtoString(ex, true)); - return true; - } catch (MongoException ex) { - log.error( - "INSERT MongoDB Exception: {}: {}", - e, - ExceptionHandler.logAndtoString(ex, true)); - } catch (Exception ex) { - log.error("INSERT Exception: {} {}", e, ExceptionHandler.logAndtoString(ex, true)); - } + return false; + } + + public boolean update(V res) { + + if (res instanceof AvailabilityView) + return update( + COLL_AVAIL_AGG, ((AvailabilityView) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); + if (res instanceof PerformanceView) + return update( + COLL_PERF_AGG, ((PerformanceView) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); + if (res instanceof InteroperabilityView) + return update( + COLL_FEAT_AGG, + ((InteroperabilityView) res).getEndpoint(), + res, + res.getSchema(), + VIEW_KEY); + if (res instanceof DiscoverabilityView) + return update( + COLL_DISC_AGG, ((DiscoverabilityView) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); + 
if (res instanceof CalculationView) + return update( + COLL_CALC_AGG, ((CalculationView) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); + + if (res instanceof Endpoint) + return update(COLL_ENDS, ((Endpoint) res), res, res.getSchema(), EP_KEY); + if (res instanceof EPView) + return update(COLL_EP_VIEW, ((EPView) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); + if (res instanceof Index) + return update(COLL_INDEX, ((Index) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); + return false; + } + + private boolean update(String collName, Endpoint ep, Object e, Schema schema, String key) { + DBCollection c = db.getCollection(collName); + try { + DBObject dbObject = getObject(e, schema); + BasicDBObject q = new BasicDBObject(); + q.append(key, ep.getUri().toString()); + + WriteResult wr = c.update(q, dbObject); + if (wr.getError() != null) { + log.warn("UPDATE ERROR {}:{} #>{}", collName, ep.getUri(), wr.getError()); + log.debug("UPDATE ERROR {}:{} #>{}", collName, e.toString(), wr.getError()); return false; + } else { + log.debug("UPDATE SUCCESS {}:{}", collName, ep.getUri()); + log.trace("UPDATE SUCCESS {}:{}", collName, e.toString()); + } + + return true; + } catch (DuplicateKey ex) { + log.error( + "INSERT DUPLICATE uri key for {} ({})", + ep.getUri(), + ExceptionHandler.logAndtoString(ex, true)); + return true; + } catch (MongoException ex) { + log.error( + "INSERT MongoDB Exception: {}: {}", + ep.getUri(), + ExceptionHandler.logAndtoString(ex, true)); + } catch (Exception ex) { + if (ex instanceof SocketTimeoutException) {} + + log.error("INSERT Exception: {} {}", ep.getUri(), ExceptionHandler.logAndtoString(ex, true)); } + return false; + } - public boolean update(V res) { - - if (res instanceof AvailabilityView) - return update( - COLL_AVAIL_AGG, - ((AvailabilityView) res).getEndpoint(), - res, - res.getSchema(), - VIEW_KEY); - if (res instanceof PerformanceView) - return update( - COLL_PERF_AGG, - ((PerformanceView) res).getEndpoint(), - res, - 
res.getSchema(), - VIEW_KEY); - if (res instanceof InteroperabilityView) - return update( - COLL_FEAT_AGG, - ((InteroperabilityView) res).getEndpoint(), - res, - res.getSchema(), - VIEW_KEY); - if (res instanceof DiscoverabilityView) - return update( - COLL_DISC_AGG, - ((DiscoverabilityView) res).getEndpoint(), - res, - res.getSchema(), - VIEW_KEY); - - if (res instanceof Endpoint) - return update(COLL_ENDS, ((Endpoint) res), res, res.getSchema(), EP_KEY); - if (res instanceof EPView) - return update( - COLL_EP_VIEW, ((EPView) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); - if (res instanceof Index) - return update(COLL_INDEX, ((Index) res).getEndpoint(), res, res.getSchema(), VIEW_KEY); - return false; - } + public List get(Class cls, Schema schema) { + return getResults(null, cls, schema); + } - private boolean update(String collName, Endpoint ep, Object e, Schema schema, String key) { - DBCollection c = db.getCollection(collName); - try { - DBObject dbObject = getObject(e, schema); - BasicDBObject q = new BasicDBObject(); - q.append(key, ep.getUri().toString()); - - WriteResult wr = c.update(q, dbObject); - if (wr.getError() != null) { - log.info("UPDATE ERROR {}:{} #>{}", collName, ep.getUri(), wr.getError()); - log.debug("UPDATE ERROR {}:{} #>{}", collName, e.toString(), wr.getError()); - return false; - } else { - log.info("UPDATE SUCCESS {}:{}", collName, ep.getUri()); - log.debug("UPDATE SUCCESS {}:{}", collName, e.toString()); - } - - return true; - } catch (DuplicateKey ex) { - log.error( - "INSERT DUPLICATE uri key for {} ({})", - ep.getUri(), - ExceptionHandler.logAndtoString(ex, true)); - return true; - } catch (MongoException ex) { - log.error( - "INSERT MongoDB Exception: {}: {}", - ep.getUri(), - ExceptionHandler.logAndtoString(ex, true)); - } catch (Exception ex) { - if (ex instanceof SocketTimeoutException) {} + public Iterator getIterator(Class cls, Schema schema) { + return getResultsIterator(null, cls, schema); + } - log.error( - "INSERT 
Exception: {} {}", - ep.getUri(), - ExceptionHandler.logAndtoString(ex, true)); - } - return false; + public Endpoint getEndpoint(Endpoint ep) { + List res = scan(ep, COLL_ENDS, Endpoint.class, Endpoint.SCHEMA$, EP_KEY); + if (res.size() != 1) { + log.error("Received {} results for {}; expected one result ", res.size(), ep.getUri()); } + if (res.size() == 0) return null; + return res.get(0); + } - public List get(Class cls, Schema schema) { - return getResults(null, cls, schema); - } + public List getResults(Endpoint ep, Class cls, Schema schema) { + String[] v = obj2col.get(cls); - public Iterator getIterator(Class cls, Schema schema) { - return getResultsIterator(null, cls, schema); - } - - public Endpoint getEndpoint(Endpoint ep) { - List res = scan(ep, COLL_ENDS, Endpoint.class, Endpoint.SCHEMA$, EP_KEY); - if (res.size() != 1) { - log.error("Received {} results for {}; expected one result ", res.size(), ep.getUri()); - } - if (res.size() == 0) return null; - return res.get(0); + if (v != null && v[0] != null && v[1] != null) return scan(ep, v[0], cls, schema, v[1]); + else { + log.warn("Collection for {} unknown", cls); } + return new ArrayList(); + } - public List getResults(Endpoint ep, Class cls, Schema schema) { - String[] v = obj2col.get(cls); + public Iterator getResultsIterator(Endpoint ep, Class cls, Schema schema) { + String[] v = obj2col.get(cls); - if (v != null && v[0] != null && v[1] != null) return scan(ep, v[0], cls, schema, v[1]); - else { - log.warn("Collection for {} unknown", cls); - } - return new ArrayList(); + if (v != null && v[0] != null && v[1] != null) return scanIterator(ep, v[0], cls, schema, v[1]); + else { + log.warn("Collection for {} unknown", cls); } + return new Iterator() { - public Iterator getResultsIterator(Endpoint ep, Class cls, Schema schema) { - String[] v = obj2col.get(cls); - - if (v != null && v[0] != null && v[1] != null) - return scanIterator(ep, v[0], cls, schema, v[1]); - else { - log.warn("Collection for {} 
unknown", cls); - } - return new Iterator() { - - public boolean hasNext() { - return false; - } - - @Override - public T next() { - return null; - } - - @Override - public void remove() { - ; - } - }; - } - - public List getResultsSince( - Endpoint ep, Class cls, Schema schema, long since) { - - ArrayList reslist = new ArrayList(); + public boolean hasNext() { + return false; + } - DBCollection c = db.getCollection(COLL_AVAIL); - DBCursor curs = null; + @Override + public T next() { + return null; + } + + @Override + public void remove() { + ; + } + }; + } + + public List getResultsSince( + Endpoint ep, Class cls, Schema schema, long since) { + + ArrayList reslist = new ArrayList(); + + DBCollection c = db.getCollection(COLL_AVAIL); + DBCursor curs = null; + try { + if (ep == null) { + curs = c.find(); + } else { + + DBObject q = + QueryBuilder.start() + .and( + QueryBuilder.start(RESULT_KEY).is(ep.getUri().toString()).get(), + QueryBuilder.start("endpointResult.start").greaterThan(since).get()) + .get(); + log.debug("[EXEC] {}", q); + curs = c.find(q); + } + + while (curs.hasNext()) { + DBObject o = curs.next(); + SpecificDatumReader r = new SpecificDatumReader(cls); + JsonDecoder d; try { - if (ep == null) { - curs = c.find(); - } else { - - DBObject q = - QueryBuilder.start() - .and( - QueryBuilder.start(RESULT_KEY) - .is(ep.getUri().toString()) - .get(), - QueryBuilder.start("endpointResult.start") - .greaterThan(since) - .get()) - .get(); - log.info("[EXEC] {}", q); - curs = c.find(q); - } - - while (curs.hasNext()) { - DBObject o = curs.next(); - SpecificDatumReader r = new SpecificDatumReader(cls); - JsonDecoder d; - try { - d = DecoderFactory.get().jsonDecoder(schema, o.toString()); - T t = (T) r.read(null, d); - reslist.add(t); - } catch (IOException e) { - log.error( - "GET RESULT Exception: {} {}", - ep.getUri(), - ExceptionHandler.logAndtoString(e, true)); - } - } - } finally { - if (curs != null) curs.close(); + d = 
DecoderFactory.get().jsonDecoder(schema, o.toString()); + T t = (T) r.read(null, d); + reslist.add(t); + } catch (IOException e) { + log.error( + "GET RESULT Exception: {} {}", ep.getUri(), ExceptionHandler.logAndtoString(e, true)); } - return reslist; + } + } finally { + if (curs != null) curs.close(); } - - public List getResultsSince( - Endpoint ep, Class cls, Schema schema, long from, long to) { - ArrayList reslist = new ArrayList(); - - DBCollection c = db.getCollection(COLL_AVAIL); - DBCursor curs = null; + return reslist; + } + + public List getResultsSince( + Endpoint ep, Class cls, Schema schema, long from, long to) { + ArrayList reslist = new ArrayList(); + + DBCollection c = db.getCollection(COLL_AVAIL); + DBCursor curs = null; + try { + if (ep == null) { + curs = c.find(); + } else { + + DBObject q = + QueryBuilder.start() + .and( + QueryBuilder.start(RESULT_KEY).is(ep.getUri().toString()).get(), + QueryBuilder.start("endpointResult.start").greaterThan(from).get(), + QueryBuilder.start("endpointResult.start").lessThanEquals(to).get()) + .get(); + log.debug("[EXEC] {}", q); + curs = c.find(q); + } + + while (curs.hasNext()) { + DBObject o = curs.next(); + SpecificDatumReader r = new SpecificDatumReader(cls); + JsonDecoder d; try { - if (ep == null) { - curs = c.find(); - } else { - - DBObject q = - QueryBuilder.start() - .and( - QueryBuilder.start(RESULT_KEY) - .is(ep.getUri().toString()) - .get(), - QueryBuilder.start("endpointResult.start") - .greaterThan(from) - .get(), - QueryBuilder.start("endpointResult.start") - .lessThanEquals(to) - .get()) - .get(); - log.info("[EXEC] {}", q); - curs = c.find(q); - } - - while (curs.hasNext()) { - DBObject o = curs.next(); - SpecificDatumReader r = new SpecificDatumReader(cls); - JsonDecoder d; - try { - d = DecoderFactory.get().jsonDecoder(schema, o.toString()); - T t = (T) r.read(null, d); - reslist.add(t); - } catch (IOException e) { - log.error( - "GET RESULT SINCE Exception: {} {}", - ep.getUri(), - 
ExceptionHandler.logAndtoString(e, true)); - } - } - } finally { - if (curs != null) curs.close(); + d = DecoderFactory.get().jsonDecoder(schema, o.toString()); + T t = (T) r.read(null, d); + reslist.add(t); + } catch (IOException e) { + log.error( + "GET RESULT SINCE Exception: {} {}", + ep.getUri(), + ExceptionHandler.logAndtoString(e, true)); } - return reslist; + } + } finally { + if (curs != null) curs.close(); } - - private DBObject getObject(Object o, Schema s) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - JsonEncoder e; + return reslist; + } + + private DBObject getObject(Object o, Schema s) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + JsonEncoder e; + try { + e = EncoderFactory.get().jsonEncoder(s, baos); + SpecificDatumWriter w = new SpecificDatumWriter(o.getClass()); + w.write(o, e); + e.flush(); + DBObject dbObject = (DBObject) JSON.parse(baos.toString()); + return dbObject; + + } catch (IOException e1) { + log.error( + "GET OBJECT Exception: {} {}", o.getClass(), ExceptionHandler.logAndtoString(e1, true)); + } + return null; + } + + private List scan(Endpoint ep, String colName, Class cls, Schema schema, String key) { + ArrayList reslist = new ArrayList(); + + DBCollection c = db.getCollection(colName); + DBCursor curs = null; + try { + if (ep == null) { + curs = c.find().batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); + } else { + BasicDBObject q = new BasicDBObject(); + q.append(key, ep.getUri().toString()); + curs = c.find(q).batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); + } + + while (curs.hasNext()) { + DBObject o = curs.next(); + SpecificDatumReader r = new SpecificDatumReader(cls); + JsonDecoder d; try { - e = EncoderFactory.get().jsonEncoder(s, baos); - SpecificDatumWriter w = new SpecificDatumWriter(o.getClass()); - w.write(o, e); - e.flush(); - DBObject dbObject = (DBObject) JSON.parse(baos.toString()); - return dbObject; - - } catch (IOException e1) { - log.error( - 
"GET OBJECT Exception: {} {}", - o.getClass(), - ExceptionHandler.logAndtoString(e1, true)); + d = DecoderFactory.get().jsonDecoder(schema, o.toString()); + T t = (T) r.read(null, d); + reslist.add(t); + } catch (IOException e) { + log.error( + "SCAN Exception: {}:{}:{} {}", + ep.getUri(), + colName, + cls, + ExceptionHandler.logAndtoString(e, true)); } - return null; + } + } finally { + if (curs != null) curs.close(); } - private List scan(Endpoint ep, String colName, Class cls, Schema schema, String key) { - ArrayList reslist = new ArrayList(); - - DBCollection c = db.getCollection(colName); - DBCursor curs = null; - try { - if (ep == null) { - curs = c.find().batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); - } else { - BasicDBObject q = new BasicDBObject(); - q.append(key, ep.getUri().toString()); - curs = c.find(q).batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); - } - - while (curs.hasNext()) { - DBObject o = curs.next(); - SpecificDatumReader r = new SpecificDatumReader(cls); - JsonDecoder d; - try { - d = DecoderFactory.get().jsonDecoder(schema, o.toString()); - T t = (T) r.read(null, d); - reslist.add(t); - } catch (IOException e) { - log.error( - "SCAN Exception: {}:{}:{} {}", - ep.getUri(), - colName, - cls, - ExceptionHandler.logAndtoString(e, true)); - } - } - } finally { - if (curs != null) curs.close(); + return reslist; + } + + private Iterator scanIterator( + final Endpoint ep, + final String colName, + final Class cls, + final Schema schema, + final String key) { + ArrayList reslist = new ArrayList(); + + DBCollection c = db.getCollection(colName); + final DBCursor curs; + try { + if (ep == null) { + curs = c.find().batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); + } else { + BasicDBObject q = new BasicDBObject(); + q.append(key, ep.getUri().toString()); + curs = c.find(q).batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); + } + + return new Iterator() { + + @Override + public 
boolean hasNext() { + // TODO Auto-generated method stub + return curs.hasNext(); } - return reslist; - } + @Override + public T next() { + DBObject o = curs.next(); + SpecificDatumReader r = new SpecificDatumReader(cls); + JsonDecoder d; + try { + d = DecoderFactory.get().jsonDecoder(schema, o.toString()); + T t = (T) r.read(null, d); + return t; + } catch (IOException e) { + log.error( + "SCAN Exception: {}:{}:{} {}", + ep.getUri(), + colName, + cls, + ExceptionHandler.logAndtoString(e, true)); + } + return null; + } - private Iterator scanIterator( - final Endpoint ep, - final String colName, - final Class cls, - final Schema schema, - final String key) { - ArrayList reslist = new ArrayList(); + @Override + public void remove() { + // TODO Auto-generated method stub - DBCollection c = db.getCollection(colName); - final DBCursor curs; - try { - if (ep == null) { - curs = c.find().batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); - } else { - BasicDBObject q = new BasicDBObject(); - q.append(key, ep.getUri().toString()); - curs = c.find(q).batchSize(50).addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); - } - - return new Iterator() { - - @Override - public boolean hasNext() { - // TODO Auto-generated method stub - return curs.hasNext(); - } - - @Override - public T next() { - DBObject o = curs.next(); - SpecificDatumReader r = new SpecificDatumReader(cls); - JsonDecoder d; - try { - d = DecoderFactory.get().jsonDecoder(schema, o.toString()); - T t = (T) r.read(null, d); - return t; - } catch (IOException e) { - log.error( - "SCAN Exception: {}:{}:{} {}", - ep.getUri(), - colName, - cls, - ExceptionHandler.logAndtoString(e, true)); - } - return null; - } - - @Override - public void remove() { - // TODO Auto-generated method stub - - } - }; - - } finally { - // if(curs!=null) - // curs.close(); } - } + }; - public boolean cleanup(Endpoint ep) { - // remove endpoint - boolean res = true; - res = res && remove(ep, Endpoint.class); - res = res && 
remove(ep, AvailabilityView.class); - res = res && remove(ep, InteroperabilityView.class); - res = res && remove(ep, DiscoverabilityView.class); - res = res && remove(ep, PerformanceView.class); - return res; + } finally { + // if(curs!=null) + // curs.close(); } - - public boolean remove(Endpoint ep, Class cls) { - String[] v = obj2col.get(cls); - - DBCollection c = db.getCollection(v[0]); - try { - BasicDBObject q = new BasicDBObject(); - q.append(v[1], ep.getUri().toString()); - - WriteResult wr = c.remove(q, WriteConcern.ACKNOWLEDGED); - if (wr.getError() != null) { - log.info("REMOVE ERROR {}:{} #>{}", v[0], ep.getUri(), wr.getError()); - return false; - } else { - log.info("REMOVE SUCCESS {}:{}", v[0], ep.getUri()); - } - return true; - } catch (DuplicateKey ex) { - log.error( - "REMOVE DUPLICATE uri key for {} ({})", - ep.getUri(), - ExceptionHandler.logAndtoString(ex, true)); - return true; - } catch (MongoException ex) { - log.error( - "REMOVE MongoDB Exception: {}: {}", - ep.getUri(), - ExceptionHandler.logAndtoString(ex, true)); - } catch (Exception ex) { - log.error( - "REMOVE Exception: {} {}", - ep.getUri(), - ExceptionHandler.logAndtoString(ex, true)); - } + } + + public boolean cleanup(Endpoint ep) { + // remove endpoint + boolean res = true; + res = res && remove(ep, Endpoint.class); + res = res && remove(ep, AvailabilityView.class); + res = res && remove(ep, InteroperabilityView.class); + res = res && remove(ep, DiscoverabilityView.class); + res = res && remove(ep, PerformanceView.class); + res = res && remove(ep, CalculationView.class); + return res; + } + + public boolean remove(Endpoint ep, Class cls) { + String[] v = obj2col.get(cls); + + DBCollection c = db.getCollection(v[0]); + try { + BasicDBObject q = new BasicDBObject(); + q.append(v[1], ep.getUri().toString()); + + WriteResult wr = c.remove(q, WriteConcern.ACKNOWLEDGED); + if (wr.getError() != null) { + log.warn("REMOVE ERROR {}:{} #>{}", v[0], ep.getUri(), wr.getError()); return false; 
+ } else { + log.debug("REMOVE SUCCESS {}:{}", v[0], ep.getUri()); + } + return true; + } catch (DuplicateKey ex) { + log.error( + "REMOVE DUPLICATE uri key for {} ({})", + ep.getUri(), + ExceptionHandler.logAndtoString(ex, true)); + return true; + } catch (MongoException ex) { + log.error( + "REMOVE MongoDB Exception: {}: {}", + ep.getUri(), + ExceptionHandler.logAndtoString(ex, true)); + } catch (Exception ex) { + log.error("REMOVE Exception: {} {}", ep.getUri(), ExceptionHandler.logAndtoString(ex, true)); } - - public boolean close() { - client.close(); - return true; + return false; + } + + public boolean close() { + client.close(); + return true; + } + + public long getFirstAvailabitlityTime() { + long result = 0; + DBCursor cursor = null; + try { + DBCollection collection = db.getCollection(COLL_AVAIL); + DBObject filter = new BasicDBObject(); + DBObject projection = (DBObject) JSON.parse("{'endpointResult.start':1, '_id':0}"); + DBObject sort = new BasicDBObject(); + sort.put("endpointResult.start", 1); + cursor = collection.find(filter, projection).sort(sort).limit(1); + result = + Long.parseLong(((DBObject) cursor.next().get("endpointResult")).get("start").toString()); + } finally { + cursor.close(); } + return result; + } } diff --git a/backend/src/main/java/sparqles/utils/QueryManager.java b/backend/src/main/java/sparqles/utils/QueryManager.java index f5022302..a515c03c 100644 --- a/backend/src/main/java/sparqles/utils/QueryManager.java +++ b/backend/src/main/java/sparqles/utils/QueryManager.java @@ -3,76 +3,112 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.InputStream; +import java.time.Duration; +import java.util.Map; import java.util.Scanner; +import org.apache.jena.http.sys.HttpRequestModifier; +import org.apache.jena.query.ARQ; import org.apache.jena.query.QueryExecution; +import org.apache.jena.sparql.exec.http.Params; +import org.apache.jena.sparql.exec.http.QueryExecutionHTTP; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import sparqles.avro.Endpoint; +import sparqles.core.CONSTANTS; +import sparqles.core.SPARQLESProperties; public class QueryManager { - private static final Logger log = LoggerFactory.getLogger(QueryManager.class); + public static final UserAgentRequestModifier USER_AGENT_REQUEST_MODIFIER = + new UserAgentRequestModifier(); + private static final Logger log = LoggerFactory.getLogger(QueryManager.class); - public static String getQuery(String folder, String qFile) { - log.info("getQuery {}, {}", folder, qFile); - String content = null; - Scanner scanner = null; - if (folder.startsWith("file:")) { - File fold = new File(folder.replace("file:", "")); - try { - scanner = new Scanner(new File(fold, qFile)); - } catch (FileNotFoundException e) { - log.error("Query file not found: {}", qFile, e); - } - } else { - QueryManager.class.getClassLoader(); - InputStream res = ClassLoader.getSystemResourceAsStream(folder + qFile); - if (res != null) scanner = new Scanner(res); - } - if (scanner == null) { - log.warn("FAILED Could not load query file {} from {}", qFile, folder); - return null; - } + public static String getQuery(String folder, String qFile) { + log.info("getQuery {}, {}", folder, qFile); + String content = null; + Scanner scanner = null; + if (folder.startsWith("file:")) { + File fold = new File(folder.replace("file:", "")); + try { + scanner = new Scanner(new File(fold, qFile)); + } catch (FileNotFoundException e) { + log.error("Query file not found: {}", qFile, e); + } + } else { + InputStream res = ClassLoader.getSystemResourceAsStream(folder + qFile); + if (res != null) scanner = new Scanner(res); + } + if (scanner == null) { + log.warn("FAILED Could not load query file {} from {}", qFile, folder); + return null; + } - if (scanner.hasNext()) content = scanner.useDelimiter("\\Z").next(); + if (scanner.hasNext()) content = scanner.useDelimiter("\\Z").next(); - log.debug("PARSED input:{},output:{}", qFile, content); - scanner.close(); - 
return substitute(content); - } + log.debug("PARSED input:{},output:{}", qFile, content); + scanner.close(); + return substitute(content); + } - private static String substitute(String query) { - long time = System.currentTimeMillis(); - if (query.contains("%%uri1")) { - String url1 = ""; - query = query.replace("%%uri1", url1); - } - if (query.contains("%%uri2")) { - String url2 = ""; - query = query.replace("%%uri2", url2); - } - if (query.contains("%%uri3")) { - String url3 = ""; - query = query.replace("%%uri3", url3); - } - return query; + private static String substitute(String query) { + long time = System.currentTimeMillis(); + if (query.contains("%%uri1")) { + String url1 = ""; + query = query.replace("%%uri1", url1); + } + if (query.contains("%%uri2")) { + String url2 = ""; + query = query.replace("%%uri2", url2); + } + if (query.contains("%%uri3")) { + String url3 = ""; + query = query.replace("%%uri3", url3); } + return query; + } + + public static QueryExecution getExecution(Endpoint ep, String query) throws Exception { + return getExecution(ep.getUri().toString(), query, -1); + } + + public static QueryExecution getExecution(String epURL, String query) { + return getExecution(epURL, query, -1); + } + + public static QueryExecution getExecution(Endpoint ep, String query, Duration timeout) { + return getExecution(ep.getUri().toString(), query, timeout.toSeconds()); + } + + public static QueryExecution getExecution(String epURL, String query, Duration timeout) { + return getExecution(epURL, query, timeout.toSeconds()); + } - public static QueryExecution getExecution(Endpoint ep, String query) throws Exception { - return getExecution(ep.getUri().toString(), query); + /** + * , + * + * @param epURL HTTP SPARQL endpoint URI + * @param query a valid SPARQL query + * @param timeout in seconds + * @return + * @throws Exception + */ + public static QueryExecution getExecution(String epURL, String query, long timeout) { + log.debug( + "INIT QueryExecution for {} 
with query {}", + epURL, + StringUtils.trunc(query.replaceAll("\n", ""), CONSTANTS.STRING_LEN)); + QueryExecutionHTTP qe = QueryExecution.service(epURL, query); + if (timeout != -1) { + qe.getContext().set(ARQ.httpQueryTimeout, timeout); } + qe.getContext().set(ARQ.httpRequestModifer, USER_AGENT_REQUEST_MODIFIER); + return qe; + } - public static QueryExecution getExecution(String epURL, String query) throws Exception { - try { - // FIXME - // HttpEnv.getDftHttpClient() - // HttpOp.setUserAgent(CONSTANTS.USER_AGENT); - log.debug( - "INIT QueryExecution for {} with query {}", epURL, query.replaceAll("\n", "")); - return QueryExecution.service(epURL, query); - } catch (Exception e) { - throw e; - // throw new Exception(e.getMessage()); - } + public static class UserAgentRequestModifier implements HttpRequestModifier { + @Override + public void modify(Params params, Map httpHeaders) { + httpHeaders.put("User-Agent", SPARQLESProperties.getUserAgent()); } + } } diff --git a/backend/src/main/java/sparqles/utils/StringUtils.java b/backend/src/main/java/sparqles/utils/StringUtils.java new file mode 100644 index 00000000..4e2996c1 --- /dev/null +++ b/backend/src/main/java/sparqles/utils/StringUtils.java @@ -0,0 +1,18 @@ +package sparqles.utils; + +import sparqles.core.CONSTANTS; + +public class StringUtils { + + public static String trunc(String val) { + return trunc(val, CONSTANTS.STRING_LEN); + } + + public static String trunc(String val, int cutoff) { + if (val == null) return null; + else if (val.length() < cutoff + 3) return val; + else { + return val.substring(0, cutoff) + "..."; + } + } +} diff --git a/backend/src/main/java/sparqles/utils/cli/ARGUMENTS.java b/backend/src/main/java/sparqles/utils/cli/ARGUMENTS.java index fd0104dd..cc1ca138 100644 --- a/backend/src/main/java/sparqles/utils/cli/ARGUMENTS.java +++ b/backend/src/main/java/sparqles/utils/cli/ARGUMENTS.java @@ -13,165 +13,167 @@ */ public class ARGUMENTS { - public static final int SHORT_ARG = 0; - public 
static final int LONG_ARG = 1; - /* - * GENERAL ARGUMENTS - */ - public static final String[] PARAM_PROP_FILE = createParam("p", "prop"); - public static final Option OPTION_PROP_FILE = - createOption( - "property file", - 1, - "SPARQLES (additional) property file", - PARAM_PROP_FILE[SHORT_ARG], - PARAM_PROP_FILE[LONG_ARG], - true); - public static final String[] PARAM_FLAG_DEBUG = createParam("d", "verbose"); - public static final Option OPTION_DEBUG = - createOption( - "flag", - 0, - "enable verbose mode", - PARAM_FLAG_DEBUG[SHORT_ARG], - PARAM_FLAG_DEBUG[LONG_ARG], - false); - public static final String[] PARAM_FLAG_INIT = createParam("i", "init"); - public static final Option OPTION_INIT = - createOption( - "flag", - 0, - "init datahub list", - PARAM_FLAG_INIT[SHORT_ARG], - PARAM_FLAG_INIT[LONG_ARG], - false); - public static final String[] PARAM_FLAG_UPDATE_EPS = createParam("u", "update"); - public static final Option OPTION_UPDATE_EPS = - createOption( - "flag", - 0, - "update datahub list", - PARAM_FLAG_UPDATE_EPS[SHORT_ARG], - PARAM_FLAG_UPDATE_EPS[LONG_ARG], - false); - public static final String[] PARAM_FLAG_START = createParam("s", "start"); - public static final Option OPTION_START = - createOption( - "flag", - 0, - "start the service", - PARAM_FLAG_START[SHORT_ARG], - PARAM_FLAG_START[LONG_ARG], - false); - public static final String[] PARAM_FLAG_RECOMPUTE = createParam("r", "recompute"); - public static final Option OPTION_RECOMPUTE = - createOption( - "flag", - 0, - "recompute the analytics", - PARAM_FLAG_RECOMPUTE[SHORT_ARG], - PARAM_FLAG_RECOMPUTE[LONG_ARG], - false); - public static final String[] PARAM_FLAG_RESCHEDULE = createParam("rs", "reschedule"); - public static final Option OPTION_RESCHEDULE = - createOption( - "flag", - 0, - "create default schedule", - PARAM_FLAG_RESCHEDULE[SHORT_ARG], - PARAM_FLAG_RESCHEDULE[LONG_ARG], - false); - public static final String[] PARAM_RUN = createParam("run", "run"); - ; - public static final Option 
OPTION_RUN = - createOption( - "task", - 1, - "run task (itask, atask)", - PARAM_RUN[SHORT_ARG], - PARAM_RUN[LONG_ARG], - false); - public static final String[] PARAM_FLAG_INDEX = createParam("iv", "indexview"); - public static final Option OPTION_INDEX = - createOption( - "flag", - 0, - "run indexview computation", - PARAM_FLAG_INDEX[SHORT_ARG], - PARAM_FLAG_INDEX[LONG_ARG], - false); - public static final String[] PARAM_FLAG_RECOMPUTELAST = createParam("rl", "recomputeLast"); - public static final Option OPTION_RECOMPUTELAST = - createOption( - "flag", - 0, - "recompute the last analytics", - PARAM_FLAG_RECOMPUTELAST[SHORT_ARG], - PARAM_FLAG_RECOMPUTELAST[LONG_ARG], - false); - public static final String[] PARAM_FLAG_STATS = createParam("st", "stats"); - public static final Option OPTION_STATS = - createOption( - "flag", - 0, - "mongodb stats", - PARAM_FLAG_STATS[SHORT_ARG], - PARAM_FLAG_STATS[LONG_ARG], - false); - /* - * HELP - */ - protected static final String PARAM_HELP = "?"; - protected static final String[] PARAM_HELP1 = createParam("h", "help"); - protected static final OptionGroup OPTIONGROUP_HELP = new OptionGroup(); + public static final int SHORT_ARG = 0; + public static final int LONG_ARG = 1; - static { - OPTIONGROUP_HELP.addOption( - createOption("help", 0, "print help screen", PARAM_HELP, null, false)); - OPTIONGROUP_HELP.addOption( - createOption( - "help", - 0, - "print help screen", - PARAM_HELP1[SHORT_ARG], - PARAM_HELP1[LONG_ARG], - false)); - } + /* + * HELP + */ + protected static final String PARAM_HELP = "?"; + protected static final String[] PARAM_HELP1 = createParam("h", "help"); + protected static final OptionGroup OPTIONGROUP_HELP = new OptionGroup(); - /** - * @param string - * @param string2 - * @return - */ - protected static String[] createParam(String s, String l) { - String[] arg = new String[2]; - arg[SHORT_ARG] = s; - arg[LONG_ARG] = l; - return arg; - } + static { + OPTIONGROUP_HELP.addOption( + createOption("help", 0, 
"print help screen", PARAM_HELP, null, false)); + OPTIONGROUP_HELP.addOption( + createOption( + "help", 0, "print help screen", PARAM_HELP1[SHORT_ARG], PARAM_HELP1[LONG_ARG], false)); + } + + /* + * GENERAL ARGUMENTS + */ + public static final String[] PARAM_PROP_FILE = createParam("p", "prop"); + public static final String[] PARAM_FLAG_DEBUG = createParam("d", "verbose"); + public static final String[] PARAM_FLAG_INIT = createParam("i", "init"); + public static final String[] PARAM_FLAG_UPDATE_EPS = createParam("u", "update"); + public static final String[] PARAM_FLAG_START = createParam("s", "start"); + public static final String[] PARAM_FLAG_RECOMPUTE = createParam("r", "recompute"); + public static final String[] PARAM_FLAG_RESCHEDULE = createParam("rs", "reschedule"); + public static final String[] PARAM_RUN = createParam("run", "run"); + public static final String[] PARAM_FLAG_INDEX = createParam("iv", "indexview"); + public static final String[] PARAM_FLAG_RECOMPUTELAST = createParam("rl", "recomputeLast"); + public static final String[] PARAM_FLAG_STATS = createParam("st", "stats"); + public static final String[] PARAM_ADD = createParam("ae", "addEndpoint"); + + public static final Option OPTION_PROP_FILE = + createOption( + "property file", + 1, + "SPARQLES (additional) property file", + PARAM_PROP_FILE[SHORT_ARG], + PARAM_PROP_FILE[LONG_ARG], + true); + public static final Option OPTION_DEBUG = + createOption( + "flag", + 0, + "enable verbose mode", + PARAM_FLAG_DEBUG[SHORT_ARG], + PARAM_FLAG_DEBUG[LONG_ARG], + false); + public static final Option OPTION_INIT = + createOption( + "flag", + 0, + "init datahub list", + PARAM_FLAG_INIT[SHORT_ARG], + PARAM_FLAG_INIT[LONG_ARG], + false); + public static final Option OPTION_START = + createOption( + "flag", + 0, + "start the service", + PARAM_FLAG_START[SHORT_ARG], + PARAM_FLAG_START[LONG_ARG], + false); + public static final Option OPTION_RECOMPUTE = + createOption( + "flag", + 0, + "recompute the analytics", + 
PARAM_FLAG_RECOMPUTE[SHORT_ARG], + PARAM_FLAG_RECOMPUTE[LONG_ARG], + false); + public static final Option OPTION_RESCHEDULE = + createOption( + "flag", + 0, + "create default schedule", + PARAM_FLAG_RESCHEDULE[SHORT_ARG], + PARAM_FLAG_RESCHEDULE[LONG_ARG], + false); + public static final Option OPTION_RUN = + createOption( + "task", 1, "run task (itask, atask)", PARAM_RUN[SHORT_ARG], PARAM_RUN[LONG_ARG], false); + public static final Option OPTION_INDEX = + createOption( + "flag", + 0, + "run indexview computation", + PARAM_FLAG_INDEX[SHORT_ARG], + PARAM_FLAG_INDEX[LONG_ARG], + false); + + public static final Option OPTION_RECOMPUTELAST = + createOption( + "flag", + 0, + "recompute the last analytics", + PARAM_FLAG_RECOMPUTELAST[SHORT_ARG], + PARAM_FLAG_RECOMPUTELAST[LONG_ARG], + false); - protected static Option createOption( - String argName, - int args, - String description, - String shortArgname, - String longArgname, - boolean mandatory) { - Option o; - if (shortArgname != null) { - o = - OptionBuilder.withArgName(argName) - .withDescription(description) - .create(shortArgname); - } else { - o = OptionBuilder.withArgName(argName).withDescription(description).create(); - } - if (longArgname != null) { - o.setLongOpt(longArgname); - } - if (args >= 0) o.setArgs(args); - o.setRequired(mandatory); + public static final Option OPTION_UPDATE_EPS = + createOption( + "flag", + 0, + "update datahub list", + PARAM_FLAG_UPDATE_EPS[SHORT_ARG], + PARAM_FLAG_UPDATE_EPS[LONG_ARG], + false); - return o; + public static final Option OPTION_STATS = + createOption( + "flag", + 0, + "mongodb stats", + PARAM_FLAG_STATS[SHORT_ARG], + PARAM_FLAG_STATS[LONG_ARG], + false); + + public static final Option OPTION_ADD = + createOption( + "endpoint", + 3, + "add a new endpoint to monitor", + PARAM_ADD[SHORT_ARG], + PARAM_ADD[LONG_ARG], + false); + + /** + * @param string + * @param string2 + * @return + */ + protected static String[] createParam(String s, String l) { + String[] arg = new 
String[2]; + arg[SHORT_ARG] = s; + arg[LONG_ARG] = l; + return arg; + } + + protected static Option createOption( + String argName, + int args, + String description, + String shortArgname, + String longArgname, + boolean mandatory) { + Option o; + if (shortArgname != null) { + o = OptionBuilder.withArgName(argName).withDescription(description).create(shortArgname); + } else { + o = OptionBuilder.withArgName(argName).withDescription(description).create(); } + if (longArgname != null) { + o.setLongOpt(longArgname); + } + if (args >= 0) o.setArgs(args); + o.setRequired(mandatory); + + return o; + } } diff --git a/backend/src/main/java/sparqles/utils/cli/CLIObject.java b/backend/src/main/java/sparqles/utils/cli/CLIObject.java index 3f1380c4..fcacb4f9 100644 --- a/backend/src/main/java/sparqles/utils/cli/CLIObject.java +++ b/backend/src/main/java/sparqles/utils/cli/CLIObject.java @@ -11,125 +11,125 @@ public abstract class CLIObject { - private static final Logger log = LoggerFactory.getLogger(CLIObject.class); - private Options _opts; - - public CLIObject() { - init(); - } - - /** - * @param cmd - * @param paramSparqlQuery - * @return - */ - public static String getOptionValue(CommandLine cmd, String[] param) { - if (cmd.hasOption(param[ARGUMENTS.SHORT_ARG])) - return cmd.getOptionValue(param[ARGUMENTS.SHORT_ARG]); - if (cmd.hasOption(param[ARGUMENTS.LONG_ARG])) - return cmd.getOptionValue(param[ARGUMENTS.LONG_ARG]); - return null; - } - - /** - * @param cmd - * @param paramProxyPort - * @param object - * @return - */ - public static String getOptionValue(CommandLine cmd, String[] param, String defaultValue) { - String s = getOptionValue(cmd, param); - if (s == null) return defaultValue; - return s; - } - - public static String[] getOptionValues(CommandLine cmd, String[] param) { - if (cmd.hasOption(param[ARGUMENTS.SHORT_ARG])) - return cmd.getOptionValues(param[ARGUMENTS.SHORT_ARG]); - if (cmd.hasOption(param[ARGUMENTS.LONG_ARG])) - return 
cmd.getOptionValues(param[ARGUMENTS.LONG_ARG]); - return null; - } - - /** - * @param paramDebug - * @return - */ - public static boolean hasOption(CommandLine cmd, String[] param) { - if (cmd.hasOption(param[ARGUMENTS.SHORT_ARG])) return true; - if (cmd.hasOption(param[ARGUMENTS.LONG_ARG])) return true; - - return false; - } - - public static boolean hasOption(CommandLine cmd, String param) { - return cmd.hasOption(param); - } - - public Options getOptions() { - return _opts; - } - - public abstract String getDescription(); - - public String getCommand() { - return this.getClass().getSimpleName(); + private static final Logger log = LoggerFactory.getLogger(CLIObject.class); + private Options _opts; + + public Options getOptions() { + return _opts; + } + + public abstract String getDescription(); + + public String getCommand() { + return this.getClass().getSimpleName(); + } + + public CLIObject() { + init(); + } + + protected void init() { + _opts = new Options(); + _opts.addOptionGroup(ARGUMENTS.OPTIONGROUP_HELP); + addOptions(_opts); + } + + /** + * add all Option(Groups) to this object Note: The help flag is set automatically ("?") + * + * @param opts + */ + protected abstract void addOptions(Options opts); + + public void run(String[] args) { + log.info("[START] [ARGS] " + Arrays.toString(args)); + CommandLine cmd = verifyArgs(args); + + long start = System.currentTimeMillis(); + execute(cmd); + long end = System.currentTimeMillis(); + log.info("[END] (" + (end - start) + " ms)"); + } + + protected abstract void execute(CommandLine cmd); + + public CommandLine verifyArgs(String[] args) { + init(); + + CommandLineParser parser = new GnuParser(); + CommandLine cmd = null; + try { + cmd = parser.parse(_opts, args); + } catch (org.apache.commons.cli.ParseException e) { + log.info( + "ERROR: " + + e.getClass().getSimpleName() + + " : " + + e.getMessage() + + " args={" + + Arrays.toString(args) + + "}"); + HelpFormatter formatter = new HelpFormatter(); + 
formatter.printHelp(this.getClass().getSimpleName(), _opts, true); + System.exit(-1); } - - protected void init() { - _opts = new Options(); - _opts.addOptionGroup(ARGUMENTS.OPTIONGROUP_HELP); - addOptions(_opts); - } - - /** - * add all Option(Groups) to this object Note: The help flag is set automatically ("?") - * - * @param opts - */ - protected abstract void addOptions(Options opts); - - public void run(String[] args) { - log.info("[START] [ARGS] " + Arrays.toString(args)); - CommandLine cmd = verifyArgs(args); - - long start = System.currentTimeMillis(); - execute(cmd); - long end = System.currentTimeMillis(); - log.info("[END] (" + (end - start) + " ms)"); - } - - protected abstract void execute(CommandLine cmd); - - public CommandLine verifyArgs(String[] args) { - init(); - - CommandLineParser parser = new GnuParser(); - CommandLine cmd = null; - try { - cmd = parser.parse(_opts, args); - } catch (org.apache.commons.cli.ParseException e) { - log.info( - "ERROR: " - + e.getClass().getSimpleName() - + " : " - + e.getMessage() - + " args={" - + Arrays.toString(args) - + "}"); - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp(this.getClass().getSimpleName(), _opts, true); - System.exit(-1); - } - if (cmd != null - && (cmd.hasOption(ARGUMENTS.PARAM_HELP) - || cmd.hasOption(ARGUMENTS.PARAM_HELP1[ARGUMENTS.SHORT_ARG]) - || cmd.hasOption(ARGUMENTS.PARAM_HELP1[ARGUMENTS.LONG_ARG]))) { - log.info("Here is a help (args length " + cmd.getArgList().size() + "): "); - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp(this.getClass().getSimpleName(), _opts, true); - System.exit(-1); - } - return cmd; + if (cmd != null + && (cmd.hasOption(ARGUMENTS.PARAM_HELP) + || cmd.hasOption(ARGUMENTS.PARAM_HELP1[ARGUMENTS.SHORT_ARG]) + || cmd.hasOption(ARGUMENTS.PARAM_HELP1[ARGUMENTS.LONG_ARG]))) { + log.info("Here is a help (args length " + cmd.getArgList().size() + "): "); + HelpFormatter formatter = new HelpFormatter(); + 
formatter.printHelp(this.getClass().getSimpleName(), _opts, true); + System.exit(-1); } + return cmd; + } + + /** + * @param cmd + * @param paramSparqlQuery + * @return + */ + public static String getOptionValue(CommandLine cmd, String[] param) { + if (cmd.hasOption(param[ARGUMENTS.SHORT_ARG])) + return cmd.getOptionValue(param[ARGUMENTS.SHORT_ARG]); + if (cmd.hasOption(param[ARGUMENTS.LONG_ARG])) + return cmd.getOptionValue(param[ARGUMENTS.LONG_ARG]); + return null; + } + + /** + * @param cmd + * @param paramProxyPort + * @param object + * @return + */ + public static String getOptionValue(CommandLine cmd, String[] param, String defaultValue) { + String s = getOptionValue(cmd, param); + if (s == null) return defaultValue; + return s; + } + + public static String[] getOptionValues(CommandLine cmd, String[] param) { + if (cmd.hasOption(param[ARGUMENTS.SHORT_ARG])) + return cmd.getOptionValues(param[ARGUMENTS.SHORT_ARG]); + if (cmd.hasOption(param[ARGUMENTS.LONG_ARG])) + return cmd.getOptionValues(param[ARGUMENTS.LONG_ARG]); + return null; + } + + /** + * @param paramDebug + * @return + */ + public static boolean hasOption(CommandLine cmd, String[] param) { + if (cmd.hasOption(param[ARGUMENTS.SHORT_ARG])) return true; + if (cmd.hasOption(param[ARGUMENTS.LONG_ARG])) return true; + + return false; + } + + public static boolean hasOption(CommandLine cmd, String param) { + return cmd.hasOption(param); + } } diff --git a/backend/src/main/java/sparqles/utils/cli/OneTimeExecution.java b/backend/src/main/java/sparqles/utils/cli/OneTimeExecution.java index e7218e51..036233b1 100644 --- a/backend/src/main/java/sparqles/utils/cli/OneTimeExecution.java +++ b/backend/src/main/java/sparqles/utils/cli/OneTimeExecution.java @@ -18,71 +18,68 @@ import sparqles.utils.MongoDBManager; public class OneTimeExecution { - private static final Logger log = LoggerFactory.getLogger(OneTimeExecution.class); - private MongoDBManager dbm; - private FileManager fm; + private static final Logger 
log = LoggerFactory.getLogger(OneTimeExecution.class); + private MongoDBManager dbm; + private FileManager fm; - public OneTimeExecution(MongoDBManager dbm, FileManager fm) { - this.dbm = dbm; - this.fm = fm; - } + public OneTimeExecution(MongoDBManager dbm, FileManager fm) { + this.dbm = dbm; + this.fm = fm; + } - public void run(String task) { - Collection eps = dbm.get(Endpoint.class, Endpoint.SCHEMA$); + public void run(String task) { + Collection eps = dbm.get(Endpoint.class, Endpoint.SCHEMA$); - ExecutorService executor = Executors.newFixedThreadPool(50); + ExecutorService executor = Executors.newFixedThreadPool(50); - List> todo = new ArrayList>(eps.size()); + List> todo = new ArrayList>(eps.size()); - for (Endpoint ep : eps) { - Task t = TaskFactory.create(task, ep, dbm, fm); - log.info("OneTimeSchedule {}", ep); - todo.add(t); - // compService.submit(t); - } + for (Endpoint ep : eps) { + Task t = TaskFactory.create(task, ep, dbm, fm); + log.info("OneTimeSchedule {}", ep); + todo.add(t); + // compService.submit(t); + } - List> all = new ArrayList>(); - try { - all = executor.invokeAll(todo); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - int count = 0, failed = 0; - for (Future f : all) { - try { - T t = f.get(); - if (f.isDone()) { - count++; - log.info("Task for {} completed", t); + List> all = new ArrayList>(); + try { + all = executor.invokeAll(todo); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + int count = 0, failed = 0; + for (Future f : all) { + try { + T t = f.get(); + if (f.isDone()) { + count++; + log.info("Task for {} completed", t); - } else { - failed++; - log.info("Task for {} not completed", t); - } - } catch (InterruptedException | ExecutionException e) { - e.printStackTrace(); - } + } else { + failed++; + log.info("Task for {} not completed", t); } - - // - // Future f= null; - // try { - // while((f = compService.poll()) != 
null){ - // while(!f.isDone()){ - // Thread.sleep(500); - // log.debug("Waiting unitl task {} is done", f.get()); - // } - // log.info("Task for {} completed", f.get()); - // } - // } catch (Exception e) { - // e.printStackTrace(); - // } - log.info( - "All {} tasks are oprocessed with {} done and {} failed", - (count + failed), - count, - failed); - executor.shutdown(); + } catch (InterruptedException | ExecutionException e) { + e.printStackTrace(); + } } + + // + // Future f= null; + // try { + // while((f = compService.poll()) != null){ + // while(!f.isDone()){ + // Thread.sleep(500); + // log.debug("Waiting unitl task {} is done", f.get()); + // } + // log.info("Task for {} completed", f.get()); + // } + // } catch (Exception e) { + // e.printStackTrace(); + // } + log.info( + "All {} tasks are oprocessed with {} done and {} failed", (count + failed), count, failed); + executor.shutdown(); + } } diff --git a/backend/src/main/java/sparqles/utils/cli/SPARQLES.java b/backend/src/main/java/sparqles/utils/cli/SPARQLES.java index e63fa9fa..fd3c2f68 100644 --- a/backend/src/main/java/sparqles/utils/cli/SPARQLES.java +++ b/backend/src/main/java/sparqles/utils/cli/SPARQLES.java @@ -1,7 +1,9 @@ package sparqles.utils.cli; import java.io.File; +import java.net.URISyntaxException; import java.util.Collection; +import java.util.List; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.slf4j.Logger; @@ -10,13 +12,16 @@ import sparqles.analytics.IndexViewAnalytics; import sparqles.analytics.RefreshDataHubTask; import sparqles.analytics.StatsAnalyser; +import sparqles.avro.Dataset; import sparqles.avro.Endpoint; import sparqles.avro.availability.AResult; +import sparqles.avro.calculation.CResult; import sparqles.avro.discovery.DResult; import sparqles.avro.features.FResult; import sparqles.avro.performance.PResult; import sparqles.avro.schedule.Schedule; import sparqles.core.CONSTANTS; +import sparqles.core.EndpointFactory; import 
sparqles.core.SPARQLESProperties; import sparqles.schedule.Scheduler; import sparqles.utils.DatahubAccess; @@ -30,201 +35,232 @@ * @author UmbrichJ */ public class SPARQLES extends CLIObject { - private static final Logger log = LoggerFactory.getLogger(SPARQLES.class); - private Scheduler scheduler; - private MongoDBManager dbm; - private FileManager _fm; + private static final Logger log = LoggerFactory.getLogger(SPARQLES.class); + private Scheduler scheduler; + private MongoDBManager dbm; + private FileManager _fm; - @Override - public String getDescription() { - return "Start and control SPARQLES"; - } + @Override + public String getDescription() { + return "Start and control SPARQLES"; + } - @Override - protected void addOptions(Options opts) { - opts.addOption(ARGUMENTS.OPTION_PROP_FILE); - opts.addOption(ARGUMENTS.OPTION_INIT); - opts.addOption(ARGUMENTS.OPTION_UPDATE_EPS); - opts.addOption(ARGUMENTS.OPTION_START); - opts.addOption(ARGUMENTS.OPTION_STATS); - opts.addOption(ARGUMENTS.OPTION_RECOMPUTE); - opts.addOption(ARGUMENTS.OPTION_RECOMPUTELAST); - opts.addOption(ARGUMENTS.OPTION_RESCHEDULE); + @Override + protected void addOptions(Options opts) { + opts.addOption(ARGUMENTS.OPTION_PROP_FILE); + opts.addOption(ARGUMENTS.OPTION_INIT); + opts.addOption(ARGUMENTS.OPTION_UPDATE_EPS); + opts.addOption(ARGUMENTS.OPTION_START); + opts.addOption(ARGUMENTS.OPTION_STATS); + opts.addOption(ARGUMENTS.OPTION_RECOMPUTE); + opts.addOption(ARGUMENTS.OPTION_RECOMPUTELAST); + opts.addOption(ARGUMENTS.OPTION_RESCHEDULE); - opts.addOption(ARGUMENTS.OPTION_RUN); - opts.addOption(ARGUMENTS.OPTION_INDEX); - } + opts.addOption(ARGUMENTS.OPTION_RUN); + opts.addOption(ARGUMENTS.OPTION_INDEX); + opts.addOption(ARGUMENTS.OPTION_ADD); + } - @Override - protected void execute(CommandLine cmd) { - parseCMD(cmd); - - // reinitialise datahub - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_INIT)) { - // check the endpoint list - Collection eps = DatahubAccess.checkEndpointList(); - 
dbm.initEndpointCollection(); - dbm.setup(); - dbm.insert(eps); - } - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_UPDATE_EPS)) { - // check the endpoint list - RefreshDataHubTask t = new RefreshDataHubTask(); - t.setDBManager(dbm); - t.setScheduler(scheduler); - - try { - t.call(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RESCHEDULE)) { - Collection epss = Scheduler.createDefaultSchedule(dbm); - dbm.initScheduleCollection(); - dbm.setup(); - dbm.insert(epss); - } - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RECOMPUTE)) { - recomputeAnalytics(false); - } - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RECOMPUTELAST)) { - recomputeAnalytics(true); - } - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_INDEX)) { - recomputeIndexView(); - } - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_STATS)) { - computeStats(); - } - - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_RUN)) { - String task = CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_RUN).trim(); - if (task.equalsIgnoreCase(CONSTANTS.ITASK)) { - IndexViewAnalytics a = new IndexViewAnalytics(); - a.setDBManager(dbm); - try { - a.call(); - } catch (Exception e) { - e.printStackTrace(); - } - } else if (task.equalsIgnoreCase(CONSTANTS.ATASK)) { - OneTimeExecution ex = new OneTimeExecution(dbm, _fm); - ex.run(CONSTANTS.ATASK); - } else if (task.equalsIgnoreCase(CONSTANTS.FTASK)) { - OneTimeExecution ex = new OneTimeExecution(dbm, _fm); - ex.run(CONSTANTS.FTASK); - } else if (task.equalsIgnoreCase(CONSTANTS.PTASK)) { - OneTimeExecution ex = new OneTimeExecution(dbm, _fm); - ex.run(CONSTANTS.PTASK); - } else if (task.equalsIgnoreCase(CONSTANTS.DTASK)) { - OneTimeExecution ex = new OneTimeExecution(dbm, _fm); - ex.run(CONSTANTS.DTASK); - } else { - log.warn("Task {} not known", task); - } - } + @Override + protected void execute(CommandLine cmd) { + parseCMD(cmd); + // System.setProperty("javax.net.debug", "ssl:handshake:verbose"); - 
if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_START)) { - start(); - } + // reinitialise datahub + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_INIT)) { + // check the endpoint list + Collection eps = DatahubAccess.checkEndpointList(); + dbm.initEndpointCollection(); + dbm.setup(); + dbm.insert(eps); + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_UPDATE_EPS)) { + // check the endpoint list + RefreshDataHubTask t = new RefreshDataHubTask(); + t.setDBManager(dbm); + t.setScheduler(scheduler); - Runtime.getRuntime().addShutdownHook(new ShutdownThread(this)); + try { + t.call(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RESCHEDULE)) { + Collection epss = Scheduler.createDefaultSchedule(dbm); + log.info("Created a new schedule for {} endpoints", epss.size()); + dbm.initScheduleCollection(); + dbm.setup(); + dbm.insert(epss); + log.info("Persisted the schedule for {} endpoints", epss.size()); + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RECOMPUTE)) { + recomputeAnalytics(false); + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RECOMPUTELAST)) { + recomputeAnalytics(true); + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_INDEX)) { + recomputeIndexView(); + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_STATS)) { + computeStats(); + } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_ADD)) { + String[] opts = CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_ADD).trim().split(";"); + String endpointUri = opts[0]; + String label = ""; + if (opts.length > 1) label = opts[1]; + addEndpoint(endpointUri, label); } - private void recomputeIndexView() { + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_RUN)) { + String task = CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_RUN).trim(); + if (task.equalsIgnoreCase(CONSTANTS.ITASK)) { IndexViewAnalytics a = new IndexViewAnalytics(); a.setDBManager(dbm); try { - a.call(); + a.call(); } catch (Exception e) { - // TODO 
Auto-generated catch block - e.printStackTrace(); + e.printStackTrace(); } + } else if (task.equalsIgnoreCase(CONSTANTS.ATASK)) { + OneTimeExecution ex = new OneTimeExecution(dbm, _fm); + ex.run(CONSTANTS.ATASK); + } else if (task.equalsIgnoreCase(CONSTANTS.FTASK)) { + OneTimeExecution ex = new OneTimeExecution(dbm, _fm); + ex.run(CONSTANTS.FTASK); + } else if (task.equalsIgnoreCase(CONSTANTS.PTASK)) { + OneTimeExecution ex = new OneTimeExecution(dbm, _fm); + ex.run(CONSTANTS.PTASK); + } else if (task.equalsIgnoreCase(CONSTANTS.DTASK)) { + OneTimeExecution ex = new OneTimeExecution(dbm, _fm); + ex.run(CONSTANTS.DTASK); + } else if (task.equalsIgnoreCase(CONSTANTS.CTASK)) { + OneTimeExecution ex = new OneTimeExecution(dbm, _fm); + ex.run(CONSTANTS.CTASK); + } else { + log.warn("Task {} not known", task); + } } - private void computeStats() { - StatsAnalyser stats = new StatsAnalyser(); - stats.setDBManager(dbm); - try { - stats.call(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_START)) { + start(); } - private void recomputeAnalytics(boolean onlyLast) { - dbm.initAggregateCollections(); + Runtime.getRuntime().addShutdownHook(new ShutdownThread(this)); + } - AnalyserInit a = new AnalyserInit(dbm, onlyLast); - a.run(); + private void recomputeIndexView() { + IndexViewAnalytics a = new IndexViewAnalytics(); + a.setDBManager(dbm); + try { + a.call(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); } + } - private void start() { - scheduler.init(dbm); - try { - long start = System.currentTimeMillis(); - while (true) { - log.info( - "Running since {}", - DateFormater.formatInterval(System.currentTimeMillis() - start)); - Thread.sleep(1800000); - } - } catch (Throwable t) { - t.printStackTrace(); - } + private void computeStats() { + StatsAnalyser stats = new StatsAnalyser(); + stats.setDBManager(dbm); + try { + stats.call(); + } catch 
(Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); } + } - private void parseCMD(CommandLine cmd) { - // load the Properties - if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_PROP_FILE)) { - File propFile = new File(CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_PROP_FILE)); - if (propFile.exists()) { - log.info("Reading properties from {}", propFile); - SPARQLESProperties.init(propFile); - } else { - log.warn("Specified property file ({}) does not exist", propFile); - } - } - setup(true, false); - } + private void recomputeAnalytics(boolean onlyLast) { + dbm.initAggregateCollections(); - public void init(String[] arguments) { - CommandLine cmd = verifyArgs(arguments); - parseCMD(cmd); + AnalyserInit a = new AnalyserInit(dbm, onlyLast); + a.run(); + } + + private void addEndpoint(String endpointUri, String label) { + log.info("Adding endpoint with uri \"{}\" and label \"{}\"", endpointUri, label); + try { + Endpoint ep = EndpointFactory.newEndpoint(endpointUri); + if (!label.equals("")) { + Dataset d = new Dataset(); + d.setLabel(label); + d.setUri(endpointUri); + List l = ep.getDatasets(); + l.add(d); + ep.setDatasets(l); + } + dbm.insert(ep); + } catch (URISyntaxException e) { + log.warn("URISyntaxException:{}", e.getMessage()); } + } - private void setup(boolean useDB, boolean useFM) { - // Init the scheduler - scheduler = new Scheduler(); + private void start() { + scheduler.init(dbm); + try { + long start = System.currentTimeMillis(); + while (true) { + log.info( + "Running since {}", DateFormater.formatInterval(System.currentTimeMillis() - start)); + Thread.sleep(1800000); + } + } catch (Throwable t) { + t.printStackTrace(); + } + } - if (useDB) { - dbm = new MongoDBManager(); - scheduler.useDB(dbm); - } - if (useFM) { - _fm = new FileManager(); - } - scheduler.useFileManager(_fm); + private void parseCMD(CommandLine cmd) { + // load the Properties + if (CLIObject.hasOption(cmd, ARGUMENTS.PARAM_PROP_FILE)) { + File propFile = new 
File(CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_PROP_FILE)); + if (propFile.exists()) { + log.info("Reading properties from {}", propFile); + SPARQLESProperties.init(propFile); + } else { + log.warn("Specified property file ({}) does not exist", propFile); + } } + setup(true, false); + } + + public void init(String[] arguments) { + CommandLine cmd = verifyArgs(arguments); + parseCMD(cmd); + } - public void stop() { - log.info("[START] [SHUTDOWN] Shutting down the system"); - scheduler.close(); - log.info("[SUCCESS] [SHUTDOWN] Everything closed normally"); + private void setup(boolean useDB, boolean useFM) { + // Init the scheduler + scheduler = new Scheduler(); + + if (useDB) { + dbm = new MongoDBManager(); + scheduler.useDB(dbm); + } + if (useFM) { + _fm = new FileManager(); } + scheduler.useFileManager(_fm); + } - class ShutdownThread extends Thread { - private SPARQLES _s; + public void stop() { + log.info("[START] [SHUTDOWN] Shutting down the system"); + scheduler.close(); + log.info("[SUCCESS] [SHUTDOWN] Everything closed normally"); + } - public ShutdownThread(SPARQLES s) { - _s = s; - } + class ShutdownThread extends Thread { + private SPARQLES _s; - @Override - public void run() { - _s.stop(); - } + public ShutdownThread(SPARQLES s) { + _s = s; + } + + @Override + public void run() { + _s.stop(); } + } } diff --git a/backend/src/main/resources/sparqles.properties b/backend/src/main/resources/sparqles.properties index 73d6b4b8..2a1a2f5b 100644 --- a/backend/src/main/resources/sparqles.properties +++ b/backend/src/main/resources/sparqles.properties @@ -1,19 +1,24 @@ # SPARQLES=/usr/local/sparqles + # FileManager data.dir=/usr/local/sparqles/data.dir + #QueryManager #ftask.queries=ftask/ ftask.queries=ftask/ ptask.queries=ptask/ + #Scheduler task.threads=64 + # Database Manager # use localhost if running locally without Docker db.host=localhost #db.host=database-svc db.port=27017 db.name=sparqles + # Wait time between two consequutive SPARQL queries against 
the same server #general wait time waittime=290 diff --git a/backend/src/main/resources/sparqles_docker.properties b/backend/src/main/resources/sparqles_docker.properties index 19a47fa4..a2066159 100644 --- a/backend/src/main/resources/sparqles_docker.properties +++ b/backend/src/main/resources/sparqles_docker.properties @@ -1,19 +1,25 @@ # SPARQLES=/usr/local/sparqles +host=https://example.com + # FileManager data.dir=/usr/local/sparqles/data.dir + #QueryManager #ftask.queries=ftask/ ftask.queries=ftask/ ptask.queries=ptask/ + #Scheduler task.threads=64 + # Database Manager # use localhost if running locally without Docker #db.host=localhost db.host=database-svc db.port=27017 db.name=sparqles + # Wait time between two consequutive SPARQL queries against the same server #general wait time waittime=290 diff --git a/backend/src/test/java/sparqles/analytics/AAnalyticsInitTEST.java b/backend/src/test/java/sparqles/analytics/AAnalyticsInitTEST.java index 5279394e..3c35abf6 100644 --- a/backend/src/test/java/sparqles/analytics/AAnalyticsInitTEST.java +++ b/backend/src/test/java/sparqles/analytics/AAnalyticsInitTEST.java @@ -1,5 +1,7 @@ package sparqles.analytics; +import static org.junit.Assert.*; + import java.io.File; import org.junit.After; import org.junit.Before; @@ -9,24 +11,24 @@ public class AAnalyticsInitTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); - m = new MongoDBManager(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); + m = new MongoDBManager(); + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void test() { - m.initAggregateCollections(); + @Test + public void test() { + m.initAggregateCollections(); - AnalyserInit a 
= new AnalyserInit(m); - a.run(); - } + AnalyserInit a = new AnalyserInit(m); + a.run(); + } } diff --git a/backend/src/test/java/sparqles/analytics/AAnalyticsTEST.java b/backend/src/test/java/sparqles/analytics/AAnalyticsTEST.java index 73c2e70d..3577f5b5 100644 --- a/backend/src/test/java/sparqles/analytics/AAnalyticsTEST.java +++ b/backend/src/test/java/sparqles/analytics/AAnalyticsTEST.java @@ -1,5 +1,7 @@ package sparqles.analytics; +import static org.junit.Assert.*; + import java.io.File; import java.net.URISyntaxException; import java.util.Comparator; @@ -16,55 +18,54 @@ public class AAnalyticsTEST { - private MongoDBManager m; - - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); - m = new MongoDBManager(); - } + private MongoDBManager m; - @After - public void tearDown() throws Exception { - m.close(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); + m = new MongoDBManager(); + } - @Test - public void test() throws URISyntaxException { - m.initAggregateCollections(); - AAnalyser a = new AAnalyser(m); + @After + public void tearDown() throws Exception { + m.close(); + } - Endpoint ep = EndpointFactory.newEndpoint("http://dbpedia.org/sparql"); - System.out.println("Analyse"); + @Test + public void test() throws URISyntaxException { + m.initAggregateCollections(); + AAnalyser a = new AAnalyser(m); - TreeSet res = - new TreeSet( - new Comparator() { - public int compare(AResult o1, AResult o2) { - int diff = - o1.getEndpointResult() - .getStart() - .compareTo(o2.getEndpointResult().getStart()); - return diff; - } - }); + Endpoint ep = EndpointFactory.newEndpoint("http://dbpedia.org/sparql"); + System.out.println("Analyse"); - List epRes = m.getResults(ep, AResult.class, AResult.SCHEMA$); - System.out.println(epRes.size()); - System.out.println("Results: " + epRes.size()); - for (AResult epres : 
epRes) { - res.add(epres); - // System.out.println(new Date(epres.getEndpointResult().getStart())); - } + TreeSet res = + new TreeSet( + new Comparator() { + public int compare(AResult o1, AResult o2) { + int diff = + Comparator.comparingLong((AResult it) -> it.getEndpointResult().getStart()) + .compare(o1, o2); + return diff; + } + }); - // if(_onlyLast&&epRes.size()!=0){ - // a.analyse(res.last()); - // }else{ - for (AResult ares : res) { - a.analyse(ares); - } - // } - // log.info("ANALYSE AVAILABILITY {} and {}",ep, epRes.size()); + List epRes = m.getResults(ep, AResult.class, AResult.SCHEMA$); + System.out.println(epRes.size()); + System.out.println("Results: " + epRes.size()); + for (AResult epres : epRes) { + res.add(epres); + // System.out.println(new Date(epres.getEndpointResult().getStart())); + } + // if(_onlyLast&&epRes.size()!=0){ + // a.analyse(res.last()); + // }else{ + for (AResult ares : res) { + a.analyse(ares); } + // } + // log.info("ANALYSE AVAILABILITY {} and {}",ep, epRes.size()); + + } } diff --git a/backend/src/test/java/sparqles/analytics/FAnalyticsTEST.java b/backend/src/test/java/sparqles/analytics/FAnalyticsTEST.java index 540d5bbf..bd66c968 100644 --- a/backend/src/test/java/sparqles/analytics/FAnalyticsTEST.java +++ b/backend/src/test/java/sparqles/analytics/FAnalyticsTEST.java @@ -1,5 +1,7 @@ package sparqles.analytics; +import static org.junit.Assert.*; + import java.io.File; import java.net.URISyntaxException; import java.util.Collection; @@ -14,34 +16,34 @@ public class FAnalyticsTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/ends.properties")); - m = new MongoDBManager(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/ends.properties")); + m = new MongoDBManager(); + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + 
public void tearDown() throws Exception { + m.close(); + } - @Test - public void test() throws URISyntaxException { - m.initAggregateCollections(); - FAnalyser a = new FAnalyser(m); + @Test + public void test() throws URISyntaxException { + m.initAggregateCollections(); + FAnalyser a = new FAnalyser(m); - Endpoint ep = EndpointFactory.newEndpoint("http://dbpedia.org/sparql"); - System.out.println("Analyse"); + Endpoint ep = EndpointFactory.newEndpoint("http://dbpedia.org/sparql"); + System.out.println("Analyse"); - Collection ress = m.getResults(ep, FResult.class, FResult.SCHEMA$); - for (FResult pr : ress) { + Collection ress = m.getResults(ep, FResult.class, FResult.SCHEMA$); + for (FResult pr : ress) { - a.analyse(pr); - } + a.analyse(pr); + } - // a.analyse(ep); + // a.analyse(ep); - } + } } diff --git a/backend/src/test/java/sparqles/analytics/IndexViewAnalyticsTEST.java b/backend/src/test/java/sparqles/analytics/IndexViewAnalyticsTEST.java index fdf92f2f..1beaa39d 100644 --- a/backend/src/test/java/sparqles/analytics/IndexViewAnalyticsTEST.java +++ b/backend/src/test/java/sparqles/analytics/IndexViewAnalyticsTEST.java @@ -1,5 +1,7 @@ package sparqles.analytics; +import static org.junit.Assert.*; + import java.io.File; import org.junit.After; import org.junit.Before; @@ -9,24 +11,24 @@ public class IndexViewAnalyticsTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); - m = new MongoDBManager(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); + m = new MongoDBManager(); + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void test() throws Exception { + @Test + public void test() throws Exception { - IndexViewAnalytics a = new 
IndexViewAnalytics(); - a.setDBManager(m); - a.call(); - } + IndexViewAnalytics a = new IndexViewAnalytics(); + a.setDBManager(m); + a.call(); + } } diff --git a/backend/src/test/java/sparqles/analytics/PAnalyticsTEST.java b/backend/src/test/java/sparqles/analytics/PAnalyticsTEST.java index f40df127..33402ab6 100644 --- a/backend/src/test/java/sparqles/analytics/PAnalyticsTEST.java +++ b/backend/src/test/java/sparqles/analytics/PAnalyticsTEST.java @@ -1,5 +1,7 @@ package sparqles.analytics; +import static org.junit.Assert.*; + import java.io.File; import java.net.URISyntaxException; import java.util.Collection; @@ -14,33 +16,33 @@ public class PAnalyticsTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/ends.properties")); - m = new MongoDBManager(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/ends.properties")); + m = new MongoDBManager(); + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void test() throws URISyntaxException { - m.initAggregateCollections(); - PAnalyser a = new PAnalyser(m); + @Test + public void test() throws URISyntaxException { + m.initAggregateCollections(); + PAnalyser a = new PAnalyser(m); - Endpoint ep = EndpointFactory.newEndpoint("http://dbpedia.org/sparql"); - System.out.println("Analyse"); + Endpoint ep = EndpointFactory.newEndpoint("http://dbpedia.org/sparql"); + System.out.println("Analyse"); - Collection ress = m.getResults(ep, PResult.class, PResult.SCHEMA$); - for (PResult pr : ress) { - a.analyse(pr); - } + Collection ress = m.getResults(ep, PResult.class, PResult.SCHEMA$); + for (PResult pr : ress) { + a.analyse(pr); + } - // a.analyse(ep); + // a.analyse(ep); - } + } } diff --git a/backend/src/test/java/sparqles/core/ARQRequestTEST.java 
b/backend/src/test/java/sparqles/core/ARQRequestTEST.java index a331757a..28412a85 100644 --- a/backend/src/test/java/sparqles/core/ARQRequestTEST.java +++ b/backend/src/test/java/sparqles/core/ARQRequestTEST.java @@ -9,24 +9,23 @@ public class ARQRequestTEST { - @Before - public void setUp() throws Exception {} + @Before + public void setUp() throws Exception {} - @After - public void tearDown() throws Exception {} + @After + public void tearDown() throws Exception {} - @Test - @Ignore("requires a server, not a unit test") - public void test() { - // HttpOp.setUserAgent(CONSTANTS.USER_AGENT); - try (QueryExecution ex = - QueryExecution.service( - "http://localhost:8000/sparql", "SELECT * WHERE { ?s ?p ?o . }")) { - ResultSet res; - res = ex.execSelect(); - while (res.hasNext()) { - System.out.println("nextS"); - } - } + @Test + @Ignore("requires a server, not a unit test") + public void test() { + // HttpOp.setUserAgent(CONSTANTS.USER_AGENT); + try (QueryExecution ex = + QueryExecution.service("http://localhost:8000/sparql", "SELECT * WHERE { ?s ?p ?o . 
}")) { + ResultSet res; + res = ex.execSelect(); + while (res.hasNext()) { + System.out.println("nextS"); + } } + } } diff --git a/backend/src/test/java/sparqles/core/AvailabilityTEST.java b/backend/src/test/java/sparqles/core/AvailabilityTEST.java index 1197eb3e..6495565d 100644 --- a/backend/src/test/java/sparqles/core/AvailabilityTEST.java +++ b/backend/src/test/java/sparqles/core/AvailabilityTEST.java @@ -10,59 +10,59 @@ public class AvailabilityTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/main/resources/sparqles.properties")); - // m = new MongoDBManager(); + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/main/resources/sparqles.properties")); + // m = new MongoDBManager(); - } + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void testSingle() throws Exception { + @Test + public void testSingle() throws Exception { - ATask a = new ATask(Endpoints.DBPEDIA); - // a.setDBManager(m); + ATask a = new ATask(Endpoints.DBPEDIA); + // a.setDBManager(m); - AResult ar = a.call(); + AResult ar = a.call(); - // m.insert(ar); - } + // m.insert(ar); + } - // @Test - // public void test() { - // - // Endpoint ep = Endpoints.DBPEDIA; - // - // m.initEndpointCollection(); - // m.initAggregateCollections(); - // m.insert(ep); - // - // Schedule sc = new Schedule(); - // sc.setEndpoint(ep); - // sc.setATask("0 0/2 * 1/1 * ? 
*"); - // m.insert(sc); - // - // Scheduler s = new Scheduler(); - // - // s.useDB(m); - // s.init(m); - // - // try { - // Thread.sleep(60*60*1000); - // } catch (InterruptedException e) { - // // TODO Auto-generated catch block - // e.printStackTrace(); - // } - // - // s.close(); - // m.close(); - // - // } + // @Test + // public void test() { + // + // Endpoint ep = Endpoints.DBPEDIA; + // + // m.initEndpointCollection(); + // m.initAggregateCollections(); + // m.insert(ep); + // + // Schedule sc = new Schedule(); + // sc.setEndpoint(ep); + // sc.setATask("0 0/2 * 1/1 * ? *"); + // m.insert(sc); + // + // Scheduler s = new Scheduler(); + // + // s.useDB(m); + // s.init(m); + // + // try { + // Thread.sleep(60*60*1000); + // } catch (InterruptedException e) { + // // TODO Auto-generated catch block + // e.printStackTrace(); + // } + // + // s.close(); + // m.close(); + // + // } } diff --git a/backend/src/test/java/sparqles/core/DiscoverabilityTEST.java b/backend/src/test/java/sparqles/core/DiscoverabilityTEST.java index bdb12697..c9afc3c0 100644 --- a/backend/src/test/java/sparqles/core/DiscoverabilityTEST.java +++ b/backend/src/test/java/sparqles/core/DiscoverabilityTEST.java @@ -10,41 +10,41 @@ public class DiscoverabilityTEST { - private MongoDBManager m = null; + private MongoDBManager m = null; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); - // m = new MongoDBManager(); + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); + // m = new MongoDBManager(); - // + // - } + } - @After - public void tearDown() throws Exception { - // m.close(); - } + @After + public void tearDown() throws Exception { + // m.close(); + } - @Test - public void test() throws Exception { - Endpoint ep = Endpoints.DBPEDIA; + @Test + public void test() throws Exception { + Endpoint ep = Endpoints.DBPEDIA; - test(ep); - } + 
test(ep); + } - private void test(Endpoint ep) throws Exception { - Task t = TaskFactory.create(CONSTANTS.DTASK, ep, m, null); - DResult res = t.call(); - System.out.println(res); - // m.insert(res); - } + private void test(Endpoint ep) throws Exception { + Task t = TaskFactory.create(CONSTANTS.DTASK, ep, m, null); + DResult res = t.call(); + System.out.println(res); + // m.insert(res); + } - @Test - public void testGroup() throws Exception { - Endpoint[] eps = {Endpoints.DBPEDIA, Endpoints.AEMET}; - for (Endpoint ep : eps) { - test(ep); - } + @Test + public void testGroup() throws Exception { + Endpoint[] eps = {Endpoints.DBPEDIA, Endpoints.AEMET}; + for (Endpoint ep : eps) { + test(ep); } + } } diff --git a/backend/src/test/java/sparqles/core/EPViewTEST.java b/backend/src/test/java/sparqles/core/EPViewTEST.java index 6cc6d5af..d576bdc9 100644 --- a/backend/src/test/java/sparqles/core/EPViewTEST.java +++ b/backend/src/test/java/sparqles/core/EPViewTEST.java @@ -1,5 +1,7 @@ package sparqles.core; +import static org.junit.Assert.*; + import java.io.File; import org.junit.After; import org.junit.Before; @@ -9,22 +11,22 @@ public class EPViewTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/ends.properties")); - m = new MongoDBManager(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/ends.properties")); + m = new MongoDBManager(); + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void test() { + @Test + public void test() { - Endpoint ep = Endpoints.DBPEDIA; - } + Endpoint ep = Endpoints.DBPEDIA; + } } diff --git a/backend/src/test/java/sparqles/core/Endpoints.java b/backend/src/test/java/sparqles/core/Endpoints.java index 8def13d3..b97b3558 100644 --- 
a/backend/src/test/java/sparqles/core/Endpoints.java +++ b/backend/src/test/java/sparqles/core/Endpoints.java @@ -5,19 +5,19 @@ public class Endpoints { - public static Endpoint AEMET, DBPEDIA, TEST, TEST1, NDB; + public static Endpoint AEMET, DBPEDIA, TEST, TEST1, NDB; - static { - try { - AEMET = EndpointFactory.newEndpoint(new URI("http://aemet.linkeddata.es/sparql")); - DBPEDIA = EndpointFactory.newEndpoint(new URI("http://dbpedia.org/sparql")); - TEST = EndpointFactory.newEndpoint(new URI("http://www.rdfabout.com/sparql")); + static { + try { + AEMET = EndpointFactory.newEndpoint(new URI("http://aemet.linkeddata.es/sparql")); + DBPEDIA = EndpointFactory.newEndpoint(new URI("http://dbpedia.org/sparql")); + TEST = EndpointFactory.newEndpoint(new URI("http://www.rdfabout.com/sparql")); - TEST1 = EndpointFactory.newEndpoint(new URI("http://ecowlim.tfri.gov.tw/sparql/query")); + TEST1 = EndpointFactory.newEndpoint(new URI("http://ecowlim.tfri.gov.tw/sparql/query")); - NDB = EndpointFactory.newEndpoint(new URI("http://ndb.publink.lod2.eu/sparql")); - } catch (Exception e) { + NDB = EndpointFactory.newEndpoint(new URI("http://ndb.publink.lod2.eu/sparql")); + } catch (Exception e) { - } } + } } diff --git a/backend/src/test/java/sparqles/core/InteropTEST.java b/backend/src/test/java/sparqles/core/InteropTEST.java index c13edf97..16e8028f 100644 --- a/backend/src/test/java/sparqles/core/InteropTEST.java +++ b/backend/src/test/java/sparqles/core/InteropTEST.java @@ -1,5 +1,7 @@ package sparqles.core; +import static org.junit.Assert.*; + import java.io.File; import org.junit.After; import org.junit.Before; @@ -10,71 +12,71 @@ public class InteropTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/ends.properties")); - m = new MongoDBManager(); - } + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new 
File("src/test/resources/ends.properties")); + m = new MongoDBManager(); + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void testSingle() throws Exception { - Endpoint ep = Endpoints.DBPEDIA; + @Test + public void testSingle() throws Exception { + Endpoint ep = Endpoints.DBPEDIA; - Task t = TaskFactory.create(CONSTANTS.FTASK, ep, m, null); - FResult res = t.call(); - System.out.println(res); - m.insert(res); - } + Task t = TaskFactory.create(CONSTANTS.FTASK, ep, m, null); + FResult res = t.call(); + System.out.println(res); + m.insert(res); + } - // @Test - // public void testGroup() throws Exception { - // - // Endpoint [] eps = {Endpoints.DBPEDIA,Endpoints.AEMET}; - // for(Endpoint ep: eps){ - // Task t = TaskFactory.create(CONSTANTS.PTASK, ep, m, null); - // PResult res = t.call(); - // System.out.println(res); - // m.insert(res); - // } - // - // } - // - // - // @Test - // public void testIterator() { - // - // Endpoint ep = Endpoints.DBPEDIA; - // - // m.initEndpointCollection(); - // m.initAggregateCollections(); - // m.insert(ep); - // - // Schedule sc = new Schedule(); - // sc.setEndpoint(ep); - // sc.setPTask("0 0/2 * 1/1 * ? 
*"); - // m.insert(sc); - // - // Scheduler s = new Scheduler(); - // - // s.useDB(m); - // s.init(m); - // - // try { - // Thread.sleep(60*60*1000); - // } catch (InterruptedException e) { - // // TODO Auto-generated catch block - // e.printStackTrace(); - // } - // - // s.close(); - // m.close(); - // - // } + // @Test + // public void testGroup() throws Exception { + // + // Endpoint [] eps = {Endpoints.DBPEDIA,Endpoints.AEMET}; + // for(Endpoint ep: eps){ + // Task t = TaskFactory.create(CONSTANTS.PTASK, ep, m, null); + // PResult res = t.call(); + // System.out.println(res); + // m.insert(res); + // } + // + // } + // + // + // @Test + // public void testIterator() { + // + // Endpoint ep = Endpoints.DBPEDIA; + // + // m.initEndpointCollection(); + // m.initAggregateCollections(); + // m.insert(ep); + // + // Schedule sc = new Schedule(); + // sc.setEndpoint(ep); + // sc.setPTask("0 0/2 * 1/1 * ? *"); + // m.insert(sc); + // + // Scheduler s = new Scheduler(); + // + // s.useDB(m); + // s.init(m); + // + // try { + // Thread.sleep(60*60*1000); + // } catch (InterruptedException e) { + // // TODO Auto-generated catch block + // e.printStackTrace(); + // } + // + // s.close(); + // m.close(); + // + // } } diff --git a/backend/src/test/java/sparqles/core/LogTest.java b/backend/src/test/java/sparqles/core/LogTest.java index bf582ba4..8df47164 100644 --- a/backend/src/test/java/sparqles/core/LogTest.java +++ b/backend/src/test/java/sparqles/core/LogTest.java @@ -1,5 +1,7 @@ package sparqles.core; +import static org.junit.Assert.*; + import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -7,24 +9,24 @@ import org.slf4j.LoggerFactory; public class LogTest { - private static final Logger log = LoggerFactory.getLogger(LogTest.class); + private static final Logger log = LoggerFactory.getLogger(LogTest.class); - @Before - public void setUp() throws Exception {} + @Before + public void setUp() throws Exception {} - @After - public void tearDown() 
throws Exception {} + @After + public void tearDown() throws Exception {} - @Test - public void test() { + @Test + public void test() { - System.out.println("Test"); - log.debug("DEBUG"); - log.info("INFO"); - log.warn("WARN"); - log.error("ERROR"); + System.out.println("Test"); + log.debug("DEBUG"); + log.info("INFO"); + log.warn("WARN"); + log.error("ERROR"); - org.apache.log4j.Logger log4j = org.apache.log4j.Logger.getLogger(getClass()); - log4j.info("LOG4j info"); - } + org.apache.log4j.Logger log4j = org.apache.log4j.Logger.getLogger(getClass()); + log4j.info("LOG4j info"); + } } diff --git a/backend/src/test/java/sparqles/core/MongoDBTest.java b/backend/src/test/java/sparqles/core/MongoDBTest.java index c4dfbbde..191b8a94 100644 --- a/backend/src/test/java/sparqles/core/MongoDBTest.java +++ b/backend/src/test/java/sparqles/core/MongoDBTest.java @@ -25,84 +25,84 @@ public class MongoDBTest { - protected MongoDBManager m; - EndpointResult epr = new EndpointResult(Endpoints.DBPEDIA, 1L, 2L); - RobotsTXT r = new RobotsTXT(true, true, false, false, false, false, null); - AResult aDummy = new AResult(epr, 1L, true, false, null, "testDummy"); - FResult fDummy = new FResult(epr, new HashMap()); - PResult pDummy = new PResult(epr, new HashMap()); - DResult dDummy = - new DResult(epr, r, (List) new ArrayList(), new ArrayList()); - - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); - m = new MongoDBManager(); - } - - @After - public void tearDown() throws Exception { - m.close(); - } - - @Test - public void testConnection() { - MongoDBManager m = new MongoDBManager(); - assertTrue(m.isRunning()); - } - - @Test - public void testInsertEP() { - m.initEndpointCollection(); - m.setup(); - - Endpoint e = Endpoints.DBPEDIA; - - // assertTrue(m.insert(e)); - // assertEquals(1, m.get(Endpoint.class, Endpoint.SCHEMA$).size()); - // - assertTrue(m.insert(e)); - assertEquals(1, m.get(Endpoint.class, 
Endpoint.SCHEMA$).size()); - - assertTrue(m.remove(e, Endpoint.class)); - assertEquals(0, m.get(Endpoint.class, Endpoint.SCHEMA$).size()); - } - - @Test - public void testUpdateEP() { - m.initEndpointCollection(); - m.setup(); - - Endpoint e = Endpoints.DBPEDIA; - - assertTrue(m.insert(e)); - List ee = m.get(Endpoint.class, Endpoint.SCHEMA$); - assertEquals(1, ee.size()); - - // assertTrue(m.insert(e)); - // ee = m.get(Endpoint.class, Endpoint.SCHEMA$); - // assertEquals(1, ee.size()); - - Endpoint edb = ee.get(0); - assertEquals(0, edb.getDatasets().size()); - e.getDatasets().add(new Dataset("Test", "Test")); - assertTrue(m.update(e)); - ee = m.get(Endpoint.class, Endpoint.SCHEMA$); - edb = ee.get(0); - assertEquals(1, edb.getDatasets().size()); - } - - // @Test - // public void testInsertAResult() { - // m.setup(); - // - // AResult e = aDummy; - // + protected MongoDBManager m; + EndpointResult epr = new EndpointResult(Endpoints.DBPEDIA, 1L, 2L); + RobotsTXT r = new RobotsTXT(true, true, false, false, false, false, null); + AResult aDummy = new AResult(epr, 1L, true, false, null, "testDummy"); + FResult fDummy = new FResult(epr, new HashMap()); + PResult pDummy = new PResult(epr, new HashMap()); + DResult dDummy = + new DResult(epr, r, (List) new ArrayList(), new ArrayList()); + + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/test/resources/sparqles.properties")); + m = new MongoDBManager(); + } + + @After + public void tearDown() throws Exception { + m.close(); + } + + @Test + public void testConnection() { + MongoDBManager m = new MongoDBManager(); + assertTrue(m.isRunning()); + } + + @Test + public void testInsertEP() { + m.initEndpointCollection(); + m.setup(); + + Endpoint e = Endpoints.DBPEDIA; + // assertTrue(m.insert(e)); - // assertEquals(1, m.get(AResult.class, AResult.SCHEMA$).size()); + // assertEquals(1, m.get(Endpoint.class, Endpoint.SCHEMA$).size()); // + assertTrue(m.insert(e)); + assertEquals(1, 
m.get(Endpoint.class, Endpoint.SCHEMA$).size()); + + assertTrue(m.remove(e, Endpoint.class)); + assertEquals(0, m.get(Endpoint.class, Endpoint.SCHEMA$).size()); + } + + @Test + public void testUpdateEP() { + m.initEndpointCollection(); + m.setup(); + + Endpoint e = Endpoints.DBPEDIA; + + assertTrue(m.insert(e)); + List ee = m.get(Endpoint.class, Endpoint.SCHEMA$); + assertEquals(1, ee.size()); + // assertTrue(m.insert(e)); - // assertEquals(2, m.get(AResult.class, AResult.SCHEMA$).size()); - // } + // ee = m.get(Endpoint.class, Endpoint.SCHEMA$); + // assertEquals(1, ee.size()); + + Endpoint edb = ee.get(0); + assertEquals(0, edb.getDatasets().size()); + e.getDatasets().add(new Dataset("Test", "Test")); + assertTrue(m.update(e)); + ee = m.get(Endpoint.class, Endpoint.SCHEMA$); + edb = ee.get(0); + assertEquals(1, edb.getDatasets().size()); + } + + // @Test + // public void testInsertAResult() { + // m.setup(); + // + // AResult e = aDummy; + // + // assertTrue(m.insert(e)); + // assertEquals(1, m.get(AResult.class, AResult.SCHEMA$).size()); + // + // assertTrue(m.insert(e)); + // assertEquals(2, m.get(AResult.class, AResult.SCHEMA$).size()); + // } } diff --git a/backend/src/test/java/sparqles/core/PerformanceTEST.java b/backend/src/test/java/sparqles/core/PerformanceTEST.java index 617f3bc7..849b7d71 100644 --- a/backend/src/test/java/sparqles/core/PerformanceTEST.java +++ b/backend/src/test/java/sparqles/core/PerformanceTEST.java @@ -1,5 +1,7 @@ package sparqles.core; +import static org.junit.Assert.*; + import java.io.File; import org.junit.After; import org.junit.Before; @@ -10,73 +12,73 @@ public class PerformanceTEST { - private MongoDBManager m; + private MongoDBManager m; - @Before - public void setUp() throws Exception { - SPARQLESProperties.init(new File("src/main/resources/sparqles.properties")); - // m = new MongoDBManager(); + @Before + public void setUp() throws Exception { + SPARQLESProperties.init(new File("src/main/resources/sparqles.properties")); 
+ // m = new MongoDBManager(); - } + } - @After - public void tearDown() throws Exception { - m.close(); - } + @After + public void tearDown() throws Exception { + m.close(); + } - @Test - public void testSingle() throws Exception { - Endpoint ep = Endpoints.DBPEDIA; + @Test + public void testSingle() throws Exception { + Endpoint ep = Endpoints.DBPEDIA; - Task t = TaskFactory.create(CONSTANTS.PTASK, ep, m, null); - PResult res = t.call(); - System.out.println(res); - // m.insert(res); + Task t = TaskFactory.create(CONSTANTS.PTASK, ep, m, null); + PResult res = t.call(); + System.out.println(res); + // m.insert(res); - } + } - // @Test - // public void testGroup() throws Exception { - // - // Endpoint [] eps = {Endpoints.DBPEDIA,Endpoints.AEMET}; - // for(Endpoint ep: eps){ - // Task t = TaskFactory.create(CONSTANTS.PTASK, ep, m, null); - // PResult res = t.call(); - // System.out.println(res); - // m.insert(res); - // } - // - // } - // - // - // @Test - // public void testIterator() { - // - // Endpoint ep = Endpoints.DBPEDIA; - // - // m.initEndpointCollection(); - // m.initAggregateCollections(); - // m.insert(ep); - // - // Schedule sc = new Schedule(); - // sc.setEndpoint(ep); - // sc.setPTask("0 0/2 * 1/1 * ? 
*"); - // m.insert(sc); - // - // Scheduler s = new Scheduler(); - // - // s.useDB(m); - // s.init(m); - // - // try { - // Thread.sleep(60*60*1000); - // } catch (InterruptedException e) { - // // TODO Auto-generated catch block - // e.printStackTrace(); - // } - // - // s.close(); - // m.close(); - // - // } + // @Test + // public void testGroup() throws Exception { + // + // Endpoint [] eps = {Endpoints.DBPEDIA,Endpoints.AEMET}; + // for(Endpoint ep: eps){ + // Task t = TaskFactory.create(CONSTANTS.PTASK, ep, m, null); + // PResult res = t.call(); + // System.out.println(res); + // m.insert(res); + // } + // + // } + // + // + // @Test + // public void testIterator() { + // + // Endpoint ep = Endpoints.DBPEDIA; + // + // m.initEndpointCollection(); + // m.initAggregateCollections(); + // m.insert(ep); + // + // Schedule sc = new Schedule(); + // sc.setEndpoint(ep); + // sc.setPTask("0 0/2 * 1/1 * ? *"); + // m.insert(sc); + // + // Scheduler s = new Scheduler(); + // + // s.useDB(m); + // s.init(m); + // + // try { + // Thread.sleep(60*60*1000); + // } catch (InterruptedException e) { + // // TODO Auto-generated catch block + // e.printStackTrace(); + // } + // + // s.close(); + // m.close(); + // + // } } diff --git a/backend/src/test/java/sparqles/core/SchedulerTEST.java b/backend/src/test/java/sparqles/core/SchedulerTEST.java index 4a71fe8f..de7c8720 100644 --- a/backend/src/test/java/sparqles/core/SchedulerTEST.java +++ b/backend/src/test/java/sparqles/core/SchedulerTEST.java @@ -1,5 +1,7 @@ package sparqles.core; +import static org.junit.Assert.*; + import java.text.ParseException; import java.util.Date; import org.junit.After; @@ -10,34 +12,34 @@ public class SchedulerTEST { - @Before - public void setUp() throws Exception {} - - @After - public void tearDown() throws Exception {} - - @Test - public void test() { - - try { - CronBasedIterator iter = new CronBasedIterator(Scheduler.CRON_EVERY_HOUR); - System.out.println("Now: " + new 
Date(System.currentTimeMillis())); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - - iter = new CronBasedIterator(Scheduler.CRON_EVERY_ONETEN); - System.out.println("Now: " + new Date(System.currentTimeMillis())); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - System.out.println("Next: " + iter.next()); - - } catch (ParseException e) { - e.printStackTrace(); - } + @Before + public void setUp() throws Exception {} + + @After + public void tearDown() throws Exception {} + + @Test + public void test() { + + try { + CronBasedIterator iter = new CronBasedIterator(Scheduler.CRON_EVERY_HOUR); + System.out.println("Now: " + new Date(System.currentTimeMillis())); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + + iter = new CronBasedIterator(Scheduler.CRON_EVERY_ONETEN); + System.out.println("Now: " + new Date(System.currentTimeMillis())); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + System.out.println("Next: " + iter.next()); + + } catch (ParseException e) { + e.printStackTrace(); } + } } diff --git a/backend/src/test/resources/sparqles.properties b/backend/src/test/resources/sparqles.properties index eec69a90..125c1790 100644 --- a/backend/src/test/resources/sparqles.properties +++ b/backend/src/test/resources/sparqles.properties @@ -1,22 +1,28 @@ # + # FileManager data.dir=data.test + #QueryManager #ftask.queries=ftask/ 
ftask.queries=ftask/ ptask.queries=ptask/ + #Scheduler task.threads=50 + # Database Manager db.host=localhost db.port=27011 db.name=sparqles + # Wait time between two consequutive SPARQL queries against the same server #general wait time waittime=5000 #or specifif for each task #ptask.waittime= #ftask.waittime= + #Endpoints endpoint.list=file:./WebContent/WEB-INF/resources/datahub.endpoints diff --git a/frontend/app.js b/frontend/app.js index 44d5f14b..b246ad48 100644 --- a/frontend/app.js +++ b/frontend/app.js @@ -53,14 +53,51 @@ app.get('/', function (req, res) { else return v } ) - // rename availability colors range - for (var i = 0; i < amonths.length; i++) { - if (amonths[i]['key'] == '0-5') amonths[i]['key'] = '[0-5)' - if (amonths[i]['key'] == '5-75') amonths[i]['key'] = '[5-75)' - if (amonths[i]['key'] == '75-95') amonths[i]['key'] = '[75-95)' - if (amonths[i]['key'] == '95-99') amonths[i]['key'] = '[95-99)' - if (amonths[i]['key'] == '99-100') amonths[i]['key'] = '[99-100]' + + var indexCalculation = JSON.parse(JSON.stringify(index.calculation), function(k, v) { + if (k === "data") + this.values = v; + else + return v; + }); + + console.log(`All availability data: ${JSON.stringify(index.availability)}`); + console.log(`All amonths data: ${JSON.stringify(amonths)}`); + + var availability = amonths; + //var availability = null; // do not use amonths, use index.availability + if (typeof availability != undefined && availability != null && availability.length > 0) { + // TODO: stop this senseless renaming 'zeroFive' to '0-5' to '[0-5)' + for (var i = 0; i < availability.length; i++) { + if (availability[i]['key'] == '0-5') availability[i]['key'] = '[0-5)' + if (availability[i]['key'] == '5-75') availability[i]['key'] = '[5-75)' + if (availability[i]['key'] == '75-95') availability[i]['key'] = '[75-95)' + if (availability[i]['key'] == '95-99') availability[i]['key'] = '[95-99)' + if (availability[i]['key'] == '99-100') availability[i]['key'] = '[99-100]' + } }
+ else { + availability = index.availability; + + function indexValuesToAmonthFormat(entry) { + console.log(`Current index.avail entry: ${JSON.stringify(amonths)}`); + return entry.values.map(function (value) { + // return [new Date(parseInt(value.x)).toISOString(), value.y * 100]; + // return [value.x, value.y * 100]; + return [parseInt(value.x), value.y * 100]; + }).sort((a,b) => a[0] - b[0]); + } + + availability = [ + { "key": "[0-5)", "index": 1, "values": indexValuesToAmonthFormat(availability[0]) }, + { "key": "[5-90)", "index": 2, "values": indexValuesToAmonthFormat(availability[1]) }, + { "key": "[90-95)", "index": 3, "values": indexValuesToAmonthFormat(availability[2]) }, + { "key": "[95-100]", "index": 4, "values": indexValuesToAmonthFormat(availability[3]) } + ]; + + console.log(`Final values from index.avail: ${JSON.stringify(availability)}`); + } + //amonths = JSON.parse(JSON.stringify(amonths).replace("\"0\-5\":", "\"[0-5[\":")); //PERFORMANCE @@ -96,37 +133,47 @@ app.get('/', function (req, res) { configInstanceTitle: configApp.get('configInstanceTitle'), baseUri: configApp.get('baseUri'), gitRepo: configApp.get('gitRepo'), - amonths: amonths, + amonths: availability, // TODO: refactor naming index: index, indexInterop: indexInterop, + indexCalculation: indexCalculation, nbEndpointsSearch: nbEndpointsSearch, nbEndpointsVoID: nbEndpointsVoID, nbEndpointsSD: nbEndpointsSD, nbEndpointsServerName: nbEndpointsServerName, nbEndpointsTotal: nbEndpointsTotal, nbEndpointsNoDesc: nbEndpointsNoDesc, - lastUpdate: lastUpdate.length > 0 ? lastUpdate[0].lastUpdate : 0, + lastUpdate: lastUpdate.length > 0 ? 
new Date(lastUpdate[0].lastUpdate) : 0, perf: { - threshold: 10000 /*mostCommonThreshold[0]*/, - data: [ - { - key: 'Cold Tests', - color: '#1f77b4', - values: [ - { label: 'Median ASK', value: avgASKCold }, - { label: 'Median JOIN', value: avgJOINCold }, - ], - }, - { - key: 'Warm Tests', - color: '#2ca02c', - values: [ - { label: 'Median ASK', value: avgASKWarm }, - { label: 'Median JOIN', value: avgJOINWarm }, - ], - }, - ], + threshold: index.performance?.threshold > 0 ? index.performance.threshold : 10000, + data: index.performance.data.map((entry) => { + return { + key: entry.key, + color: entry.color, + // FIXME: update schema to avoid this + values: entry.data, + } + }) + // data: [ + // { + // key: 'Cold Tests', + // color: '#1f77b4', + // values: [ + // { label: 'Median ASK', value: avgASKCold }, + // { label: 'Median JOIN', value: avgJOINCold }, + // ], + // }, + // { + // key: 'Warm Tests', + // color: '#2ca02c', + // values: [ + // { label: 'Median ASK', value: avgASKWarm }, + // { label: 'Median JOIN', value: avgJOINWarm }, + // ], + // }, + // ], }, + // perf: index.performance, configInterop: JSON.parse(fs.readFileSync('./texts/interoperability.json')), configPerformance: JSON.parse(fs.readFileSync('./texts/performance.json')), configDisco: JSON.parse(fs.readFileSync('./texts/discoverability.json')), @@ -270,28 +317,34 @@ app.get('/endpoint', function (req, res) { } mongoDBProvider.getLatestDisco(uri, function (error, latestDisco) { - var SDDescription = [ - { - label: 'foo', - value: true, - }, - ] - var SDDescription = [] - var descriptionFiles = latestDisco[0].descriptionFiles - for (var i = 0; i < descriptionFiles.length; i++) { - var d = descriptionFiles[i] - var name = d.Operation - if (name == 'EPURL') name = 'HTTP Get' - if (name == 'wellknown') name = '/.well-known/void' - var preds = false - // check if SPARQLDESCpreds object is empty or not - if (Object.keys(d.SPARQLDESCpreds).length) { - preds = true + var SDDescription = []; + + if 
(typeof latestDisco == 'undefined' || latestDisco == null) { + SDDescription = [ + { + label: 'foo', + value: true, + }, + ]; + } else { + var descriptionFiles = latestDisco[0]?.descriptionFiles; + if (descriptionFiles != null) { + for (var i = 0; i < descriptionFiles.length; i++) { + var d = descriptionFiles[i] + var name = d.Operation + if (name == 'EPURL') name = 'HTTP Get' + if (name == 'wellknown') name = '/.well-known/void' + var preds = false + // check if SPARQLDESCpreds object is empty or not + if (Object.keys(d.SPARQLDESCpreds).length) { + preds = true + } + SDDescription.push({ + label: name, + value: preds, + }) } - SDDescription.push({ - label: name, - value: preds, - }) + } } docs[0].discoverability.SDDescription = SDDescription diff --git a/frontend/views/content/api.pug b/frontend/views/content/api.pug index bd98a957..bfb9f305 100644 --- a/frontend/views/content/api.pug +++ b/frontend/views/content/api.pug @@ -35,7 +35,7 @@ block content td - td - td - - .apiExample For example, using the List All Endpoints API, your app can have access to the latest list of endpoints: + .apiExample For example, using the List All Endpoints API, your app can have access to the latest list of endpoints: a(href="/api/endpoint/list", target="_blank") #{baseUri}/api/endpoint/list li.apiOperation#listEndpoints .apiOperationHeading @@ -105,7 +105,7 @@ block content td - td - td - - .apiExample For example, using the Availability API, your app can have the latest list of endpoints and their availability: + .apiExample For example, using the Availability API, your app can have the latest list of endpoints and their availability: a(href="/api/availability", target="_blank") #{baseUri}/api/availability li.apiOperation#discoverability .apiOperationHeading @@ -127,7 +127,7 @@ block content td - td - td - - .apiExample For example, using the Discoverability API, your app can have the latest list of endpoints and their discoverability information: + .apiExample For example, using 
the Discoverability API, your app can have the latest list of endpoints and their discoverability information: a(href="/api/discoverability", target="_blank") #{baseUri}/api/discoverability li.apiOperation#interoperability .apiOperationHeading @@ -149,7 +149,7 @@ block content td - td - td - - .apiExample For example, using the Interoperability API, your app can have the latest list of endpoints and their interoperability information: + .apiExample For example, using the Interoperability API, your app can have the latest list of endpoints and their interoperability information: a(href="/api/interoperability", target="_blank") #{baseUri}/api/interoperability li.apiOperation#performance .apiOperationHeading @@ -171,9 +171,9 @@ block content td - td - td - - .apiExample For example, using the Performance API, your app can have the latest list of endpoints and their performance information: + .apiExample For example, using the Performance API, your app can have the latest list of endpoints and their performance information: a(href="/api/performance", target="_blank") #{baseUri}/api/performance - script. + script. 
$(".apiOperationContent").hide() $( ".apiOperationHeading" ).click(function() { $(this).parent().find( ".apiOperationContent" ).slideToggle( "slow", function() { diff --git a/frontend/views/content/availability.pug b/frontend/views/content/availability.pug index a4cea5d8..c0f6ad54 100644 --- a/frontend/views/content/availability.pug +++ b/frontend/views/content/availability.pug @@ -34,15 +34,15 @@ block content article.wrp .wrplInnerBox(style='height: 100%;') div - b #{parseFloat((nbEndpointsUp/nbEndpointsTotal*100).toFixed(2))+ '%'} - | (#{nbEndpointsUp}/#{nbEndpointsTotal}) endpoints are - b available + b #{parseFloat((nbEndpointsUp/nbEndpointsTotal*100).toFixed(2))+ '%'} + | (#{nbEndpointsUp}/#{nbEndpointsTotal}) endpoints are + b available br -//#{JSON.stringify(atasks_agg[0])} table#table_current.sortable.psp-table(cellspacing='0', cellpadding='0', border='0', width='100%') thead tr - th.sorttable_nosort.tablehead(width='5%') + th.sorttable_nosort.tablehead(width='5%') th.tablehead(width='45%') SPARQL Endpoint th.tablehead(width='25%') Uptime Last 24h th.tablehead(width='25%') Uptime Last 7 days @@ -73,13 +73,13 @@ block content span(onmouseover=`tooltip.show('${generateTooltip(ep.endpoint.datasets)}')`, onmouseout='tooltip.hide();') #{ep.endpoint.uri} (#{ep.endpoint.datasets.length}) else if ep.endpoint.datasets.length>0 - td + td a(href!=`/endpoint?uri=${encodeURIComponent(ep.endpoint.uri)}`) #{ep.endpoint.datasets[0].label} - else + else td a(href!=`/endpoint?uri=${encodeURIComponent(ep.endpoint.uri)}`) #{ep.endpoint.uri} //-Display uptime last 24h - td + td if ep.uptimeLast24h==0 .textDisabled 0% else diff --git a/frontend/views/content/index.pug b/frontend/views/content/index.pug index a0469aa8..381caa38 100644 --- a/frontend/views/content/index.pug +++ b/frontend/views/content/index.pug @@ -10,7 +10,7 @@ block content header.wrpl.w-3-3 h1 | SPARQL Endpoints Status !{configInstanceTitle} - time Last update: #{new Date(lastUpdate)} + time Last update: 
#{lastUpdate.toISOString()} details summary Description: p SPARQL Endpoints Status tool monitors the availability, performance, interoperability and discoverability of SPARQL Endpoints registered in Datahub. @@ -22,19 +22,19 @@ block content article.wrpl(style='min-width: 400px;') .wrplInnerBox(style='height: 380px;') a(title='', href='./availability') - h2 + h2 span(onmouseover!=`tooltip.show('${configAvailability["availability"]}')`, onmouseout='tooltip.hide();') Availability svg#avail(style='height: 350px; padding-right:10px;') article.wrpl(style='min-width: 400px;') .wrplInnerBox(style='height: 380px;') - function numberWithCommas(x) {if (x) return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ",");} a(title='', href='./performance') - h2 + h2 span(onmouseover!=`tooltip.show('${configPerformance["performance"]}')`, onmouseout='tooltip.hide();') Performance span - b #{numberWithCommas(perf.threshold)} - | is the most common - span(onmouseover!=`tooltip.show('${configPerformance["Result-size thresholds"]}')`, onmouseout='tooltip.hide();') result-size threshold + b #{numberWithCommas(perf.threshold)} + | is the median + span(onmouseover!=`tooltip.show('${configPerformance["Result-size thresholds"]}')`, onmouseout='tooltip.hide();') result-size threshold svg#perf article.wrpl(style='min-width: 400px;') .wrplInnerBox(style='height: 380px;') @@ -45,52 +45,106 @@ block content article.wrpl(style='min-width: 400px;') .wrplInnerBox(style='height: 380px;') a(title='', href='./discoverability') - h2 + h2 span(onmouseover!=`tooltip.show('${configDisco["discoverability"]}')`, onmouseout='tooltip.hide();') Discoverability div - b #{parseFloat((nbEndpointsVoID/nbEndpointsTotal*100).toFixed(2))+ '%'} - | of the endpoints provide a - span(onmouseover!=`tooltip.show('${configDisco["VoID Description"]}')`, onmouseout='tooltip.hide();') VoID description + b #{parseFloat((nbEndpointsVoID/nbEndpointsTotal*100).toFixed(2))+ '%'} + | of the endpoints provide a + 
span(onmouseover!=`tooltip.show('${configDisco["VoID Description"]}')`, onmouseout='tooltip.hide();') VoID description div - b #{parseFloat((nbEndpointsSD/nbEndpointsTotal*100).toFixed(2))+ '%'} - | of the endpoints provide a - span(onmouseover!=`tooltip.show('${configDisco["Service Description"]}')`, onmouseout='tooltip.hide();') SD description + b #{parseFloat((nbEndpointsSD/nbEndpointsTotal*100).toFixed(2))+ '%'} + | of the endpoints provide a + span(onmouseover!=`tooltip.show('${configDisco["Service Description"]}')`, onmouseout='tooltip.hide();') SD description div - b #{parseFloat((nbEndpointsNoDesc/nbEndpointsTotal*100).toFixed(2))+ '%'} - | of the endpoints have - b no - | description + b #{parseFloat((nbEndpointsNoDesc/nbEndpointsTotal*100).toFixed(2))+ '%'} + | of the endpoints have + b no + | description br span(onmouseover!=`tooltip.show('${configDisco["Server Name"]}')`, onmouseout='tooltip.hide();') Server name: svg#disco(style='height: 350px;margin-top:-50px;') + article.wrpl(style='min-width: 400px;') + .wrplInnerBox(style='height: 380px;') + a(title='', href='#') + h2 + span(onmouseover='tooltip.show(\'#{configProfiles["profiles"]}\')', onmouseout='tooltip.hide();') Profiles (VoID & Coherence) + div + b #{parseFloat((indexCalculation.VoID*100).toFixed(2))+ '%'} + | of the endpoints have a + b calculated VoID descriptions + | (including + b #{parseFloat((indexCalculation.VoIDPart*100).toFixed(2))+ '%'} + | partially calculated) + div + b #{parseFloat((indexCalculation.Coherence*100).toFixed(2))+ '%'} + | of the endpoints have a + b calculated coherence + br + span(onmouseover='tooltip.show(\'#{configProfiles["Coherence"]}\')', onmouseout='tooltip.hide();') Coherence: + svg#coher(style='height: 350px;margin-top:-50px;') + article.wrpl(style='min-width: 400px;') + .wrplInnerBox(style='height: 380px;') + a(title='', href='#') + h2 + span(onmouseover='tooltip.show(\'#{configProfiles["profiles"]}\')', onmouseout='tooltip.hide();') Profiles (SD & RS) + 
div + b #{parseFloat((indexCalculation.SD*100).toFixed(2))+ '%'} + | of the endpoints have a + b calculated Service description + | (including + b #{parseFloat((indexCalculation.SDPart*100).toFixed(2))+ '%'} + | partially calculated) + div + b #{parseFloat((indexCalculation.RS*100).toFixed(2))+ '%'} + | of the endpoints have a + b calculated relation specialty + br + span(onmouseover='tooltip.show(\'#{configProfiles["RS"]}\')', onmouseout='tooltip.hide();') Relationship specialty: + svg#rs(style='height: 350px;margin-top:-50px;') + script. + script. var colors = d3.scale.category20(); - keyColor = function(d, i) {return colors(d.key)}; - + keyColor = function(d, i) { + // return grey colour for values to be largely ignored + if (d.key === "[0-5)" || d.key === "missing") { + return "#c4c4c4"; + } + return colors(d.key); + }; + + labelColor = function(d, i) { + // return grey colour for values to be largely ignored + if (d?.data?.label === "missing") { + return "#c4c4c4"; + } + return colors(d?.data?.label); + }; + // Availability nv.addGraph(function() { var chart = nv.models.stackedAreaChart() - .x(function(d) { return d[0] }) + .x(function(d) { return new Date(d[0]) }) .y(function(d) { return d[1] }) //.yDomain([0,1]) .color(keyColor) .showControls(false); //.clipEdge(true); - - //chart.xAxis.tickFormat(function(d) { return d3.time.format('%Y-%m')(new Date(d)) }); + + chart.xAxis.tickFormat(function(d) { return d3.time.format('%Y-%m')(new Date(d)) }); //make sure everyone has the same time displayed. 
- chart.xAxis.tickFormat(function(d) { return d3.time.format('%Y-%m')(new Date(d+ ((new Date().getTimezoneOffset()+60) * 60000))) }); + //- chart.xAxis.tickFormat(function(d) { return d3.time.format('%Y-%m')(new Date(d+ ((new Date().getTimezoneOffset()+60) * 60000))) }); chart.yAxis.tickFormat(d3.format('.0f')); - + d3.select('#avail') .datum(!{JSON.stringify(amonths)}) .call(chart); - + nv.utils.windowResize(chart.update); chart.dispatch.on('stateChange', function(e) { nv.log('New State:', JSON.stringify(e)); }); return chart; }); - + // Performance nv.addGraph(function() { var chart = nv.models.multiBarHorizontalChart() @@ -148,7 +202,8 @@ block content .valueFormat(d3.format('.2%')) .showLegend(false) .height(550) - .color(d3.scale.category10().range()) + //- .color(d3.scale.category10().range()) + .color(labelColor) .margin({top: 0, right: 5, bottom: 40, left: 0}) .donut(true); chart.pie @@ -161,3 +216,49 @@ block content nv.utils.windowResize(chart.update); return chart; }); + + // Coherence + nv.addGraph(function() { + var chart = nv.models.pieChart() + .x(function(d) { return d.label }) + .y(function(d) { return d.value }) + .values(function(d) { return d }) + .valueFormat(d3.format('.2%')) + .showLegend(false) + .height(550) + .color(d3.scale.category10().range()) + .margin({top: 0, right: 5, bottom: 40, left: 0}) + .donut(true); + chart.pie + .startAngle(function(d) { return d.startAngle/2 -Math.PI/2 }) + .endAngle(function(d) { return d.endAngle/2 -Math.PI/2 }); + d3.select("#coher") + .datum([!{JSON.stringify(indexCalculation.coherences[0].values)}]) + .transition().duration(500) + .call(chart); + nv.utils.windowResize(chart.update); + return chart; + }); + + // Relationship speciality + nv.addGraph(function() { + var chart = nv.models.pieChart() + .x(function(d) { return d.label }) + .y(function(d) { return d.value }) + .values(function(d) { return d }) + .valueFormat(d3.format('.2%')) + .showLegend(false) + .height(550) + 
.color(d3.scale.category10().range()) + .margin({top: 0, right: 5, bottom: 40, left: 0}) + .donut(true); + chart.pie + .startAngle(function(d) { return d.startAngle/2 -Math.PI/2 }) + .endAngle(function(d) { return d.endAngle/2 -Math.PI/2 }); + d3.select("#rs") + .datum([!{JSON.stringify(indexCalculation.rss[0].values)}]) + .transition().duration(500) + .call(chart); + nv.utils.windowResize(chart.update); + return chart; + }); diff --git a/frontend/views/content/performance.pug b/frontend/views/content/performance.pug index 4f2bb438..d7d7c632 100644 --- a/frontend/views/content/performance.pug +++ b/frontend/views/content/performance.pug @@ -52,7 +52,7 @@ block content //-TODO: if more than one endpoint then display how many and their names if ep.endpoint.datasets.length>1 - var generateTooltip = function(datasets){tooltipHtml='This endpoint includes '+datasets.length+' datasets:

    '; - - for(var i in datasets){tooltipHtml+='
  • '+datasets[i].label+'
  • ';} + - for(var i in datasets){tooltipHtml+='
  • '+datasets[i]?.label+'
  • ';} - tooltipHtml+='
'; - return tooltipHtml; - } @@ -60,8 +60,9 @@ block content a(href!=`/endpoint?uri=${encodeURIComponent(ep.endpoint.uri)}`) span(onmouseover=`tooltip.show('${generateTooltip(ep.endpoint.datasets)}')`, onmouseout='tooltip.hide();') #{ep.endpoint.uri} (#{ep.endpoint.datasets.length}) else + - var label = ep.endpoint.datasets[0]?.label td - a(href!=`/endpoint?uri=${encodeURIComponent(ep.endpoint.uri)}`) #{ep.endpoint.datasets[0].label} + a(href!=`/endpoint?uri=${encodeURIComponent(ep.endpoint.uri)}`) #{label} //-Display Threshold td.tdright if ep.threshold>0 && ep.threshold%100==0 diff --git a/frontend/views/footer.pug b/frontend/views/footer.pug index d4f9db84..963b5b8e 100644 --- a/frontend/views/footer.pug +++ b/frontend/views/footer.pug @@ -21,8 +21,8 @@ footer#footer a(href=`${gitRepo}/issues/new?labels=bug`, title='Report a bug', target='_blank') Report a bug //- li //- a(href=`${gitRepo}/wiki`, title='About SPARQL Endpoints Status', target='_blank') About - //- li - //- a(href=`${baseUri}/api`, title='Get access to SPARQLES via APIs') APIs + li + a(href=`${baseUri}/api`, title='Get access to SPARQLES via APIs') API //- li //- a(href=`${baseUri}/data`, title='Get access to SPARQLES Data') Data //- li @@ -34,6 +34,8 @@ footer#footer ul.xoxo.blogroll li a(href='https://www.semanticscholar.org/paper/SPARQLES%3A-Monitoring-public-SPARQL-endpoints-Vandenbussche-Umbrich/8e4248fcffed6d28f1b9f4547243a611e3be097b', target='_blank') Semantic Web J. 
(2017) + li + a(href='https://web.archive.org/web/20240613012405/https://www.3dfed.com/wp-content/uploads/2023/03/3DFed-D2.2-Report-on-Monitoring-the-Data-Storages.pdf', target='_blank') 3DFed D2.2 (2023) .wrpl //- a#mt(href='http://www.wu.ac.at/infobiz/en/', target='_blank') //- div diff --git a/scripts/requirements.txt b/scripts/requirements.txt new file mode 100644 index 00000000..575c7e9b --- /dev/null +++ b/scripts/requirements.txt @@ -0,0 +1,2 @@ +rdflib==7.1.1 +requests==2.32.3 diff --git a/scripts/script.py b/scripts/script.py new file mode 100644 index 00000000..d7c29999 --- /dev/null +++ b/scripts/script.py @@ -0,0 +1,116 @@ +import sys +import rdflib +from rdflib.namespace import RDF, RDFS, DCTERMS +from rdflib import Namespace +import subprocess +import urllib.parse +import requests + +# Define namespaces +VOID = Namespace("http://rdfs.org/ns/void#") +SD = Namespace("http://www.w3.org/ns/sparql-service-description#") + +def parse_turtle_file(file_path): + # Create a graph + g = rdflib.Graph() + + g.bind("void", VOID) + g.bind("sd", SD) +# g.bind("dcterms", DCTERMS) + + # Parse the Turtle file + g.parse(file_path, format='turtle') + + # Query the graph for the sparqlEndpoint and label + query = """ + PREFIX dcterms: <http://purl.org/dc/terms/> + + SELECT ?endpointUri ?endpointLabel + WHERE { + { + ?dataset a void:Dataset ; + void:sparqlEndpoint ?endpointUri ; + rdfs:label ?endpointLabel . + } + UNION + { + ?service a sd:Service ; + sd:endpoint ?endpointUri ; + dcterms:description ?endpointLabel .
+ } + } + """ + results = g.query(query) + + return results + +def decode_and_filter_uri(endpointUri): + # Strip the 'urn:sd:' prefix if present + if endpointUri.startswith("urn:sd:"): + endpointUri = endpointUri[len("urn:sd:"):] + + # URL-decode the URI + endpointUri = urllib.parse.unquote(endpointUri) + + return endpointUri + +def check_url(endpointUri): + try: + response = requests.get(endpointUri + "?query=ASK+WHERE%7B?s+?p+?o%7D", timeout=5) + if response.status_code == 200: + return True + else: + print(f"URL {endpointUri} returned status code {response.status_code}") + return False + except requests.RequestException as e: +# print(f"URL {endpointUri} failed to connect: {e}") + return False + +def execute_system_call(endpointUri, endpointLabel): + # Construct the command + command = [ + "docker", "compose", "exec", "backend-svc", "bash", "-c", + f"/build/bin/sparqles -p /build/src/main/resources/sparqles_docker.properties --addEndpoint {endpointUri} '{endpointLabel}'" + ] + + # Execute the command + subprocess.run(command, check=True) + #print(" ".join(command)) + + +def truncate_description(description): + # Split the description by lines + lines = description.splitlines() + + # Take the first line + first_line = lines[0] if lines else "" + + # Truncate to the first 160 characters + truncated_description = first_line[:160] + + return truncated_description + +def main(file_path): + # Parse the Turtle file + results = parse_turtle_file(file_path) + + # Iterate over the results and execute the system call for each endpoint + for row in results: + endpointUri = row.endpointUri + endpointLabel = truncate_description(row.endpointLabel) + if "'" in endpointLabel: + endpointLabel = "" + # Decode and filter the URI + endpointUri = decode_and_filter_uri(endpointUri) + if endpointUri.startswith("http://") or endpointUri.startswith("https://"): + # Check if the URL is reachable and returns a 200 status code + if check_url(endpointUri): + execute_system_call(endpointUri, 
endpointLabel) +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Usage: python script.py <file_path>") + sys.exit(1) + + file_path = sys.argv[1] + main(file_path)