diff --git a/hedweb/templates/remodel-input.html b/hedweb/templates/remodel-input.html
new file mode 100644
index 00000000..31c5806d
--- /dev/null
+++ b/hedweb/templates/remodel-input.html
@@ -0,0 +1,14 @@
+{% macro create_remodel_input(title) %}
+
+{% endmacro %}
\ No newline at end of file
diff --git a/tests/data/bad_reorder_remdl.json b/tests/data/bad_reorder_remdl.json
new file mode 100644
index 00000000..0ac7dfbd
--- /dev/null
+++ b/tests/data/bad_reorder_remdl.json
@@ -0,0 +1,46 @@
+[
+ {
+ "badcommand": "remove_columns",
+ "description": "bad structure",
+ "parameters": {
+ "ignore_missing": true
+ }
+ },
+ {
+ "command": "remove_columns",
+ "description": "Get rid of the sample and the value columns",
+ "parameters": {
+ "remove_names": [
+ "sample",
+ "value"
+ ],
+ "ignore_missing": true
+ }
+ },
+ {
+ "command": "unknown_command",
+ "description": "bad command",
+ "parameters": {
+ "ignore_missing": true
+ }
+ },
+ {
+ "command": "reorder_columns",
+    "description": "Reorder columns so that onset and duration come first and the remaining columns follow",
+ "parameters": {
+ "column_order": [
+ "onset",
+ "duration"
+ ],
+ "ignore_missing": true,
+ "keep_others": true
+ }
+ },
+ {
+ "command": "remove_columns",
+ "description": "bad parameters",
+ "parameters": {
+ "ignore_missing": true
+ }
+ }
+]
diff --git a/tests/data/simple_reorder_remdl.json b/tests/data/simple_reorder_remdl.json
new file mode 100644
index 00000000..7bf4eb15
--- /dev/null
+++ b/tests/data/simple_reorder_remdl.json
@@ -0,0 +1,19 @@
+[
+ {
+ "command": "remove_columns",
+ "description": "Get rid of the sample and the value columns",
+ "parameters": {
+ "remove_names": ["sample", "value"],
+ "ignore_missing": true
+ }
+ },
+ {
+ "command": "reorder_columns",
+    "description": "Reorder columns so that onset and duration come first and the remaining columns follow",
+ "parameters": {
+ "column_order": ["onset", "duration"],
+ "ignore_missing": true,
+ "keep_others": true
+ }
+ }
+]
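
For reference, a minimal sketch (not part of the patch) of how a command file like simple_reorder_remdl.json reaches the new remodel() entry point. It assumes the hedweb.events.remodel(hed_schema, events, sidecar, remodel_commands) signature and the {'name', 'commands'} wrapper exercised by the tests added later in this patch; the import paths are assumptions.

import json
from hed.models import TabularInput          # assumed import path for TabularInput
from hedweb.events import remodel            # signature as exercised by the new tests

# Load the events file and the remodel command list.
events = TabularInput(file='sub-002_task-FacePerception_run-1_events.tsv',
                      name='face_events')
with open('simple_reorder_remdl.json', 'r') as fp:
    commands = json.load(fp)

# The web layer wraps the raw command list with the file name so that error
# reports and download names can reference the remodel file.  In the running
# app this call happens inside a Flask app context, as the unit tests do.
results = remodel(None, events, None,
                  {'name': 'simple_reorder_remdl.json', 'commands': commands})
print(results['msg_category'], results['output_display_name'])
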
diff --git a/tests/data/sub-002_task-FacePerception_run-1_events.tsv b/tests/data/sub-002_task-FacePerception_run-1_events.tsv
new file mode 100644
index 00000000..24b41a36
--- /dev/null
+++ b/tests/data/sub-002_task-FacePerception_run-1_events.tsv
@@ -0,0 +1,201 @@
+onset duration sample event_type face_type rep_status trial rep_lag value stim_file
+0.004 n/a 1.0 setup_right_sym n/a n/a n/a n/a 3 n/a
+24.2098181818 n/a 6052.4545 show_face_initial unfamiliar_face first_show 1 n/a 13 u032.bmp
+25.0352727273 n/a 6258.8182 show_circle n/a n/a 1 n/a 0 circle.bmp
+25.158 n/a 6289.5 left_press n/a n/a 1 n/a 256 n/a
+26.7352727273 n/a 6683.8182 show_cross n/a n/a 2 n/a 1 cross.bmp
+27.2498181818 n/a 6812.4545 show_face unfamiliar_face immediate_repeat 2 1 14 u032.bmp
+27.8970909091 n/a 6974.2727 left_press n/a n/a 2 n/a 256 n/a
+28.0998181818 n/a 7024.9545 show_circle n/a n/a 2 n/a 0 circle.bmp
+29.7998181818 n/a 7449.9545 show_cross n/a n/a 3 n/a 1 cross.bmp
+30.3570909091 n/a 7589.2727 show_face unfamiliar_face first_show 3 n/a 13 u088.bmp
+31.188 n/a 7797.0 show_circle n/a n/a 3 n/a 0 circle.bmp
+32.888 n/a 8222.0 show_cross n/a n/a 4 n/a 1 cross.bmp
+33.3643636364 n/a 8341.0909 show_face unfamiliar_face first_show 4 n/a 13 u084.bmp
+34.368 n/a 8592.0 show_circle n/a n/a 4 n/a 0 circle.bmp
+36.068 n/a 9017.0 show_cross n/a n/a 5 n/a 1 cross.bmp
+36.5561818182 n/a 9139.0455 show_face famous_face first_show 5 n/a 5 f123.bmp
+37.3161818182 n/a 9329.0455 right_press n/a n/a 5 n/a 4096 n/a
+37.3825454545 n/a 9345.6364 show_circle n/a n/a 5 n/a 0 circle.bmp
+39.0825454545 n/a 9770.6364 show_cross n/a n/a 6 n/a 1 cross.bmp
+39.5789090909 n/a 9894.7273 show_face unfamiliar_face first_show 6 n/a 13 u022.bmp
+40.581636363600005 n/a 10145.4091 show_circle n/a n/a 6 n/a 0 circle.bmp
+42.2816363636 n/a 10570.4091 show_cross n/a n/a 7 n/a 1 cross.bmp
+42.8025454545 n/a 10700.6364 show_face famous_face first_show 7 n/a 5 f094.bmp
+43.5489090909 n/a 10887.2273 right_press n/a n/a 7 n/a 4096 n/a
+43.7198181818 n/a 10929.9545 show_circle n/a n/a 7 n/a 0 circle.bmp
+45.4198181818 n/a 11354.9545 show_cross n/a n/a 8 n/a 1 cross.bmp
+46.043454545500005 n/a 11510.8636 show_face scrambled_face first_show 8 n/a 17 s150.bmp
+46.95072727270001 n/a 11737.6818 show_circle n/a n/a 8 n/a 0 circle.bmp
+48.6507272727 n/a 12162.6818 show_cross n/a n/a 9 n/a 1 cross.bmp
+49.1343636364 n/a 12283.5909 show_face unfamiliar_face delayed_repeat 9 6 15 u088.bmp
+50.1352727273 n/a 12533.8182 show_circle n/a n/a 9 n/a 0 circle.bmp
+51.8352727273 n/a 12958.8182 show_cross n/a n/a 10 n/a 1 cross.bmp
+52.3916363636 n/a 13097.9091 show_face famous_face first_show 10 n/a 5 f063.bmp
+53.100727272700006 n/a 13275.1818 right_press n/a n/a 10 n/a 4096 n/a
+53.2616363636 n/a 13315.4091 show_circle n/a n/a 10 n/a 0 circle.bmp
+54.96163636359999 n/a 13740.4091 show_cross n/a n/a 11 n/a 1 cross.bmp
+55.5489090909 n/a 13887.2273 show_face unfamiliar_face delayed_repeat 11 7 15 u084.bmp
+56.5589090909 n/a 14139.7273 show_circle n/a n/a 11 n/a 0 circle.bmp
+58.258909090900005 n/a 14564.7273 show_cross n/a n/a 12 n/a 1 cross.bmp
+58.8061818182 n/a 14701.5455 show_face unfamiliar_face first_show 12 n/a 13 u004.bmp
+59.5407272727 n/a 14885.1818 left_press n/a n/a 12 n/a 256 n/a
+59.7270909091 n/a 14931.7727 show_circle n/a n/a 12 n/a 0 circle.bmp
+61.4270909091 n/a 15356.7727 show_cross n/a n/a 13 n/a 1 cross.bmp
+61.9134545455 n/a 15478.3636 show_face unfamiliar_face immediate_repeat 13 1 14 u004.bmp
+62.850727272700006 n/a 15712.6818 left_press n/a n/a 13 n/a 256 n/a
+62.8934545455 n/a 15723.3636 show_circle n/a n/a 13 n/a 0 circle.bmp
+64.5934545455 n/a 16148.3636 show_cross n/a n/a 14 n/a 1 cross.bmp
+65.10436363640001 n/a 16276.0909 show_face famous_face delayed_repeat 14 9 7 f123.bmp
+65.7989090909 n/a 16449.7273 right_press n/a n/a 14 n/a 4096 n/a
+66.10436363640001 n/a 16526.0909 show_circle n/a n/a 14 n/a 0 circle.bmp
+67.8043636364 n/a 16951.0909 show_cross n/a n/a 15 n/a 1 cross.bmp
+68.4289090909 n/a 17107.2273 show_face famous_face first_show 15 n/a 5 f006.bmp
+69.3470909091 n/a 17336.7727 right_press n/a n/a 15 n/a 4096 n/a
+69.3625454545 n/a 17340.6364 show_circle n/a n/a 15 n/a 0 circle.bmp
+71.0625454545 n/a 17765.6364 show_cross n/a n/a 16 n/a 1 cross.bmp
+71.6025454545 n/a 17900.6364 show_face unfamiliar_face delayed_repeat 16 10 15 u022.bmp
+72.32981818180001 n/a 18082.4545 right_press n/a n/a 16 n/a 4096 n/a
+72.5616363636 n/a 18140.4091 show_circle n/a n/a 16 n/a 0 circle.bmp
+74.26163636359999 n/a 18565.4091 show_cross n/a n/a 17 n/a 1 cross.bmp
+74.7598181818 n/a 18689.9545 show_face scrambled_face first_show 17 n/a 17 s043.bmp
+75.6989090909 n/a 18924.7273 left_press n/a n/a 17 n/a 256 n/a
+75.7334545455 n/a 18933.3636 show_circle n/a n/a 17 n/a 0 circle.bmp
+77.4334545455 n/a 19358.3636 show_cross n/a n/a 18 n/a 1 cross.bmp
+78.0670909091 n/a 19516.7727 show_face scrambled_face immediate_repeat 18 1 18 s043.bmp
+78.8107272727 n/a 19702.6818 left_press n/a n/a 18 n/a 256 n/a
+78.9434545455 n/a 19735.8636 show_circle n/a n/a 18 n/a 0 circle.bmp
+80.6434545455 n/a 20160.8636 show_cross n/a n/a 19 n/a 1 cross.bmp
+81.1916363636 n/a 20297.9091 show_face famous_face delayed_repeat 19 12 7 f094.bmp
+81.8416363636 n/a 20460.4091 right_press n/a n/a 19 n/a 4096 n/a
+82.1370909091 n/a 20534.2727 show_circle n/a n/a 19 n/a 0 circle.bmp
+83.8370909091 n/a 20959.2727 show_cross n/a n/a 20 n/a 1 cross.bmp
+84.4989090909 n/a 21124.7273 show_face scrambled_face first_show 20 n/a 17 s083.bmp
+85.43072727270001 n/a 21357.6818 show_circle n/a n/a 20 n/a 0 circle.bmp
+85.6189090909 n/a 21404.7273 left_press n/a n/a 20 n/a 256 n/a
+87.1307272727 n/a 21782.6818 show_cross n/a n/a 21 n/a 1 cross.bmp
+87.75618181819999 n/a 21939.0455 show_face scrambled_face immediate_repeat 21 1 18 s083.bmp
+88.6252727273 n/a 22156.3182 show_circle n/a n/a 21 n/a 0 circle.bmp
+90.3252727273 n/a 22581.3182 show_cross n/a n/a 22 n/a 1 cross.bmp
+90.8970909091 n/a 22724.2727 show_face scrambled_face delayed_repeat 22 14 19 s150.bmp
+91.7243636364 n/a 22931.0909 show_circle n/a n/a 22 n/a 0 circle.bmp
+93.4243636364 n/a 23356.0909 show_cross n/a n/a 23 n/a 1 cross.bmp
+93.8870909091 n/a 23471.7727 show_face famous_face first_show 23 n/a 5 f093.bmp
+94.7770909091 n/a 23694.2727 show_circle n/a n/a 23 n/a 0 circle.bmp
+94.8107272727 n/a 23702.6818 left_press n/a n/a 23 n/a 256 n/a
+96.4770909091 n/a 24119.2727 show_cross n/a n/a 24 n/a 1 cross.bmp
+97.078 n/a 24269.5 show_face famous_face delayed_repeat 24 14 7 f063.bmp
+97.668 n/a 24417.0 right_press n/a n/a 24 n/a 4096 n/a
+97.9389090909 n/a 24484.7273 show_circle n/a n/a 24 n/a 0 circle.bmp
+99.6389090909 n/a 24909.7273 show_cross n/a n/a 25 n/a 1 cross.bmp
+100.218 n/a 25054.5 show_face famous_face first_show 25 n/a 5 f143.bmp
+100.9407272727 n/a 25235.1818 right_press n/a n/a 25 n/a 4096 n/a
+101.2198181818 n/a 25304.9545 show_circle n/a n/a 25 n/a 0 circle.bmp
+102.9198181818 n/a 25729.9545 show_cross n/a n/a 26 n/a 1 cross.bmp
+103.4589090909 n/a 25864.7273 show_face famous_face immediate_repeat 26 1 6 f143.bmp
+104.0743636364 n/a 26018.5909 right_press n/a n/a 26 n/a 4096 n/a
+104.3925454545 n/a 26098.1364 show_circle n/a n/a 26 n/a 0 circle.bmp
+106.0925454545 n/a 26523.1364 show_cross n/a n/a 27 n/a 1 cross.bmp
+106.5661818182 n/a 26641.5455 show_face scrambled_face first_show 27 n/a 17 s142.bmp
+107.408 n/a 26852.0 show_circle n/a n/a 27 n/a 0 circle.bmp
+107.4134545455 n/a 26853.3636 left_press n/a n/a 27 n/a 256 n/a
+109.108 n/a 27277.0 show_cross n/a n/a 28 n/a 1 cross.bmp
+109.7407272727 n/a 27435.1818 show_face famous_face delayed_repeat 28 13 7 f006.bmp
+110.5252727273 n/a 27631.3182 left_press n/a n/a 28 n/a 256 n/a
+110.7089090909 n/a 27677.2273 show_circle n/a n/a 28 n/a 0 circle.bmp
+112.4089090909 n/a 28102.2273 show_cross n/a n/a 29 n/a 1 cross.bmp
+112.998 n/a 28249.5 show_face unfamiliar_face first_show 29 n/a 13 u131.bmp
+113.7443636364 n/a 28436.0909 left_press n/a n/a 29 n/a 256 n/a
+113.9298181818 n/a 28482.4545 show_circle n/a n/a 29 n/a 0 circle.bmp
+115.6298181818 n/a 28907.4545 show_cross n/a n/a 30 n/a 1 cross.bmp
+116.138 n/a 29034.5 show_face unfamiliar_face first_show 30 n/a 13 u020.bmp
+117.1143636364 n/a 29278.5909 show_circle n/a n/a 30 n/a 0 circle.bmp
+118.8143636364 n/a 29703.5909 show_cross n/a n/a 31 n/a 1 cross.bmp
+119.2789090909 n/a 29819.7273 show_face unfamiliar_face immediate_repeat 31 1 14 u020.bmp
+119.8943636364 n/a 29973.5909 left_press n/a n/a 31 n/a 256 n/a
+120.1043636364 n/a 30026.0909 show_circle n/a n/a 31 n/a 0 circle.bmp
+121.8043636364 n/a 30451.0909 show_cross n/a n/a 32 n/a 1 cross.bmp
+122.3025454545 n/a 30575.6364 show_face scrambled_face first_show 32 n/a 17 s088.bmp
+123.1370909091 n/a 30784.2727 left_press n/a n/a 32 n/a 256 n/a
+123.2089090909 n/a 30802.2273 show_circle n/a n/a 32 n/a 0 circle.bmp
+124.9089090909 n/a 31227.2273 show_cross n/a n/a 33 n/a 1 cross.bmp
+125.4098181818 n/a 31352.4545 show_face famous_face delayed_repeat 33 10 7 f093.bmp
+126.0852727273 n/a 31521.3182 right_press n/a n/a 33 n/a 4096 n/a
+126.3307272727 n/a 31582.6818 show_circle n/a n/a 33 n/a 0 circle.bmp
+128.0307272727 n/a 32007.6818 show_cross n/a n/a 34 n/a 1 cross.bmp
+128.5834545455 n/a 32145.8636 show_face scrambled_face first_show 34 n/a 17 s081.bmp
+129.5343636364 n/a 32383.5909 show_circle n/a n/a 34 n/a 0 circle.bmp
+129.778 n/a 32444.5 right_press n/a n/a 34 n/a 4096 n/a
+131.2343636364 n/a 32808.5909 show_cross n/a n/a 35 n/a 1 cross.bmp
+131.758 n/a 32939.5 show_face scrambled_face immediate_repeat 35 1 18 s081.bmp
+132.7398181818 n/a 33184.9545 show_circle n/a n/a 35 n/a 0 circle.bmp
+132.7798181818 n/a 33194.9545 left_press n/a n/a 35 n/a 256 n/a
+134.4398181818 n/a 33609.9545 show_cross n/a n/a 36 n/a 1 cross.bmp
+135.0816363636 n/a 33770.4091 show_face unfamiliar_face first_show 36 n/a 13 u077.bmp
+135.9107272727 n/a 33977.6818 show_circle n/a n/a 36 n/a 0 circle.bmp
+135.9407272727 n/a 33985.1818 right_press n/a n/a 36 n/a 4096 n/a
+137.61072727270002 n/a 34402.6818 show_cross n/a n/a 37 n/a 1 cross.bmp
+138.2061818182 n/a 34551.5455 show_face unfamiliar_face immediate_repeat 37 1 14 u077.bmp
+139.1061818182 n/a 34776.5455 show_circle n/a n/a 37 n/a 0 circle.bmp
+139.12345454549998 n/a 34780.8636 right_press n/a n/a 37 n/a 4096 n/a
+140.8061818182 n/a 35201.5455 show_cross n/a n/a 38 n/a 1 cross.bmp
+141.4307272727 n/a 35357.6818 show_face scrambled_face delayed_repeat 38 11 19 s142.bmp
+142.2698181818 n/a 35567.4545 show_circle n/a n/a 38 n/a 0 circle.bmp
+142.32254545450002 n/a 35580.6364 left_press n/a n/a 38 n/a 256 n/a
+143.9698181818 n/a 35992.4545 show_cross n/a n/a 39 n/a 1 cross.bmp
+144.6207272727 n/a 36155.1818 show_face scrambled_face first_show 39 n/a 17 s034.bmp
+145.4498181818 n/a 36362.4545 right_press n/a n/a 39 n/a 4096 n/a
+145.478 n/a 36369.5 show_circle n/a n/a 39 n/a 0 circle.bmp
+147.178 n/a 36794.5 show_cross n/a n/a 40 n/a 1 cross.bmp
+147.74436363639998 n/a 36936.0909 show_face scrambled_face immediate_repeat 40 1 18 s034.bmp
+148.7343636364 n/a 37183.5909 show_circle n/a n/a 40 n/a 0 circle.bmp
+148.7452727273 n/a 37186.3182 right_press n/a n/a 40 n/a 4096 n/a
+150.4343636364 n/a 37608.5909 show_cross n/a n/a 41 n/a 1 cross.bmp
+150.9016363636 n/a 37725.4091 show_face unfamiliar_face delayed_repeat 41 12 15 u131.bmp
+151.868 n/a 37967.0 right_press n/a n/a 41 n/a 4096 n/a
+151.9189090909 n/a 37979.7273 show_circle n/a n/a 41 n/a 0 circle.bmp
+151.9752727273 n/a 37993.8182 double_press n/a n/a 41 n/a 4352 n/a
+153.6189090909 n/a 38404.7273 show_cross n/a n/a 42 n/a 1 cross.bmp
+154.2598181818 n/a 38564.9545 show_face unfamiliar_face first_show 42 n/a 13 u100.bmp
+155.0989090909 n/a 38774.7273 show_circle n/a n/a 42 n/a 0 circle.bmp
+155.1689090909 n/a 38792.2273 left_press n/a n/a 42 n/a 256 n/a
+156.7989090909 n/a 39199.7273 show_cross n/a n/a 43 n/a 1 cross.bmp
+157.43345454549998 n/a 39358.3636 show_face unfamiliar_face immediate_repeat 43 1 14 u100.bmp
+157.9898181818 n/a 39497.4545 left_press n/a n/a 43 n/a 256 n/a
+158.3389090909 n/a 39584.7273 show_circle n/a n/a 43 n/a 0 circle.bmp
+160.0389090909 n/a 40009.7273 show_cross n/a n/a 44 n/a 1 cross.bmp
+160.5070909091 n/a 40126.7727 show_face scrambled_face first_show 44 n/a 17 s087.bmp
+161.3289090909 n/a 40332.2273 left_press n/a n/a 44 n/a 256 n/a
+161.4152727273 n/a 40353.8182 show_circle n/a n/a 44 n/a 0 circle.bmp
+163.1152727273 n/a 40778.8182 show_cross n/a n/a 45 n/a 1 cross.bmp
+163.748 n/a 40937.0 show_face scrambled_face immediate_repeat 45 1 18 s087.bmp
+164.4807272727 n/a 41120.1818 right_press n/a n/a 45 n/a 4096 n/a
+164.7252727273 n/a 41181.3182 show_circle n/a n/a 45 n/a 0 circle.bmp
+166.42527272729998 n/a 41606.3182 show_cross n/a n/a 46 n/a 1 cross.bmp
+166.97254545450002 n/a 41743.1364 show_face scrambled_face delayed_repeat 46 14 19 s088.bmp
+167.9352727273 n/a 41983.8182 show_circle n/a n/a 46 n/a 0 circle.bmp
+168.1816363636 n/a 42045.4091 left_press n/a n/a 46 n/a 256 n/a
+169.6352727273 n/a 42408.8182 show_cross n/a n/a 47 n/a 1 cross.bmp
+170.2298181818 n/a 42557.4545 show_face scrambled_face first_show 47 n/a 17 s074.bmp
+171.0552727273 n/a 42763.8182 show_circle n/a n/a 47 n/a 0 circle.bmp
+171.398 n/a 42849.5 right_press n/a n/a 47 n/a 4096 n/a
+172.7552727273 n/a 43188.8182 show_cross n/a n/a 48 n/a 1 cross.bmp
+173.2534545455 n/a 43313.3636 show_face unfamiliar_face first_show 48 n/a 13 u058.bmp
+174.1370909091 n/a 43534.2727 right_press n/a n/a 48 n/a 4096 n/a
+174.1789090909 n/a 43544.7273 show_circle n/a n/a 48 n/a 0 circle.bmp
+175.8789090909 n/a 43969.7273 show_cross n/a n/a 49 n/a 1 cross.bmp
+176.49436363639998 n/a 44123.5909 show_face unfamiliar_face immediate_repeat 49 1 14 u058.bmp
+177.33436363639998 n/a 44333.5909 show_circle n/a n/a 49 n/a 0 circle.bmp
+177.4043636364 n/a 44351.0909 right_press n/a n/a 49 n/a 4096 n/a
+179.0343636364 n/a 44758.5909 show_cross n/a n/a 50 n/a 1 cross.bmp
+179.6343636364 n/a 44908.5909 show_face famous_face first_show 50 n/a 5 f038.bmp
+180.4970909091 n/a 45124.2727 show_circle n/a n/a 50 n/a 0 circle.bmp
+180.6652727273 n/a 45166.3182 left_press n/a n/a 50 n/a 256 n/a
+182.1970909091 n/a 45549.2727 show_cross n/a n/a 51 n/a 1 cross.bmp
+182.6752727273 n/a 45668.8182 show_face scrambled_face first_show 51 n/a 17 s090.bmp
+183.4961818182 n/a 45874.0455 show_circle n/a n/a 51 n/a 0 circle.bmp
+183.4961818182 n/a 45874.0455 right_press n/a n/a 51 n/a 4096 n/a
+185.1961818182 n/a 46299.0455 show_cross n/a n/a 52 n/a 1 cross.bmp
+185.6652727273 n/a 46416.3182 show_face famous_face first_show 52 n/a 5 f020.bmp
+186.3670909091 n/a 46591.7727 right_press n/a n/a 52 n/a 4096 n/a
+186.6225454545 n/a 46655.6364 show_circle n/a n/a 52 n/a 0 circle.bmp
diff --git a/tests/data/task-FacePerception_events.json b/tests/data/task-FacePerception_events.json
new file mode 100644
index 00000000..fa018c47
--- /dev/null
+++ b/tests/data/task-FacePerception_events.json
@@ -0,0 +1,138 @@
+{
+ "onset": {
+ "Description": "Position of event marker in seconds relative to the start.",
+ "Units": "s"
+ },
+ "duration": {
+ "Description": "Duration of the event in seconds.",
+ "Units": "s"
+ },
+ "event_type": {
+ "LongName": "Event category",
+ "Description": "The main category of the event.",
+ "Levels": {
+ "show_face": "Display a face to mark end of pre-stimulus and start of blink-inhibition.",
+ "show_face_initial": "Display a face at the beginning of the recording.",
+ "show_circle": "Display a white circle to mark end of the stimulus and blink inhibition.",
+ "show_cross": "Display only a white cross to mark start of trial and fixation.",
+ "left_press": "Experiment participant presses a key with left index finger.",
+ "right_press": "Experiment participant presses a key with right index finger.",
+      "setup_left_sym": "Setup for experiment in which pressing a key with the left index finger indicates a face with above average symmetry.",
+      "setup_right_sym": "Setup for experiment in which pressing a key with the right index finger indicates a face with above average symmetry.",
+      "double_press": "Experiment participant presses both keys."
+ },
+ "HED": {
+ "show_face": "Sensory-event, Experimental-stimulus, (Def/Face-image, Onset), (Def/Blink-inhibition-task,Onset),(Def/Cross-only, Offset)",
+ "show_face_initial": "Sensory-event, Experimental-stimulus, (Def/Face-image, Onset), (Def/Blink-inhibition-task,Onset), (Def/Fixation-task, Onset)",
+ "show_circle": "Sensory-event, (Intended-effect, Cue), (Def/Circle-only, Onset), (Def/Face-image, Offset), (Def/Blink-inhibition-task, Offset), (Def/Fixation-task, Offset)",
+ "show_cross": "Sensory-event, (Intended-effect, Cue), (Def/Cross-only, Onset), (Def/Fixation-task, Onset), (Def/Circle-only, Offset)",
+ "left_press": "Agent-action, Participant-response, Def/Press-left-finger",
+ "right_press": "Agent-action, Participant-response, Def/Press-right-finger",
+ "setup_left_sym": "Experiment-structure, (Def/Left-sym-cond, Onset), (Def/Initialize-recording, Onset)",
+ "setup_right_sym": "Experiment-structure, (Def/Right-sym-cond, Onset), (Def/Initialize-recording, Onset)",
+ "double_press": "Agent-action, Indeterminate-action, (Press, Keyboard-key)"
+ }
+ },
+ "face_type": {
+ "Description": "Factor indicating type of face image being displayed.",
+ "Levels": {
+ "famous_face": "A face that should be recognized by the participants.",
+ "unfamiliar_face": "A face that should not be recognized by the participants.",
+ "scrambled_face": "A scrambled face image generated by taking face 2D FFT."
+ },
+ "HED": {
+ "famous_face": "Def/Famous-face-cond",
+ "unfamiliar_face": "Def/Unfamiliar-face-cond",
+ "scrambled_face": "Def/Scrambled-face-cond"
+ }
+ },
+ "rep_status": {
+ "Description": "Factor indicating whether this image has been already seen.",
+ "Levels": {
+ "first_show": "Factor level indicating the first display of this face.",
+ "immediate_repeat": "Factor level indicating this face was the same as previous one.",
+ "delayed_repeat": "Factor level indicating face was seen 5 to 15 trials ago."
+ },
+ "HED": {
+ "first_show": "Def/First-show-cond",
+ "immediate_repeat": "Def/Immediate-repeat-cond",
+ "delayed_repeat": "Def/Delayed-repeat-cond"
+ }
+ },
+ "trial": {
+ "Description": "Indicates which trial this event belongs to.",
+ "HED": "Experimental-trial/#"
+ },
+ "rep_lag": {
+    "Description": "How many face images before this one this image was previously presented.",
+ "HED": "(Face, Item-interval/#)"
+ },
+ "stim_file": {
+ "Description": "Path of the stimulus file in the stimuli directory.",
+ "HED": "(Image, Pathname/#)"
+ },
+ "hed_def_sensory": {
+ "Description": "Metadata dictionary for gathering sensory definitions",
+ "HED": {
+ "cross_only_def": "(Definition/Cross-only, (Visual-presentation, (Foreground-view, (White, Cross), (Center-of, Computer-screen)), (Background-view, Black), Description/A white fixation cross on a black background in the center of the screen.))",
+ "face_image_def": "(Definition/Face-image, (Visual-presentation, (Foreground-view, ((Image, Face, Hair), Color/Grayscale), ((White, Cross), (Center-of, Computer-screen))), (Background-view, Black), Description/A happy or neutral face in frontal or three-quarters frontal pose with long hair cropped presented as an achromatic foreground image on a black background with a white fixation cross superposed.))",
+ "circle_only_def": "(Definition/Circle-only, (Visual-presentation, (Foreground-view, ((White, Circle), (Center-of, Computer-screen))), (Background-view, Black), Description/A white circle on a black background in the center of the screen.))"
+ }
+ },
+ "hed_def_actions": {
+ "Description": "Metadata dictionary for gathering participant action definitions",
+ "HED": {
+ "press_left_finger_def": "(Definition/Press-left-finger, ((Index-finger, (Left-side-of, Experiment-participant)), (Press, Keyboard-key), Description/The participant presses a key with the left index finger to indicate a face symmetry judgment.))",
+ "press_right_finger_def": "(Definition/Press-right-finger, ((Index-finger, (Right-side-of, Experiment-participant)), (Press, Keyboard-key), Description/The participant presses a key with the right index finger to indicate a face symmetry evaluation.))"
+ }
+ },
+ "hed_def_conds": {
+ "Description": "Metadata dictionary for gathering experimental condition definitions",
+ "HED": {
+ "famous_face_cond_def": "(Definition/Famous-face-cond, (Condition-variable/Face-type, (Image, (Face, Famous)), Description/A face that should be recognized by the participants))",
+ "unfamiliar_face_cond_def": "(Definition/Unfamiliar-face-cond, (Condition-variable/Face-type, (Image, (Face, Unfamiliar)), Description/A face that should not be recognized by the participants.))",
+ "scrambled_face_cond_def": "(Definition/Scrambled-face-cond, (Condition-variable/Face-type, (Image, (Face, Disordered)), Description/A scrambled face image generated by taking face 2D FFT.))",
+ "first_show_cond_def": "(Definition/First-show-cond, ((Condition-variable/Repetition-type, (Item-count/1, Face), Item-interval/0), Description/Factor level indicating the first display of this face.))",
+ "immediate_repeat_cond_def": "(Definition/Immediate-repeat-cond, ((Condition-variable/Repetition-type, (Item-count/2, Face), Item-interval/1), Description/Factor level indicating this face was the same as previous one.))",
+ "delayed_repeat_cond_def": "(Definition/Delayed-repeat-cond, (Condition-variable/Repetition-type, (Item-count/2, Face), (Item-interval, (Greater-than-or-equal-to, Item-interval/5)), Description/Factor level indicating face was seen 5 to 15 trials ago.))",
+ "left_sym_cond_def": "(Definition/Left-sym-cond, (Condition-variable/Key-assignment, ((Index-finger, (Left-side-of, Experiment-participant)), (Behavioral-evidence, Symmetrical)), ((Index-finger, (Right-side-of, Experiment-participant)), (Behavioral-evidence, Asymmetrical)), Description/Left index finger key press indicates a face with above average symmetry.))",
+ "right_sym_cond_def": "(Definition/Right-sym-cond, (Condition-variable/Key-assignment, ((Index-finger, (Right-side-of, Experiment-participant)), (Behavioral-evidence, Symmetrical)), ((Index-finger, (Left-side-of, Experiment-participant)), (Behavioral-evidence, Asymmetrical)), Description/Right index finger key press indicates a face with above average symmetry.))"
+ }
+ },
+ "hed_def_tasks": {
+ "Description": "Metadata dictionary for gathering task definitions",
+ "HED": {
+ "face_symmetry_evaluation_task_def": "(Definition/Face-symmetry-evaluation-task, (Task, Experiment-participant, (See, Face), (Discriminate, (Face, Symmetrical)), (Press, Keyboard-key), Description/Evaluate degree of image symmetry and respond with key press evaluation.))",
+ "blink_inhibition_task_def": "(Definition/Blink-inhibition-task, (Task, Experiment-participant, Inhibit-blinks, Description/Do not blink while the face image is displayed.))",
+ "fixation_task_def": "(Definition/Fixation-task, (Task, Experiment-participant, (Fixate, Cross), Description/Fixate on the cross at the screen center.))"
+ }
+ },
+ "hed_def_setup": {
+ "Description": "Metadata dictionary for gathering setup definitions",
+ "HED": {
+ "setup_def": "(Definition/Initialize-recording, (Recording))"
+ }
+
+ },
+ "value": {
+ "Description": "Numerical event marker",
+ "Levels": {
+ "x0": "Disappearance of face image and display of the inter-stimulus circle simultaneously",
+      "x1": "Disappearance of the inter-stimulus circle and display of the fixation cross to mark the start of the next trial",
+ "x2": "Initial setup with left finger key press indicating above average symmetry",
+ "x3": "Initial setup with right finger key press indicating above average symmetry",
+ "x5": "Initial presentation of famous face",
+ "x6": "Immediate repeated presentation of famous face",
+ "x7": "Delayed repeated presentation of famous face",
+ "x13": "Initial presentation of unfamiliar face",
+ "x14": "Immediate repeated presentation of unfamiliar face",
+ "x15": "Delayed repeated presentation of unfamiliar face",
+ "x17": "Initial presentation of scrambled face",
+ "x18": "Immediate repeated presentation of scrambled face",
+ "x19": "Delayed repeated presentation of scrambled face",
+ "x256": "Left finger key press",
+ "x4096": "Right finger key press",
+ "x4352": "Left and right finger key presses"
+ }
+ }
+}
diff --git a/tests/test_events.py b/tests/test_events.py
index f186d9c8..307ed2b7 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -1,4 +1,5 @@
import os
+import json
import unittest
from werkzeug.test import create_environ
from werkzeug.wrappers import Request
@@ -195,6 +196,49 @@ def test_events_validate_valid(self):
self.assertEqual('success', results['msg_category'],
'validate msg_category should be success when no errors')
+ def test_events_remodel_valid_no_hed(self):
+ from hedweb.events import remodel
+ events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'data/sub-002_task-FacePerception_run-1_events.tsv')
+ remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'data/simple_reorder_remdl.json')
+ events = TabularInput(file=events_path, name='wh_events')
+ df = events.dataframe
+ df_rows = len(df)
+ df_cols = len(df.columns)
+ with open(remodel_path, 'r') as fp:
+ remodel_json = json.load(fp)
+ remodeler = {'name': "simple_reorder_remdl.json", 'commands': remodel_json}
+ hed_schema = None
+ sidecar = None
+
+ with self.app.app_context():
+ results = remodel(hed_schema, events, sidecar, remodeler)
+ self.assertTrue(results['data'], 'remodel results should have a data key when successful')
+            self.assertEqual('success', results['msg_category'], 'remodel msg_category should be success when no errors')
+ # TODO: Test the rows and columns of result.
+
+ def test_events_remodel_invalid_no_hed(self):
+ from hedweb.events import remodel
+ events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'data/sub-002_task-FacePerception_run-1_events.tsv')
+ remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'data/simple_reorder_remdl.json')
+ events = TabularInput(file=events_path, name='wh_events')
+ with open(remodel_path, 'r') as fp:
+ remodeler = json.load(fp)
+ hed_schema = None
+ sidecar = None
+ command_0 = {'badcommand': 'remove_columns', 'description': 'bad structure', 'parameters': {'ignore_missing': True}}
+ command_1 = {'command': 'unknown_command', 'description': 'bad command', 'parameters': {'ignore_missing': True}}
+ command_2 = {'command': 'remove_columns', 'description': 'bad parameters', 'parameters': {'ignore_missing': True}}
+ commands_bad = [command_0, remodeler[0], command_1, remodeler[1], command_2]
+ remodel_bad = {'name': 'remodel_bad.json', 'commands': commands_bad}
+ with self.app.app_context():
+ results = remodel(hed_schema, events, sidecar, remodel_bad)
+ self.assertTrue(results['data'], 'remodel results should have a data key when unsuccessful')
+            self.assertEqual('warning', results['msg_category'], 'remodel msg_category should be warning when there are errors')
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/test_routes/test_routes_events.py b/tests/test_routes/test_routes_events.py
index 80231036..c91f90a8 100644
--- a/tests/test_routes/test_routes_events.py
+++ b/tests/test_routes/test_routes_events.py
@@ -71,6 +71,66 @@ def test_events_results_assemble_invalid(self):
"The response data for invalid event assembly should have error messages")
json_buffer.close()
+ def test_events_results_remodel_valid(self):
+ events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ '../data/sub-002_task-FacePerception_run-1_events.tsv')
+ remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ '../data/simple_reorder_remdl.json')
+ with open(events_path, 'r') as sc:
+ y = sc.read()
+ events_buffer = io.BytesIO(bytes(y, 'utf-8'))
+
+ with open(remodel_path, 'r') as sc:
+ x = sc.read()
+ remodel_buffer = io.BytesIO(bytes(x, 'utf-8'))
+
+ with self.app.app_context():
+ input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
+ base_constants.COMMAND_OPTION: base_constants.COMMAND_REMODEL,
+ base_constants.REMODEL_FILE: (remodel_buffer, 'simple_reorder_remdl.json'),
+ base_constants.EVENTS_FILE: (events_buffer,
+                                                       'sub-002_task-FacePerception_run-1_events.tsv')}
+ response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
+ self.assertTrue(isinstance(response, Response),
+ 'events_submit remodel should return a Response when commands are valid')
+            self.assertEqual(200, response.status_code, 'Remodeling a valid file should return a valid status code')
+ headers_dict = dict(response.headers)
+ self.assertEqual("success", headers_dict["Category"],
+ "A valid remodeling operation should be successful")
+ self.assertTrue(response.data, "The remodeled events file should return data")
+ remodel_buffer.close()
+ events_buffer.close()
+
+ def test_events_results_remodel_invalid(self):
+ events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ '../data/sub-002_task-FacePerception_run-1_events.tsv')
+ remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ '../data/bad_reorder_remdl.json')
+ with open(events_path, 'r') as sc:
+ y = sc.read()
+ events_buffer = io.BytesIO(bytes(y, 'utf-8'))
+
+ with open(remodel_path, 'r') as sc:
+ x = sc.read()
+ remodel_buffer = io.BytesIO(bytes(x, 'utf-8'))
+
+ with self.app.app_context():
+ input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
+ base_constants.COMMAND_OPTION: base_constants.COMMAND_REMODEL,
+ base_constants.REMODEL_FILE: (remodel_buffer, 'bad_reorder_remdl.json'),
+ base_constants.EVENTS_FILE: (events_buffer,
+                                                       'sub-002_task-FacePerception_run-1_events.tsv')}
+ response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
+ self.assertTrue(isinstance(response, Response),
+                            'events_submit remodel should return a Response when commands are invalid')
+            self.assertEqual(200, response.status_code, 'Remodeling an invalid file still returns a valid status code')
+ headers_dict = dict(response.headers)
+ self.assertEqual("warning", headers_dict["Category"],
+                             "An invalid remodeling operation should result in a warning")
+ self.assertTrue(response.data, "The invalid commands should return data")
+ remodel_buffer.close()
+ events_buffer.close()
+
def test_events_results_validate_valid(self):
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.tsv')
From e99482d4fe8cc0cba15f6b4f225c2ac19024ff89 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 09:38:18 -0500
Subject: [PATCH 03/25] Updated the web interface to append datetime to
 downloaded files
---
deploy_hed_dev/Dockerfile | 8 +-
deploy_hed_dev/httpd.conf | 2 +-
deploy_hed_dev/web.wsgi | 2 +-
hedweb/events.py | 38 ++++---
hedweb/sidecar.py | 13 ++-
hedweb/spreadsheet.py | 5 +-
tests/data/full_example.json | 49 +++++++++
tests/data/only_splitevents_example.json | 16 +++
tests/data/rename_example.json | 11 ++
tests/data/splitevents_example.json | 36 +++++++
...ub-0013_task-stopsignal_acq-seq_events.tsv | 101 ++++++++++++++++++
11 files changed, 251 insertions(+), 30 deletions(-)
create mode 100644 tests/data/full_example.json
create mode 100644 tests/data/only_splitevents_example.json
create mode 100644 tests/data/rename_example.json
create mode 100644 tests/data/splitevents_example.json
create mode 100644 tests/data/sub-0013_task-stopsignal_acq-seq_events.tsv
diff --git a/deploy_hed_dev/Dockerfile b/deploy_hed_dev/Dockerfile
index 53823d99..53b024f2 100644
--- a/deploy_hed_dev/Dockerfile
+++ b/deploy_hed_dev/Dockerfile
@@ -13,12 +13,12 @@ pip3 install --no-cache-dir -r requirements.txt && \
pip3 install git+https://github.com/hed-standard/hed-python/@develop \
mkdir -p /var/www/localhost/htdocs && \
cp /etc/mime.types /var/www/mime.types && \
-mkdir -p /var/log/hedtools_dev && \
-chown -R www-data:www-data /var/log/hedtools_dev && \
+mkdir -p /var/log/hedtools && \
+chown -R www-data:www-data /var/log/hedtools && \
mkdir -p /var/cache/schema_cache && \
chown -R www-data:www-data /var/cache/schema_cache
COPY httpd.conf /etc/apache2/apache2.conf
-COPY ./hedtools /var/www/hedtools_dev/
-COPY ./hedtools/hedweb /var/www/hedtools_dev/hedweb/
+COPY ./hedtools /var/www/hedtools/
+COPY ./hedtools/hedweb /var/www/hedtools/hedweb/
ENTRYPOINT /usr/sbin/apache2 -D FOREGROUND -f /etc/apache2/apache2.conf
ENV HEDTOOLS_CONFIG_CLASS=config.ProductionConfig
diff --git a/deploy_hed_dev/httpd.conf b/deploy_hed_dev/httpd.conf
index b327968a..1b358209 100644
--- a/deploy_hed_dev/httpd.conf
+++ b/deploy_hed_dev/httpd.conf
@@ -12,4 +12,4 @@ LoadModule dir_module /usr/lib/apache2/modules/mod_dir.so
LoadModule authz_core_module /usr/lib/apache2/modules/mod_authz_core.so
#LoadModule unixd_module /usr/lib/apache2/modules/mod_unixd.so
LoadModule wsgi_module /usr/local/lib/python3.9/site-packages/mod_wsgi/server/mod_wsgi-py39.cpython-39-x86_64-linux-gnu.so
-WSGIScriptAlias / /var/www/hedtools_dev/web.wsgi
+WSGIScriptAlias / /var/www/hedtools/web.wsgi
diff --git a/deploy_hed_dev/web.wsgi b/deploy_hed_dev/web.wsgi
index f8266178..13923de3 100644
--- a/deploy_hed_dev/web.wsgi
+++ b/deploy_hed_dev/web.wsgi
@@ -1,3 +1,3 @@
import sys
-sys.path.insert(0, "/var/www/hedtools_dev")
+sys.path.insert(0, "/var/www/hedtools")
from hedweb.runserver import app as application
diff --git a/hedweb/events.py b/hedweb/events.py
index bcb2f2ed..88a64160 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -78,7 +78,7 @@ def process(arguments):
raise HedFileError('BadHedSchema', "Please provide a valid HedSchema for event processing", "")
events = arguments.get(base_constants.EVENTS, None)
sidecar = arguments.get(base_constants.JSON_SIDECAR, None)
- remodeler = arguments.get(base_constants.REMODEL_COMMANDS, None)
+ remodel_commands = arguments.get(base_constants.REMODEL_COMMANDS, None)
query = arguments.get(base_constants.QUERY, None)
columns_included = arguments.get(base_constants.COLUMNS_INCLUDED, None)
if not events or not isinstance(events, TabularInput):
@@ -94,7 +94,7 @@ def process(arguments):
elif command == base_constants.COMMAND_GENERATE_SIDECAR:
results = generate_sidecar(events, arguments.get(base_constants.COLUMNS_SELECTED, None))
elif command == base_constants.COMMAND_REMODEL:
- results = remodel(hed_schema, events, sidecar, remodeler)
+ results = remodel(hed_schema, events, sidecar, remodel_commands)
else:
raise HedFileError('UnknownEventsProcessingMethod', f'Command {command} is missing or invalid', '')
return results
@@ -121,7 +121,7 @@ def assemble(hed_schema, events, columns_included=None, expand_defs=True):
df, defs = assemble_hed(events, columns_included=columns_included, expand_defs=expand_defs)
csv_string = df.to_csv(None, sep='\t', index=False, header=True)
display_name = events.name
- file_name = generate_filename(display_name, name_suffix='_expanded', extension='.tsv')
+ file_name = generate_filename(display_name, name_suffix='_expanded', extension='.tsv', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_ASSEMBLE,
base_constants.COMMAND_TARGET: 'events',
'data': csv_string, 'output_display_name': file_name, 'definitions': DefinitionDict.get_as_strings(defs),
@@ -152,7 +152,7 @@ def generate_sidecar(events, columns_selected):
hed_dict[column_name] = generate_sidecar_entry(column_name, column_values=column_values)
display_name = events.name
- file_name = generate_filename(display_name, name_suffix='_generated', extension='.json')
+ file_name = generate_filename(display_name, name_suffix='_generated', extension='.json', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_GENERATE_SIDECAR,
base_constants.COMMAND_TARGET: 'events',
'data': json.dumps(hed_dict, indent=4),
@@ -160,14 +160,14 @@ def generate_sidecar(events, columns_selected):
'msg': 'JSON sidecar generation from event file complete'}
-def remodel(hed_schema, events, sidecar, remodeler):
+def remodel(hed_schema, events, sidecar, remodel_commands):
""" Remodel a given events file.
Args:
hed_schema (HedSchema, HedSchemaGroup or None): A HED schema or HED schema group.
- events (EventsInput): An events input object.
- sidecar (Sidecar or None): A sidecar object.
- remodeler (dict): Remodeling file.
+        events (TabularInput): An events input object.
+ sidecar (Sidecar or None): A sidecar object.
+ remodel_commands (dict): A dictionary with the name and command list of the remodeling file.
Returns:
dict: A dictionary pointing to results or errors.
@@ -179,25 +179,27 @@ def remodel(hed_schema, events, sidecar, remodeler):
schema_version = hed_schema.version
else:
schema_version = None
- remodeler_name = remodeler['name']
- remodeler_commands = remodeler['commands']
- command_list, errors = Dispatcher.parse_commands(remodeler_commands)
+ remodel_name = remodel_commands['name']
+ commands = remodel_commands['commands']
+ command_list, errors = Dispatcher.parse_commands(commands)
if errors:
issue_str = Dispatcher.errors_to_str(errors)
- file_name = generate_filename(remodeler_name, name_suffix='_command_parse_errors', extension='.txt')
+ file_name = generate_filename(remodel_name, name_suffix='_command_parse_errors',
+ extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
base_constants.COMMAND_TARGET: 'events',
'data': issue_str, "output_display_name": file_name,
base_constants.SCHEMA_VERSION: schema_version, "msg_category": "warning",
'msg': f"Remodeling command file for {display_name} had validation errors"}
df = events.dataframe
- dispatch = Dispatcher(remodeler_commands, data_root=None, hed_versions=schema_version)
+ dispatch = Dispatcher(commands, data_root=None, hed_versions=schema_version)
df = dispatch.prep_events(df)
for operation in dispatch.parsed_ops:
df = operation.do_op(dispatch, df, display_name, sidecar=sidecar)
df = df.fillna('n/a')
csv_string = df.to_csv(None, sep='\t', index=False, header=True)
- file_name = generate_filename(display_name, name_suffix='_remodeled', extension='.tsv')
+ name_suffix = f"_remodeled_by_{remodel_name}"
+ file_name = generate_filename(display_name, name_suffix=name_suffix, extension='.tsv', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
base_constants.COMMAND_TARGET: 'events', 'data': csv_string, "output_display_name": file_name,
base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
@@ -233,7 +235,7 @@ def search(hed_schema, events, query, columns_included=None):
csv_string = ''
msg = f"Events file has no events satisfying the query {query}."
display_name = events.name
- file_name = generate_filename(display_name, name_suffix='_query', extension='.tsv')
+ file_name = generate_filename(display_name, name_suffix='_query', extension='.tsv', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_SEARCH,
base_constants.COMMAND_TARGET: 'events',
'data': csv_string, 'output_display_name': file_name,
@@ -268,7 +270,8 @@ def validate(hed_schema, events, sidecar=None, check_for_warnings=False):
issue_str = get_printable_issue_string(issues, title="Event file errors:")
if issue_str:
- file_name = generate_filename(display_name, name_suffix='_validation_errors', extension='.txt')
+ file_name = generate_filename(display_name, name_suffix='_validation_errors',
+ extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'events',
'data': issue_str, "output_display_name": file_name,
@@ -297,7 +300,8 @@ def validate_query(hed_schema, query):
if not query:
display_name = 'empty_query'
issue_str = "Empty query could not be processed."
- file_name = generate_filename(display_name, name_suffix='_validation_errors', extension='.txt')
+ file_name = generate_filename(display_name, name_suffix='_validation_errors',
+ extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'query',
'data': issue_str, "output_display_name": file_name,
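
The remodel() changes above route every command file through the hed remodeling Dispatcher. A condensed sketch of that flow, limited to the calls visible in the hunk (parse_commands, errors_to_str, prep_events, parsed_ops, do_op); the wrapper function and the import path are assumptions, since the patch does not show the imports in events.py.

from hed.tools.remodeling.dispatcher import Dispatcher   # assumed import path

def run_remodel(df, commands, display_name, schema_version=None, sidecar=None):
    # Validate the command list first; in remodel() the error string becomes a
    # downloadable *_command_parse_errors .txt report.
    parsed, errors = Dispatcher.parse_commands(commands)
    if errors:
        return None, Dispatcher.errors_to_str(errors)
    # remodel() constructs the Dispatcher from the raw commands, preps the
    # dataframe, then applies each parsed operation in order.
    dispatch = Dispatcher(commands, data_root=None, hed_versions=schema_version)
    df = dispatch.prep_events(df)
    for operation in dispatch.parsed_ops:
        df = operation.do_op(dispatch, df, display_name, sidecar=sidecar)
    return df.fillna('n/a'), None
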
diff --git a/hedweb/sidecar.py b/hedweb/sidecar.py
index cfffa9e1..dc655454 100644
--- a/hedweb/sidecar.py
+++ b/hedweb/sidecar.py
@@ -126,14 +126,15 @@ def sidecar_convert(hed_schema, sidecar, command=base_constants.COMMAND_TO_SHORT
display_name = sidecar.name
if issues:
issue_str = get_printable_issue_string(issues, f"JSON conversion for {display_name} was unsuccessful")
- file_name = generate_filename(display_name, name_suffix=f"_{tag_form}_conversion_errors", extension='.txt')
+ file_name = generate_filename(display_name, name_suffix=f"_{tag_form}_conversion_errors",
+ extension='.txt', append_datetime=True)
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'sidecar',
'data': issue_str, 'output_display_name': file_name,
base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'warning',
'msg': f'JSON file {display_name} had validation errors'}
else:
- file_name = generate_filename(display_name, name_suffix=f"_{tag_form}", extension='.json')
+ file_name = generate_filename(display_name, name_suffix=f"_{tag_form}", extension='.json', append_datetime=True)
data = sidecar.get_as_json_string()
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'sidecar',
@@ -158,7 +159,7 @@ def sidecar_extract(sidecar):
df = hed_to_df(str_sidecar)
data = df.to_csv(None, sep='\t', index=False, header=True)
display_name = sidecar.name
- file_name = generate_filename(display_name, name_suffix='_extracted', extension='.tsv')
+ file_name = generate_filename(display_name, name_suffix='_extracted', extension='.tsv', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_EXTRACT_SPREADSHEET,
base_constants.COMMAND_TARGET: 'sidecar',
'data': data, 'output_display_name': file_name,
@@ -188,7 +189,8 @@ def sidecar_merge(sidecar, spreadsheet, include_description_tags=False):
merge_hed_dict(sidecar_dict, hed_dict)
display_name = sidecar.name
data = json.dumps(sidecar_dict, indent=4)
- file_name = generate_filename(display_name, name_suffix='_extracted_merged', extension='.json')
+ file_name = generate_filename(display_name, name_suffix='_extracted_merged',
+ extension='.json', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_EXTRACT_SPREADSHEET,
base_constants.COMMAND_TARGET: 'sidecar',
'data': data, 'output_display_name': file_name,
@@ -214,7 +216,8 @@ def sidecar_validate(hed_schema, sidecar, check_for_warnings=False):
issues = sidecar.validate_entries(validator, check_for_warnings=check_for_warnings)
if issues:
issue_str = get_printable_issue_string(issues, f"JSON dictionary {sidecar.name} validation errors")
- file_name = generate_filename(display_name, name_suffix='validation_errors', extension='.txt')
+ file_name = generate_filename(display_name, name_suffix='validation_errors',
+ extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'sidecar',
'data': issue_str, 'output_display_name': file_name,
diff --git a/hedweb/spreadsheet.py b/hedweb/spreadsheet.py
index 99864ee6..2f49cc6c 100644
--- a/hedweb/spreadsheet.py
+++ b/hedweb/spreadsheet.py
@@ -111,7 +111,7 @@ def spreadsheet_convert(hed_schema, spreadsheet, command=base_constants.COMMAND_
suffix = '_to_short'
spreadsheet.convert_to_short(hed_schema)
- file_name = generate_filename(display_name, name_suffix=suffix, extension=display_ext)
+ file_name = generate_filename(display_name, name_suffix=suffix, extension=display_ext, append_datetime=True)
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'spreadsheet', 'data': '',
base_constants.SPREADSHEET: spreadsheet, 'output_display_name': file_name,
@@ -137,7 +137,8 @@ def spreadsheet_validate(hed_schema, spreadsheet, check_for_warnings=False):
display_name = spreadsheet.name
if issues:
issue_str = get_printable_issue_string(issues, f"Spreadsheet {display_name} validation errors")
- file_name = generate_filename(display_name, name_suffix='_validation_errors', extension='.txt')
+ file_name = generate_filename(display_name, name_suffix='_validation_errors',
+ extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'spreadsheet',
'data': issue_str, "output_display_name": file_name,
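
The thread running through the events.py, sidecar.py, and spreadsheet.py changes above is the new append_datetime=True argument to generate_filename. The helper itself is not shown in this patch, so the sketch below is only a hypothetical illustration of the kind of name such a flag produces (a timestamp inserted before the extension); the actual format used by hedweb may differ.

from datetime import datetime

def stamped_filename(display_name, name_suffix='', extension=''):
    # Hypothetical stand-in for generate_filename(..., append_datetime=True):
    # append a timestamp so successive downloads do not overwrite each other.
    timestamp = datetime.now().strftime('%Y_%m_%d_T_%H_%M_%S')
    return f"{display_name}{name_suffix}_{timestamp}{extension}"

# e.g. stamped_filename('face_events', '_remodeled', '.tsv')
# -> 'face_events_remodeled_2022_09_13_T_09_38_18.tsv'
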
diff --git a/tests/data/full_example.json b/tests/data/full_example.json
new file mode 100644
index 00000000..187e523f
--- /dev/null
+++ b/tests/data/full_example.json
@@ -0,0 +1,49 @@
+[
+ {
+ "command": "rename_columns",
+    "description": "Rename the sex column to face_gender.",
+ "parameters": {
+ "column_mapping": {"sex": "face_gender"},
+ "ignore_missing": false
+ }
+ },
+ {
+ "command": "split_event",
+ "description": "Split trial encoding into event encoding including trial events.",
+ "parameters": {
+ "anchor_column": "event_type",
+ "new_events": {"response": {"onset_source": ["response_time"],
+ "duration": [0],
+ "copy_columns": ["response_accuracy",
+ "response_hand",
+ "trial_type"]},
+ "stop_signal": {"onset_source": ["stop_signal_delay"],
+ "duration": [0.5],
+ "copy_columns": ["response_accuracy",
+ "response_hand",
+ "trial_type"]},
+ "go_signal": {"onset_source": [0],
+ "duration": ["duration"],
+ "copy_columns": ["response_accuracy",
+ "response_hand",
+ "trial_type",
+ "face_gender"]},
+ "trial": {"onset_source": [0],
+ "duration": [2],
+ "copy_columns": ["response_accuracy",
+ "response_hand",
+ "trial_type",
+ "face_gender"]}},
+ "remove_parent_event": true
+ }
+ },
+ {
+ "command": "remove_columns",
+    "description": "Remove the stop_signal_delay and response_time columns after splitting the events.",
+ "parameters": {
+ "remove_names": ["stop_signal_delay", "response_time"],
+ "ignore_missing": true
+ }
+ }
+]
+
diff --git a/tests/data/only_splitevents_example.json b/tests/data/only_splitevents_example.json
new file mode 100644
index 00000000..972fc973
--- /dev/null
+++ b/tests/data/only_splitevents_example.json
@@ -0,0 +1,16 @@
+[
+ {
+ "command": "split_event",
+ "description": "Create separate response event from response time column.",
+ "parameters": {
+ "anchor_column": "event_type",
+ "new_events": {"response": {"onset_source": ["response_time"],
+ "duration": [0],
+ "copy_columns": ["response_accuracy",
+ "response_hand",
+ "trial_type"]}},
+ "remove_parent_event": false
+ }
+ }
+]
+
diff --git a/tests/data/rename_example.json b/tests/data/rename_example.json
new file mode 100644
index 00000000..881917f7
--- /dev/null
+++ b/tests/data/rename_example.json
@@ -0,0 +1,11 @@
+[
+ {
+ "command": "rename_columns",
+    "description": "Rename the sex column to face_gender.",
+ "parameters": {
+ "column_mapping": {"sex": "face_gender"},
+ "ignore_missing": false
+ }
+ }
+]
+
diff --git a/tests/data/splitevents_example.json b/tests/data/splitevents_example.json
new file mode 100644
index 00000000..b24965e8
--- /dev/null
+++ b/tests/data/splitevents_example.json
@@ -0,0 +1,36 @@
+[
+ {
+ "command": "rename_columns",
+    "description": "Rename the sex column to face_gender.",
+ "parameters": {
+ "column_mapping": {
+ "sex": "face_gender"
+ },
+ "ignore_missing": false
+ }
+ },
+ {
+ "command": "split_event",
+ "description": "Create separate response event from response time column.",
+ "parameters": {
+ "anchor_column": "event_type",
+ "new_events": {
+ "response": {
+ "onset_source": [
+ "response_time"
+ ],
+ "duration": [
+ 0
+ ],
+ "copy_columns": [
+ "response_accuracy",
+ "response_hand",
+ "trial_type"
+ ]
+ }
+ },
+ "remove_parent_event": false
+ }
+ }
+]
+
diff --git a/tests/data/sub-0013_task-stopsignal_acq-seq_events.tsv b/tests/data/sub-0013_task-stopsignal_acq-seq_events.tsv
new file mode 100644
index 00000000..05bffac2
--- /dev/null
+++ b/tests/data/sub-0013_task-stopsignal_acq-seq_events.tsv
@@ -0,0 +1,101 @@
+onset duration trial_type stop_signal_delay response_time response_accuracy response_hand sex
+0.0776 0.5083 go n/a 0.565 correct right female
+5.5774 0.5083 unsuccesful_stop 0.2 0.49 correct right female
+9.5856 0.5084 go n/a 0.45 correct right female
+13.5939 0.5083 succesful_stop 0.2 n/a n/a right female
+17.1021 0.5083 unsuccesful_stop 0.25 0.633 correct left male
+21.6103 0.5083 go n/a 0.443 correct left male
+24.6186 0.5083 go n/a 0.439 correct left male
+28.6268 0.5083 go n/a 0.667 correct left male
+32.1434 0.5083 go n/a 0.55 correct right female
+36.1516 0.5083 succesful_stop 0.25 n/a n/a right female
+41.6514 0.5084 go n/a 0.59 correct right female
+44.6597 0.5083 unsuccesful_stop 0.3 0.511 correct right female
+49.6679 0.5083 go n/a 0.604 correct right female
+52.1845 0.5083 go n/a 0.743 correct left male
+56.1927 0.5084 succesful_stop 0.3 n/a n/a right female
+60.6926 0.5083 unsuccesful_stop 0.35 0.555 correct left male
+65.7008 0.5083 go n/a 0.584 correct right female
+73.7173 0.5083 succesful_stop 0.35 n/a n/a right female
+76.7255 0.5083 succesful_stop 0.4 n/a n/a right male
+81.2337 0.5084 go n/a 0.615 correct left male
+84.742 0.5083 go n/a 0.754 correct left male
+89.2502 0.5083 go n/a 0.777 correct right female
+92.2668 0.5083 go n/a 0.644 correct right female
+97.2666 0.5084 unsuccesful_stop 0.45 0.629 correct right female
+100.2832 0.5083 go n/a 0.714 correct right female
+104.7831 0.5083 go n/a 0.627 correct left male
+108.2997 0.5083 go n/a 0.668 correct left male
+113.2995 0.5084 go n/a 0.558 correct left male
+117.3078 0.5083 go n/a 1.038 incorrect left female
+120.816 0.5083 go n/a 0.764 correct left male
+125.8242 0.5083 go n/a 0.782 correct right female
+129.3325 0.5083 unsuccesful_stop 0.5 0.722 correct left male
+132.8407 0.5083 go n/a 0.716 correct right female
+137.8489 0.5083 go n/a 0.741 correct right female
+141.3571 0.5084 succesful_stop 0.5 n/a n/a right male
+145.8653 0.5084 go n/a 1.027 correct right female
+149.3736 0.5083 go n/a 0.881 correct left male
+153.3818 0.5083 go n/a 0.801 correct right female
+157.89 0.5084 go n/a 0.803 correct left male
+160.8983 0.5083 go n/a 0.771 correct right female
+164.4149 0.5083 succesful_stop 0.55 n/a n/a right female
+169.4147 0.5083 go n/a 0.899 correct left male
+172.923 0.5083 unsuccesful_stop 0.6 0.754 correct left male
+176.9312 0.5083 go n/a 1.11 correct left male
+180.4478 0.5083 succesful_stop 0.65 n/a n/a right male
+188.9559 0.5083 unsuccesful_stop 0.7 0.867 correct right female
+193.4641 0.5083 unsuccesful_stop 0.75 0.814 correct left male
+197.4723 0.5083 go n/a 1.21 correct right female
+201.4805 0.5084 go n/a 0.859 correct left male
+204.9888 0.5083 unsuccesful_stop 0.75 0.973 correct right female
+212.5136 0.5083 go n/a 1.02 correct left male
+221.5217 0.5083 go n/a 0.817 correct left male
+225.5299 0.5083 go n/a 1.038 correct right female
+228.5465 0.5083 go n/a 1.049 correct right female
+234.0463 0.5084 go n/a 0.92 correct left male
+237.0546 0.5083 succesful_stop 0.7 n/a n/a right female
+241.0628 0.5083 go n/a 1.266 correct right female
+245.071 0.5084 unsuccesful_stop 0.7 0.854 correct right female
+248.5876 0.5083 go n/a 0.985 correct left male
+254.0875 0.5083 go n/a 0.789 correct right female
+260.6123 0.5083 go n/a 0.928 correct right female
+266.1122 0.5083 go n/a 0.807 correct left male
+269.6204 0.5083 go n/a 0.735 correct left male
+273.6286 0.5083 succesful_stop 0.65 n/a n/a right male
+277.6368 0.5084 go n/a 0.896 correct right female
+281.6451 0.5083 succesful_stop 0.65 n/a n/a right female
+289.6615 0.5083 unsuccesful_stop 0.7 0.831 correct right female
+293.1698 0.5083 go n/a 0.876 correct left male
+296.6863 0.5084 go n/a 1.021 correct right female
+302.1862 0.5083 unsuccesful_stop 0.7 1.085 correct left male
+306.1944 0.5083 succesful_stop 0.65 n/a n/a right female
+309.2027 0.5083 go n/a 0.814 correct right female
+313.2109 0.5083 go n/a 1.053 correct left male
+318.2191 0.5083 go n/a 1.002 correct left male
+322.2273 0.5083 go n/a 1.057 correct right female
+326.2355 0.5084 succesful_stop 0.65 n/a n/a right male
+330.2438 0.5083 succesful_stop 0.7 n/a n/a right male
+334.252 0.5083 go n/a 0.962 correct left male
+341.2685 0.5083 go n/a 0.817 correct right female
+346.2767 0.5083 unsuccesful_stop 0.75 0.822 correct left male
+350.2849 0.5083 go n/a 0.889 correct left male
+353.2932 0.5083 go n/a 0.946 correct right female
+358.3014 0.5083 go n/a 0.911 correct right female
+360.818 0.5083 unsuccesful_stop 0.8 1.054 correct left male
+364.8262 0.5083 go n/a 0.966 correct right female
+368.8344 0.5083 unsuccesful_stop 0.8 0.99 correct right female
+373.8343 0.5083 go n/a 1.004 correct right female
+377.8425 0.5083 unsuccesful_stop 0.75 0.909 correct left male
+381.8507 0.5084 go n/a 0.859 correct left male
+385.859 0.5083 go n/a 1.186 correct right female
+389.3672 0.5083 go n/a 1.288 correct right female
+393.3754 0.5083 go n/a 0.979 correct left male
+398.3836 0.5084 go n/a 1.067 correct left male
+400.9002 0.5083 succesful_stop 0.7 n/a n/a right male
+409.4083 0.5084 go n/a 0.901 correct left male
+414.4165 0.5084 unsuccesful_stop 0.65 0.879 correct left male
+418.4248 0.5083 go n/a 1.003 correct left male
+422.433 0.5083 succesful_stop 0.6 n/a n/a right female
+429.9495 0.5083 succesful_stop 0.55 n/a n/a right female
+437.9659 0.5083 go n/a 0.866 correct left male
From 05a7dd8079311fd2a0167a614bbbebd3c306c7b9 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 11:14:39 -0500
Subject: [PATCH 04/25] Minor update to the deploy.sh for deploy_hed_dev
---
deploy_hed_dev/deploy.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/deploy_hed_dev/deploy.sh b/deploy_hed_dev/deploy.sh
index 55a5aeb1..bb296a05 100644
--- a/deploy_hed_dev/deploy.sh
+++ b/deploy_hed_dev/deploy.sh
@@ -19,7 +19,7 @@ HOST_PORT=33004
CONTAINER_PORT=80
CODE_DEPLOY_DIR="${DEPLOY_DIR}/hedtools"
-SOURCE_DEPLOY_DIR="${DEPLOY_DIR}/hed-web/deploy_hed"
+SOURCE_DEPLOY_DIR="${DEPLOY_DIR}/hed-web/deploy_hed_dev"
BASE_CONFIG_FILE="${SOURCE_DEPLOY_DIR}/base_config.py"
CONFIG_FILE="${CODE_DEPLOY_DIR}/config.py"
SOURCE_WSGI_FILE="${SOURCE_DEPLOY_DIR}/web.wsgi"
From df2de44720de35024adef8af538780ca8cdc888c Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 14:12:07 -0500
Subject: [PATCH 05/25] Removed the hedtools from the requirements for the
docker deployment
---
deploy_hed_dev/requirements.txt | 1 -
1 file changed, 1 deletion(-)
diff --git a/deploy_hed_dev/requirements.txt b/deploy_hed_dev/requirements.txt
index deda73e9..b60d7402 100644
--- a/deploy_hed_dev/requirements.txt
+++ b/deploy_hed_dev/requirements.txt
@@ -6,7 +6,6 @@ defusedxml==0.7.1
et-xmlfile==1.1.0
Flask==2.1.2
Flask-WTF==1.0.1
-hedtools==0.1.0
inflect>=5.5.2
itsdangerous==2.1.2
jdcal==1.4.1
From dc7c14cf78ef945ed7f9aedf52f9087d94e5c19c Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 15:40:22 -0500
Subject: [PATCH 06/25] Updated the dockerfile continuation line
---
deploy_hed_dev/Dockerfile | 2 +-
deploy_hed_dev/deploy.sh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/deploy_hed_dev/Dockerfile b/deploy_hed_dev/Dockerfile
index 53b024f2..11bc0865 100644
--- a/deploy_hed_dev/Dockerfile
+++ b/deploy_hed_dev/Dockerfile
@@ -10,7 +10,7 @@ apache2 \
apache2-dev && \
pip3 install --upgrade pip && \
pip3 install --no-cache-dir -r requirements.txt && \
-pip3 install git+https://github.com/hed-standard/hed-python/@develop \
+pip3 install git+https://github.com/hed-standard/hed-python/@develop && \
mkdir -p /var/www/localhost/htdocs && \
cp /etc/mime.types /var/www/mime.types && \
mkdir -p /var/log/hedtools && \
diff --git a/deploy_hed_dev/deploy.sh b/deploy_hed_dev/deploy.sh
index bb296a05..3eebd71e 100644
--- a/deploy_hed_dev/deploy.sh
+++ b/deploy_hed_dev/deploy.sh
@@ -127,4 +127,4 @@ switch_to_web_directory
build_new_container
delete_old_container
run_new_container
-cleanup_directory
+# cleanup_directory
From 2b94daba092de3b97ff22364dba5d333685b6720 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 15:54:23 -0500
Subject: [PATCH 07/25] Added a git install to the dockerfile
---
deploy_hed_dev/Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/deploy_hed_dev/Dockerfile b/deploy_hed_dev/Dockerfile
index 11bc0865..91ba40dd 100644
--- a/deploy_hed_dev/Dockerfile
+++ b/deploy_hed_dev/Dockerfile
@@ -1,7 +1,7 @@
FROM python:3.9-slim-buster
COPY requirements.txt /root/
WORKDIR /root
-RUN apt-get update && apt-get install -y gcc \
+RUN apt-get update && apt-get install -y gcc && apt-get install -y git\
musl-dev \
openrc \
libxslt-dev \
From 460bfb74041290cc721a2df1a01c9896460fbd33 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 16:07:53 -0500
Subject: [PATCH 08/25] Updated the dockerfile with a different approach to
installing git
---
deploy_hed_dev/Dockerfile | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/deploy_hed_dev/Dockerfile b/deploy_hed_dev/Dockerfile
index 91ba40dd..a18c6943 100644
--- a/deploy_hed_dev/Dockerfile
+++ b/deploy_hed_dev/Dockerfile
@@ -1,7 +1,8 @@
FROM python:3.9-slim-buster
COPY requirements.txt /root/
WORKDIR /root
-RUN apt-get update && apt-get install -y gcc && apt-get install -y git\
+RUN apt-get update && apt-get install -y gcc \
+git \
musl-dev \
openrc \
libxslt-dev \
From 99229c9af79d66a5c0252bb905e34bff6a3b7bff Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 13 Sep 2022 16:56:46 -0500
Subject: [PATCH 09/25] Updated the deploy script
---
deploy_hed_dev/deploy.sh | 2 +-
hedweb/templates/layout.html | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/deploy_hed_dev/deploy.sh b/deploy_hed_dev/deploy.sh
index 3eebd71e..bb296a05 100644
--- a/deploy_hed_dev/deploy.sh
+++ b/deploy_hed_dev/deploy.sh
@@ -127,4 +127,4 @@ switch_to_web_directory
build_new_container
delete_old_container
run_new_container
-# cleanup_directory
+cleanup_directory
diff --git a/hedweb/templates/layout.html b/hedweb/templates/layout.html
index c6f42cd1..424b2e2b 100644
--- a/hedweb/templates/layout.html
+++ b/hedweb/templates/layout.html
@@ -48,7 +48,7 @@
Web-based tools for HED schema, HED tags, and event
{% block footer %}
From 9089ae3712a4a86d0695f28f515f6dce68262d84 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Sun, 2 Oct 2022 11:24:57 -0500
Subject: [PATCH 10/25] Trying to get summaries to download
---
hedweb/constants/base_constants.py | 4 +-
hedweb/events.py | 44 +++---
hedweb/services.py | 4 +-
hedweb/templates/js/events-form.js | 2 +-
hedweb/templates/js/form-helpers.js | 4 +-
hedweb/web_util.py | 127 +++++++++++++-----
requirements.txt | 32 ++---
...er_remdl.json => simple_reorder_rmdl.json} | 4 +-
tests/test_events.py | 17 +--
tests/test_routes/test_routes_events.py | 4 +-
tests/test_web_util.py | 37 +++--
11 files changed, 174 insertions(+), 105 deletions(-)
rename tests/data/{simple_reorder_remdl.json => simple_reorder_rmdl.json} (82%)
diff --git a/hedweb/constants/base_constants.py b/hedweb/constants/base_constants.py
index ed95f1da..b292ac66 100644
--- a/hedweb/constants/base_constants.py
+++ b/hedweb/constants/base_constants.py
@@ -67,12 +67,14 @@
JSON_SIDECARS = 'json_sidecars'
JSON_STRING = 'json_string'
+MSG = 'msg'
+MSG_CATEGORY = 'msg_category'
OTHER_VERSION_OPTION = 'Other'
OUTPUT_DISPLAY_NAME = 'output_display_name'
QUERY = 'query'
REMODEL_FILE = 'remodel_file'
-REMODEL_COMMANDS = 'remodel_commands'
+REMODEL_OPERATIONS = 'remodel_operations'
REMOVE_DEFS = 'remove_defs'
REQUIRED_COLUMN_INDICES = 'required_column_indices'
diff --git a/hedweb/events.py b/hedweb/events.py
index 88a64160..46d35272 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -1,5 +1,6 @@
from flask import current_app
import json
+import io
from werkzeug.utils import secure_filename
import pandas as pd
@@ -44,11 +45,12 @@ def get_events_form_input(request):
json_sidecar = Sidecar(files=f, name=secure_filename(f.filename))
arguments[base_constants.JSON_SIDECAR] = json_sidecar
remodel = None
- if base_constants.REMODEL_FILE in request.files:
+ if arguments[base_constants.COMMAND] == base_constants.COMMAND_REMODEL and \
+ base_constants.REMODEL_FILE in request.files:
f = request.files[base_constants.REMODEL_FILE]
name = secure_filename(f.filename)
- remodel = {'name': name, 'commands': json.load(f)}
- arguments[base_constants.REMODEL_COMMANDS] = remodel
+ remodel = {'name': name, 'operations': json.load(f)}
+ arguments[base_constants.REMODEL_OPERATIONS] = remodel
if base_constants.EVENTS_FILE in request.files:
f = request.files[base_constants.EVENTS_FILE]
arguments[base_constants.EVENTS] = \
@@ -78,7 +80,7 @@ def process(arguments):
raise HedFileError('BadHedSchema', "Please provide a valid HedSchema for event processing", "")
events = arguments.get(base_constants.EVENTS, None)
sidecar = arguments.get(base_constants.JSON_SIDECAR, None)
- remodel_commands = arguments.get(base_constants.REMODEL_COMMANDS, None)
+ remodel_operations = arguments.get(base_constants.REMODEL_OPERATIONS, None)
query = arguments.get(base_constants.QUERY, None)
columns_included = arguments.get(base_constants.COLUMNS_INCLUDED, None)
if not events or not isinstance(events, TabularInput):
@@ -94,7 +96,7 @@ def process(arguments):
elif command == base_constants.COMMAND_GENERATE_SIDECAR:
results = generate_sidecar(events, arguments.get(base_constants.COLUMNS_SELECTED, None))
elif command == base_constants.COMMAND_REMODEL:
- results = remodel(hed_schema, events, sidecar, remodel_commands)
+ results = remodel(hed_schema, events, sidecar, remodel_operations)
else:
raise HedFileError('UnknownEventsProcessingMethod', f'Command {command} is missing or invalid', '')
return results
@@ -160,14 +162,14 @@ def generate_sidecar(events, columns_selected):
'msg': 'JSON sidecar generation from event file complete'}
-def remodel(hed_schema, events, sidecar, remodel_commands):
+def remodel(hed_schema, events, sidecar, remodel_operations):
""" Remodel a given events file.
Args:
hed_schema (HedSchema, HedSchemaGroup or None): A HED schema or HED schema group.
events (EventsInput): An events input object.
sidecar (Sidecar or None): A sidecar object.
- remodel_commands (dict): A dictionary with the name and command list of the remodeling file.
+ remodel_operations (dict): A dictionary with the name and list of operations in the remodeling file.
Returns:
dict: A dictionary pointing to results or errors.
@@ -179,29 +181,41 @@ def remodel(hed_schema, events, sidecar, remodel_commands):
schema_version = hed_schema.version
else:
schema_version = None
- remodel_name = remodel_commands['name']
- commands = remodel_commands['commands']
- command_list, errors = Dispatcher.parse_commands(commands)
+ remodel_name = remodel_operations['name']
+ operations = remodel_operations['operations']
+ operations_list, errors = Dispatcher.parse_operations(operations)
if errors:
issue_str = Dispatcher.errors_to_str(errors)
- file_name = generate_filename(remodel_name, name_suffix='_command_parse_errors',
+ file_name = generate_filename(remodel_name, name_suffix='_operation_parse_errors',
extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
base_constants.COMMAND_TARGET: 'events',
'data': issue_str, "output_display_name": file_name,
base_constants.SCHEMA_VERSION: schema_version, "msg_category": "warning",
- 'msg': f"Remodeling command file for {display_name} had validation errors"}
+ 'msg': f"Remodeling operation list for {display_name} had validation errors"}
df = events.dataframe
- dispatch = Dispatcher(commands, data_root=None, hed_versions=schema_version)
+ dispatch = Dispatcher(operations, data_root=None, hed_versions=schema_version)
df = dispatch.prep_events(df)
for operation in dispatch.parsed_ops:
df = operation.do_op(dispatch, df, display_name, sidecar=sidecar)
df = df.fillna('n/a')
- csv_string = df.to_csv(None, sep='\t', index=False, header=True)
name_suffix = f"_remodeled_by_{remodel_name}"
file_name = generate_filename(display_name, name_suffix=name_suffix, extension='.tsv', append_datetime=True)
+ if not dispatch.context_dict:
+ data = df.to_csv(None, sep='\t', index=False, header=True)
+ zip_data = None
+ output_name = file_name
+ else:
+ output_name = generate_filename(display_name, name_suffix=name_suffix + '_zip',
+ extension='.zip', append_datetime=True)
+ archive = Dispatcher.archive_data_file(df, file_name)
+ archive = dispatch.archive_context(archive=archive)
+ # Dispatcher.save_archive(archive, 'd:/junk/temp.zip')
+ zip_data = archive.getvalue()
+ data = None
return {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
- base_constants.COMMAND_TARGET: 'events', 'data': csv_string, "output_display_name": file_name,
+ base_constants.COMMAND_TARGET: 'events', 'data': data, 'zip_data': zip_data,
+ "output_display_name": output_name,
base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
'msg': f"Command parsing for {display_name} remodeling was successful"}
diff --git a/hedweb/services.py b/hedweb/services.py
index 4322f163..3590bf56 100644
--- a/hedweb/services.py
+++ b/hedweb/services.py
@@ -130,8 +130,8 @@ def get_remodeler(arguments, params):
if base_constants.REMODEL_FILE in params:
f = io.StringIO(base_constants.REMODEL_FILE)
name = 'remodel_commands.json'
- remodel = {'name': name, 'commands': json.load(f)}
- arguments[base_constants.REMODEL_COMMANDS] = remodel
+ remodel = {'name': name, 'operations': json.load(f)}
+ arguments[base_constants.REMODEL_OPERATIONS] = remodel
def get_service_info(params):
diff --git a/hedweb/templates/js/events-form.js b/hedweb/templates/js/events-form.js
index 96041c70..f11c0f31 100644
--- a/hedweb/templates/js/events-form.js
+++ b/hedweb/templates/js/events-form.js
@@ -157,4 +157,4 @@ function submitForm() {
}
}
)
-}
\ No newline at end of file
+}
diff --git a/hedweb/templates/js/form-helpers.js b/hedweb/templates/js/form-helpers.js
index 6a59a2e9..5fdd2e00 100644
--- a/hedweb/templates/js/form-helpers.js
+++ b/hedweb/templates/js/form-helpers.js
@@ -186,10 +186,12 @@ function splitExt(filename) {
* @param {String} content_type - Type of file to create
*/
function triggerDownloadBlob(download_blob, display_name, content_type) {
- const url = URL.createObjectURL(new Blob([download_blob], {type:content_type}));
+ // const url = URL.createObjectURL(new Blob([download_blob]));
+ const url = URL.createObjectURL(new Blob([download_blob], {type: content_type}))
const link = document.createElement('a');
link.href = url;
link.setAttribute('download', display_name);
+ link.setAttribute('type', content_type)
document.body.appendChild(link);
link.click();
}
diff --git a/hedweb/web_util.py b/hedweb/web_util.py
index 878e47d0..f1fadd84 100644
--- a/hedweb/web_util.py
+++ b/hedweb/web_util.py
@@ -1,8 +1,9 @@
import io
import json
import os
+import base64
from urllib.parse import urlparse
-from flask import current_app, Response, make_response
+from flask import current_app, Response, make_response, send_file
from werkzeug.utils import secure_filename
from hed import schema as hedschema
@@ -85,45 +86,44 @@ def form_has_url(request, url_field, valid_extensions=None):
return file_extension_is_valid(parsed_url.path, valid_extensions)
-def generate_download_file_from_text(download_text, display_name=None,
- header=None, msg_category='success', msg=''):
+def generate_download_file_from_text(results, file_header=None):
""" Generate a download file from text output.
Args:
- download_text (str): Text with newlines for iterating.
- display_name (str): Name to be assigned to the file in the response.
- header (str): Optional header for download file blob.
- msg_category (str): Category of the message to be displayed ('Success', 'Error', 'Warning')
- msg (str): Optional message to be displayed in the submit-flash-field.
+ results: Text with newlines for iterating.
+ file_header (str): Optional header for download file blob.
Returns:
Response: A Response object containing the downloaded file.
"""
- if not display_name:
+ display_name = results.get('output_display_name', None)
+ if display_name is None:
display_name = 'download.txt'
+ download_text = results.get('data', '')
if not download_text:
raise HedFileError('EmptyDownloadText', "No download text given", "")
-
+ headers = {'Content-Disposition': f"attachment filename={display_name}",
+ 'Category': results[base_constants.MSG_CATEGORY],
+ 'Message': results[base_constants.MSG]}
def generate():
- if header:
- yield header
+ if file_header:
+ yield file_header
for issue in download_text.splitlines(True):
yield issue
return Response(generate(), mimetype='text/plain charset=utf-8',
headers={'Content-Disposition': f"attachment filename={display_name}",
- 'Category': msg_category, 'Message': msg})
+ 'Category': results[base_constants.MSG_CATEGORY],
+ 'Message': results[base_constants.MSG]})
-def generate_download_spreadsheet(results, msg_category='success', msg=''):
+def generate_download_spreadsheet(results):
""" Generate a download Excel file.
Args:
results (dict): Dictionary with the results to be downloaded.
- msg_category (str): Category of the message to be displayed ('Success', 'Error', 'Warning')
- msg (str): Optional message to be displayed in the submit-flash-field.
Returns:
Response: A Response object containing the downloaded file.
@@ -131,42 +131,96 @@ def generate_download_spreadsheet(results, msg_category='success', msg=''):
"""
# return generate_download_test()
spreadsheet = results[base_constants.SPREADSHEET]
- display_name = results[base_constants.OUTPUT_DISPLAY_NAME]
-
if not spreadsheet.loaded_workbook:
- return generate_download_file_from_text(spreadsheet.to_csv(), display_name=display_name,
- msg_category=msg_category, msg=msg)
+ return generate_download_file_from_text({'data': spreadsheet.to_csv(),
+ 'output_display_name': results[base_constants.OUTPUT_DISPLAY_NAME],
+ base_constants.MSG_CATEGORY: results[base_constants.MSG_CATEGORY],
+ base_constants.MSG: results[base_constants.MSG]})
buffer = io.BytesIO()
spreadsheet.to_excel(buffer, output_processed_file=True)
buffer.seek(0)
response = make_response()
response.data = buffer.read()
- response.headers['Content-Disposition'] = 'attachment; filename=' + display_name
- response.headers['Category'] = msg_category
- response.headers['Message'] = msg
+ response.headers['Content-Disposition'] = 'attachment; filename=' + results[base_constants.OUTPUT_DISPLAY_NAME]
+ response.headers['Category'] = results[base_constants.MSG_CATEGORY]
+ response.headers['Message'] = results[base_constants.MSG]
response.mimetype = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
return response
-def generate_text_response(download_text, msg_category='success', msg=''):
+def generate_text_response(results):
""" Generate a download response.
Args:
- download_text (str): Text to be downloaded as part of the response.
- msg_category (str): Category of the message to be displayed ('Success', 'Error', 'Warning')
- msg (str): Optional message to be displayed in the submit-flash-field.
+ results (dict): Dictionary containing the results of the data.
Returns:
Response: A Response object containing the downloaded file.
"""
- headers = {'Category': msg_category, 'Message': msg}
+ headers = {'Category': results[base_constants.MSG_CATEGORY], 'Message': results[base_constants.MSG]}
+ download_text = results.get('data', '')
if len(download_text) > 0:
headers['Content-Length'] = len(download_text)
return Response(download_text, mimetype='text/plain charset=utf-8', headers=headers)
+def generate_download_zip_file(results):
+ """ Generate a download response.
+
+ Args:
+ results (dict): Dictionary of results to use in constructing response.
+
+ Returns:
+ Response: A Response object containing the downloaded file.
+
+
+ """
+
+ archive = results['zip_data']
+ response = make_response()
+ response.data = archive
+ response.headers['Content-type'] = 'zip'
+ response.headers['Content-Disposition'] = 'attachment; filename=tempA.zip'
+ response.headers['Category'] = results[base_constants.MSG_CATEGORY]
+ response.headers['Message'] = results[base_constants.MSG]
+ response.mimetype = 'application/zip'
+ return response
+ # archive = results['zip_data']
+ # with open('d:/junk/temp2.zip', 'wb') as fp:
+ # fp.write(archive)
+ # buffer = io.BytesIO(archive)
+ # response = make_response()
+ # buffer.seek(0)
+ # buflen = len(archive)
+ # response.data = buffer.read()
+ # response.headers['Content-Disposition'] = 'attachment; filename=temp.zip'
+ # response.headers['Category'] = results[base_constants.MSG_CATEGORY]
+ # response.headers['Message'] = results[base_constants.MSG]
+ # response.headers['Content-Type'] = 'application/zip; charset=utf-8'
+ # response.headers['Content-Length'] = buflen
+ # response.mimetype = 'application/zip'
+ # return response
+ # fileobj = io.BytesIO()
+ # with zipfile.ZipFile(fileobj, 'w') as zip_file:
+ # zip_info = zipfile.ZipInfo(FILEPATH)
+ # zip_info.date_time = time.localtime(time.time())[:6]
+ # zip_info.compress_type = zipfile.ZIP_DEFLATED
+ # with open(FILEPATH, 'rb') as fd:
+ # zip_file.writestr(zip_info, fd.read())
+ # fileobj.seek(0)
+ #
+ # response = make_response(fileobj.read())
+ # response.headers.set('Content-Type', 'zip')
+ # response.headers.set('Content-Disposition', 'attachment', filename='%s.zip' % os.path.basename(FILEPATH))
+
+ # archive = results['zip_data']
+ # with open('d:/junk/temp2.zip', 'wb') as fp:
+ # fp.write(archive)
+ # response = send_file('d:/junk/junk3.zip', mimetype='application/zip', as_attachment=True, attachment_filename='junk3.zip')
+ return response
+
def get_hed_schema_from_pull_down(request):
""" Create a HedSchema object from form pull-down box.
@@ -249,7 +303,7 @@ def handle_http_error(ex):
else:
message = str(ex)
error_message = f"{error_code}: [{message}]"
- return generate_text_response('', msg_category='error', msg=error_message)
+ return generate_text_response({'data': '', base_constants.MSG_CATEGORY: 'error', base_constants.MSG: error_message})
def package_results(results):
@@ -259,13 +313,12 @@ def package_results(results):
results (dict): A dictionary with the results
"""
- msg = results.get('msg', '')
- msg_category = results.get('msg_category', 'success')
- display_name = results.get('output_display_name', '')
- if results['data']:
- return generate_download_file_from_text(results['data'], display_name=display_name,
- msg_category=msg_category, msg=msg)
+
+ if results.get('data', None):
+ return generate_download_file_from_text(results)
+ elif results.get('zip_data', None):
+ return generate_download_zip_file(results)
elif not results.get('spreadsheet', None):
- return generate_text_response("", msg=msg, msg_category=msg_category)
+ return generate_text_response(results)
else:
- return generate_download_spreadsheet(results, msg_category=msg_category, msg=msg)
+ return generate_download_spreadsheet(results)
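Side note (not part of the patch): with these changes the download helpers take a single results dictionary rather than separate msg/msg_category arguments. A rough usage sketch with made-up values, run inside a Flask request context as the tests in this patch do:

    from hedweb.constants import base_constants
    from hedweb.web_util import package_results

    results = {'data': 'onset\tduration\n0.5\tn/a\n',        # non-empty 'data' selects the text download path
               'output_display_name': 'remodeled_events.tsv',
               base_constants.MSG_CATEGORY: 'success',
               base_constants.MSG: 'Remodeling was successful'}
    response = package_results(results)  # Flask Response with Content-Disposition, Category and Message headers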
diff --git a/requirements.txt b/requirements.txt
index 29ce1d06..b49b8916 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,26 +1,26 @@
-click==8.0.3
-coverage==6.3.1
+click==8.1.3
+coverage==6.4.4
defusedxml==0.7.1
et-xmlfile==1.1.0
-Flask==2.0.3
-Flask-WTF==1.0.0
-inflect==5.4.0
-itsdangerous==2.0.1
+Flask==2.2.2
+Flask-WTF==1.0.1
+inflect==6.0.0
+itsdangerous==2.1.2
jdcal==1.4.1
-Jinja2==3.0.3
-MarkupSafe==2.0.1
-numpy==1.21.5
-openpyxl==3.0.9
+Jinja2==3.1.2
+MarkupSafe==2.1.1
+numpy==1.21.6
+openpyxl==3.0.10
pandas==1.3.5
-portalocker==2.3.2
+portalocker==2.5.1
python-dateutil==2.8.2
-pytz==2021.3
-semantic_version==2.9.0
+pytz==2022.2.1
+semantic_version==2.10.0
six==1.16.0
-Sphinx==4.4.0
+Sphinx==5.2.2
SphinxExtensions==0.2.0
sphinx_rtd_theme==1.0.0
-Werkzeug==2.0.3
+Werkzeug==2.2.2
WTForms==3.0.1
xlrd==2.0.1
-attrs==21.4.0
+attrs==22.1.0
diff --git a/tests/data/simple_reorder_remdl.json b/tests/data/simple_reorder_rmdl.json
similarity index 82%
rename from tests/data/simple_reorder_remdl.json
rename to tests/data/simple_reorder_rmdl.json
index 7bf4eb15..6b5b4c63 100644
--- a/tests/data/simple_reorder_remdl.json
+++ b/tests/data/simple_reorder_rmdl.json
@@ -1,6 +1,6 @@
[
{
- "command": "remove_columns",
+ "operation": "remove_columns",
"description": "Get rid of the sample and the value columns",
"parameters": {
"remove_names": ["sample", "value"],
@@ -8,7 +8,7 @@
}
},
{
- "command": "reorder_columns",
+ "operation": "reorder_columns",
"description": "Order columns so that response_time and trial_type come after onset and duration",
"parameters": {
"column_order": ["onset", "duration"],
diff --git a/tests/test_events.py b/tests/test_events.py
index 307ed2b7..2d1a3da1 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -201,14 +201,14 @@ def test_events_remodel_valid_no_hed(self):
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'data/sub-002_task-FacePerception_run-1_events.tsv')
remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'data/simple_reorder_remdl.json')
+ 'data/simple_reorder_rmdl.json')
events = TabularInput(file=events_path, name='wh_events')
df = events.dataframe
df_rows = len(df)
df_cols = len(df.columns)
with open(remodel_path, 'r') as fp:
remodel_json = json.load(fp)
- remodeler = {'name': "simple_reorder_remdl.json", 'commands': remodel_json}
+ remodeler = {'name': "simple_reorder_rmdl.json", 'operations': remodel_json}
hed_schema = None
sidecar = None
@@ -220,20 +220,21 @@ def test_events_remodel_valid_no_hed(self):
def test_events_remodel_invalid_no_hed(self):
from hedweb.events import remodel
+
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'data/sub-002_task-FacePerception_run-1_events.tsv')
remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'data/simple_reorder_remdl.json')
+ 'data/simple_reorder_rmdl.json')
events = TabularInput(file=events_path, name='wh_events')
with open(remodel_path, 'r') as fp:
remodeler = json.load(fp)
hed_schema = None
sidecar = None
- command_0 = {'badcommand': 'remove_columns', 'description': 'bad structure', 'parameters': {'ignore_missing': True}}
- command_1 = {'command': 'unknown_command', 'description': 'bad command', 'parameters': {'ignore_missing': True}}
- command_2 = {'command': 'remove_columns', 'description': 'bad parameters', 'parameters': {'ignore_missing': True}}
- commands_bad = [command_0, remodeler[0], command_1, remodeler[1], command_2]
- remodel_bad = {'name': 'remodel_bad.json', 'commands': commands_bad}
+ operation_0 = {'badoperation': 'remove_columns', 'description': 'bad structure', 'parameters': {'ignore_missing': True}}
+ operation_1 = {'operation': 'unknown_command', 'description': 'bad command', 'parameters': {'ignore_missing': True}}
+ operation_2 = {'command': 'remove_columns', 'description': 'bad parameters', 'parameters': {'ignore_missing': True}}
+ operation_bad = [operation_0, remodeler[0], operation_1, remodeler[1], operation_2]
+ remodel_bad = {'name': 'remodel_bad.json', 'operations': operation_bad}
with self.app.app_context():
results = remodel(hed_schema, events, sidecar, remodel_bad)
self.assertTrue(results['data'], 'remodel results should have a data key when unsuccessful')
diff --git a/tests/test_routes/test_routes_events.py b/tests/test_routes/test_routes_events.py
index c91f90a8..86227a4e 100644
--- a/tests/test_routes/test_routes_events.py
+++ b/tests/test_routes/test_routes_events.py
@@ -75,7 +75,7 @@ def test_events_results_remodel_valid(self):
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../data/sub-002_task-FacePerception_run-1_events.tsv')
remodel_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- '../data/simple_reorder_remdl.json')
+ '../data/simple_reorder_rmdl.json')
with open(events_path, 'r') as sc:
y = sc.read()
events_buffer = io.BytesIO(bytes(y, 'utf-8'))
@@ -87,7 +87,7 @@ def test_events_results_remodel_valid(self):
with self.app.app_context():
input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_REMODEL,
- base_constants.REMODEL_FILE: (remodel_buffer, 'simple_reorder_remdl.json'),
+ base_constants.REMODEL_FILE: (remodel_buffer, 'simple_reorder_rmdl.json'),
base_constants.EVENTS_FILE: (events_buffer,
'sub-002_task-FacePerception_run-1_events.tsv.tsv')}
response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
diff --git a/tests/test_web_util.py b/tests/test_web_util.py
index 4c999ff8..dcf7db18 100644
--- a/tests/test_web_util.py
+++ b/tests/test_web_util.py
@@ -2,6 +2,7 @@
import unittest
from werkzeug.test import create_environ
from werkzeug.wrappers import Request, Response
+from hedweb.constants import base_constants, file_constants
from tests.test_web_base import TestWebBase
@@ -11,7 +12,6 @@ class Test(TestWebBase):
def test_form_has_file(self):
from hedweb.web_util import form_has_file
- from hedweb.constants import file_constants
with self.app.test as _:
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
with open(json_path, 'rb') as fp:
@@ -27,7 +27,6 @@ def test_form_has_file(self):
def test_form_has_option(self):
from hedweb.web_util import form_has_option
- from hedweb.constants import base_constants
with self.app.test as _:
environ = create_environ(data={base_constants.CHECK_FOR_WARNINGS: 'on'})
@@ -41,7 +40,6 @@ def test_form_has_option(self):
def test_form_has_url(self):
from hedweb.web_util import form_has_url
- from hedweb.constants import base_constants, file_constants
with self.app.test as _:
environ = create_environ(data={base_constants.SCHEMA_URL: 'https://www.google.com/my.json'})
request = Request(environ)
@@ -54,21 +52,20 @@ def test_generate_download_file_from_text(self):
from hedweb.web_util import generate_download_file_from_text
with self.app.test_request_context():
the_text = 'The quick brown fox\nIs too slow'
- response = generate_download_file_from_text(the_text, 'temp',
- msg_category='success', msg='Successful')
+ response = generate_download_file_from_text({'data': the_text, 'msg_category':'success',
+ 'msg': 'Successful'})
self.assertIsInstance(response, Response,
'Generate_response_download_file_from_text returns a response for string')
self.assertEqual(200, response.status_code,
"Generate_response_download_file_from_text has status code 200 for string")
header_content = dict(response.headers)
self.assertEqual('success', header_content['Category'], "The msg_category is success")
- self.assertEqual('attachment filename=temp', header_content['Content-Disposition'],
+ self.assertEqual('attachment filename=download.txt', header_content['Content-Disposition'],
"generate_download_file has the correct attachment file name")
def test_generate_download_spreadsheet_excel(self):
with self.app.test_request_context():
from hed.models import SpreadsheetInput
- from hedweb.constants import base_constants
from hedweb.web_util import generate_download_spreadsheet
spreadsheet_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/ExcelOneSheet.xlsx')
@@ -78,8 +75,9 @@ def test_generate_download_spreadsheet_excel(self):
4: 'Attribute/Informational/Description/'},
name='ExcelOneSheet.xlsx')
results = {base_constants.SPREADSHEET: spreadsheet,
- base_constants.OUTPUT_DISPLAY_NAME: 'ExcelOneSheetA.xlsx'}
- response = generate_download_spreadsheet(results, msg_category='success', msg='Successful download')
+ base_constants.OUTPUT_DISPLAY_NAME: 'ExcelOneSheetA.xlsx',
+ base_constants.MSG: 'Successful download', base_constants.MSG_CATEGORY: 'success'}
+ response = generate_download_spreadsheet(results)
self.assertIsInstance(response, Response, 'generate_download_spreadsheet returns a response for xlsx files')
headers_dict = dict(response.headers)
self.assertEqual(200, response.status_code, 'generate_download_spreadsheet should return status code 200')
@@ -91,7 +89,6 @@ def test_generate_download_spreadsheet_excel(self):
def test_generate_download_spreadsheet_excel_code(self):
with self.app.test_request_context():
from hed.models import SpreadsheetInput
- from hedweb.constants import base_constants
from hedweb.web_util import generate_download_spreadsheet
spreadsheet_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/ExcelOneSheet.xlsx')
@@ -101,8 +98,9 @@ def test_generate_download_spreadsheet_excel_code(self):
3: 'Description/'},
name='ExcelOneSheet.xlsx')
results = {base_constants.SPREADSHEET: spreadsheet,
- base_constants.OUTPUT_DISPLAY_NAME: 'ExcelOneSheetA.xlsx'}
- response = generate_download_spreadsheet(results, msg_category='success', msg='Successful download')
+ base_constants.OUTPUT_DISPLAY_NAME: 'ExcelOneSheetA.xlsx',
+ base_constants.MSG: 'Successful download', base_constants.MSG_CATEGORY: 'success'}
+ response = generate_download_spreadsheet(results)
self.assertIsInstance(response, Response, 'generate_download_spreadsheet returns a response for tsv files')
headers_dict = dict(response.headers)
self.assertEqual(200, response.status_code, 'generate_download_spreadsheet should return status code 200')
@@ -114,7 +112,6 @@ def test_generate_download_spreadsheet_excel_code(self):
def test_generate_download_spreadsheet_tsv(self):
with self.app.test_request_context():
from hed.models import SpreadsheetInput
- from hedweb.constants import base_constants
from hedweb.web_util import generate_download_spreadsheet
spreadsheet_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'data/LKTEventCodesHED3.tsv')
@@ -125,8 +122,9 @@ def test_generate_download_spreadsheet_tsv(self):
4: 'Attribute/Informational/Description/'},
name='LKTEventCodesHED3.tsv')
results = {base_constants.SPREADSHEET: spreadsheet,
- base_constants.OUTPUT_DISPLAY_NAME: 'LKTEventCodesHED3.tsv'}
- response = generate_download_spreadsheet(results, msg_category='success', msg='Successful download')
+ base_constants.OUTPUT_DISPLAY_NAME: 'LKTEventCodesHED3.tsv',
+ base_constants.MSG: 'Successful download', base_constants.MSG_CATEGORY: 'success'}
+ response = generate_download_spreadsheet(results)
self.assertIsInstance(response, Response, 'generate_download_spreadsheet returns a response for tsv files')
headers_dict = dict(response.headers)
self.assertEqual(200, response.status_code, 'generate_download_spreadsheet should return status code 200')
@@ -138,17 +136,16 @@ def test_generate_download_spreadsheet_tsv(self):
def test_generate_text_response(self):
with self.app.test_request_context():
from hedweb.web_util import generate_text_response
- download_text = 'testme'
- test_msg = 'testing'
- response = generate_text_response(download_text, msg_category='success', msg=test_msg)
+ results = {'data': 'testme', base_constants.MSG: 'testing', base_constants.MSG_CATEGORY: 'success'}
+ response = generate_text_response(results)
self.assertIsInstance(response, Response, 'generate_download_text_response returns a response')
headers_dict = dict(response.headers)
self.assertEqual(200, response.status_code, 'generate_download_text_response should return status code 200')
self.assertEqual('text/plain charset=utf-8', response.mimetype,
"generate_download_download_text_response should return text")
- self.assertEqual(test_msg, headers_dict['Message'],
+ self.assertEqual(results[base_constants.MSG], headers_dict['Message'],
"generate_download_text_response have the correct message in the response")
- self.assertEqual(download_text, response.data.decode('ascii'),
+ self.assertEqual(results['data'], response.data.decode('ascii'),
"generate_download_text_response have the download text as response data")
def test_get_hed_schema_from_pull_down_empty(self):
From a9a6308d07de3a7f14eb7babd86f2e063dc3d919 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Wed, 12 Oct 2022 18:50:47 -0500
Subject: [PATCH 11/25] Fixed the zip download
---
hedweb/constants/base_constants.py | 18 ++-
hedweb/events.py | 142 +++++++++---------
hedweb/routes.py | 2 +-
hedweb/schema.py | 11 +-
hedweb/services.py | 38 ++---
hedweb/sidecar.py | 63 ++++----
hedweb/spreadsheet.py | 35 +++--
hedweb/static/img/temp2.zip | Bin 0 -> 849 bytes
hedweb/static/resources/services.json | 33 ++--
hedweb/strings.py | 14 +-
hedweb/templates/events.html | 8 +-
hedweb/templates/js/events-form.js | 50 +++---
hedweb/templates/js/form-helpers.js | 4 +-
hedweb/templates/js/json-input.js | 37 -----
hedweb/templates/js/sidecar-form.js | 16 +-
hedweb/templates/js/sidecar-input.js | 37 +++++
hedweb/templates/js/string-form.js | 5 -
hedweb/templates/json-input.html | 14 --
hedweb/templates/options.html | 9 +-
hedweb/templates/sidecar-input.html | 14 ++
hedweb/templates/sidecar.html | 6 +-
hedweb/web_util.py | 80 ++++------
...ple.json => rename_example_temp_rmdl.json} | 2 +-
...son => splitevents_example_temp_rmdl.json} | 0
tests/test_events.py | 45 +++---
tests/test_routes/test_routes_events.py | 16 +-
tests/test_routes/test_routes_sidecar.py | 30 ++--
tests/test_services.py | 51 +++++--
tests/test_sidecar.py | 36 ++---
tests/test_web_util.py | 12 +-
30 files changed, 420 insertions(+), 408 deletions(-)
create mode 100644 hedweb/static/img/temp2.zip
delete mode 100644 hedweb/templates/js/json-input.js
create mode 100644 hedweb/templates/js/sidecar-input.js
delete mode 100644 hedweb/templates/json-input.html
create mode 100644 hedweb/templates/sidecar-input.html
rename tests/data/{rename_example.json => rename_example_temp_rmdl.json} (81%)
rename tests/data/{splitevents_example.json => splitevents_example_temp_rmdl.json} (100%)
diff --git a/hedweb/constants/base_constants.py b/hedweb/constants/base_constants.py
index b292ac66..1beacc5c 100644
--- a/hedweb/constants/base_constants.py
+++ b/hedweb/constants/base_constants.py
@@ -47,6 +47,7 @@
EXPAND_DEFS = 'expand_defs'
+FILE_LIST = 'file_list'
FORMAT_OPTION = 'format_option'
FORMAT_TO_EXCEL = 'to_excel'
FORMAT_TO_JSON = 'to_json'
@@ -57,15 +58,9 @@
INCLUDE_DEFINITION_TAGS = 'include_definition_tags'
INCLUDE_DESCRIPTION_TAGS = 'include_description_tags'
+INCLUDE_SUMMARIES = 'include_summaries'
ISSUE_STRING = 'issue_string'
-JSON_DISPLAY_NAME = 'json_display_name'
-JSON_FILE = 'json_file'
-JSON_LIST = 'json_list'
-JSON_PATH = 'json_path'
-JSON_SIDECAR = 'json_sidecar'
-JSON_SIDECARS = 'json_sidecars'
-JSON_STRING = 'json_string'
MSG = 'msg'
MSG_CATEGORY = 'msg_category'
@@ -75,6 +70,7 @@
QUERY = 'query'
REMODEL_FILE = 'remodel_file'
REMODEL_OPERATIONS = 'remodel_operations'
+REMODEL_STRING = 'remodel_string'
REMOVE_DEFS = 'remove_defs'
REQUIRED_COLUMN_INDICES = 'required_column_indices'
@@ -99,10 +95,16 @@
SCHEMA_URL_OPTION = 'schema_url_option'
SCHEMA_VERSION = 'schema_version'
SCHEMA_VERSION_LIST = 'schema_version_list'
+SCHEMA_VERSION_STRING = 'schema_version_string'
SERVICE = 'service'
SERVICE_PARAMETERS = 'service_parameters'
+SIDECAR = 'sidecar'
+SIDECAR_DISPLAY_NAME = 'sidecar_display_name'
+SIDECAR_FILE = 'sidecar_file'
+SIDECAR_PATH = 'sidecar_path'
+SIDECAR_STRING = 'sidecar_string'
SPREADSHEET = 'spreadsheet'
SPREADSHEET_DISPLAY_NAME = 'spreadsheet_display_name'
@@ -123,7 +125,7 @@
WORKSHEET_NAMES = 'worksheet_names'
WORKSHEET_SELECT = 'worksheet_select'
WORKSHEET_SELECTED = 'worksheet_selected'
-
+ZIP_NAME = 'zip_name'
# Type constants
BOOLEAN = 'boolean'
diff --git a/hedweb/events.py b/hedweb/events.py
index 46d35272..e9fb40a5 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -1,17 +1,18 @@
from flask import current_app
import json
-import io
from werkzeug.utils import secure_filename
import pandas as pd
from hed import schema as hedschema
from hed.errors import get_printable_issue_string, HedFileError
-from hed.tools import assemble_hed, Dispatcher, TabularSummary, generate_filename, generate_sidecar_entry, search_tabular
from hed.models import DefinitionDict, Sidecar, TabularInput
+from hed.schema.hed_schema_io import get_schema_versions
+from hed.tools import assemble_hed, Dispatcher, TabularSummary, generate_filename, \
+ generate_sidecar_entry, search_tabular
from hed.validator import HedValidator
from hedweb.constants import base_constants
from hedweb.columns import create_column_selections, create_columns_included
-from hedweb.web_util import form_has_option, get_hed_schema_from_pull_down
+from hedweb.web_util import filter_issues, form_has_option, get_hed_schema_from_pull_down
app_config = current_app.config
@@ -32,6 +33,7 @@ def get_events_form_input(request):
base_constants.COMMAND: request.form.get(base_constants.COMMAND_OPTION, ''),
base_constants.CHECK_FOR_WARNINGS: form_has_option(request, base_constants.CHECK_FOR_WARNINGS, 'on'),
base_constants.EXPAND_DEFS: form_has_option(request, base_constants.EXPAND_DEFS, 'on'),
+ base_constants.INCLUDE_SUMMARIES: form_has_option(request, base_constants.INCLUDE_SUMMARIES, 'on'),
base_constants.COLUMNS_SELECTED: create_column_selections(request.form),
base_constants.COLUMNS_INCLUDED: create_columns_included(request.form)
}
@@ -39,22 +41,22 @@ def get_events_form_input(request):
arguments[base_constants.COLUMNS_INCLUDED] = ['onset'] # TODO add user interface option to choose columns.
if arguments[base_constants.COMMAND] != base_constants.COMMAND_GENERATE_SIDECAR:
arguments[base_constants.SCHEMA] = get_hed_schema_from_pull_down(request)
- json_sidecar = None
- if base_constants.JSON_FILE in request.files:
- f = request.files[base_constants.JSON_FILE]
- json_sidecar = Sidecar(files=f, name=secure_filename(f.filename))
- arguments[base_constants.JSON_SIDECAR] = json_sidecar
- remodel = None
+ sidecar = None
+ if base_constants.SIDECAR_FILE in request.files:
+ f = request.files[base_constants.SIDECAR_FILE]
+ sidecar = Sidecar(files=f, name=secure_filename(f.filename))
+ arguments[base_constants.SIDECAR] = sidecar
+ remodel_operations = None
if arguments[base_constants.COMMAND] == base_constants.COMMAND_REMODEL and \
- base_constants.REMODEL_FILE in request.files:
+ base_constants.REMODEL_FILE in request.files:
f = request.files[base_constants.REMODEL_FILE]
name = secure_filename(f.filename)
- remodel = {'name': name, 'operations': json.load(f)}
- arguments[base_constants.REMODEL_OPERATIONS] = remodel
+ remodel_operations = {'name': name, 'operations': json.load(f)}
+ arguments[base_constants.REMODEL_OPERATIONS] = remodel_operations
if base_constants.EVENTS_FILE in request.files:
f = request.files[base_constants.EVENTS_FILE]
arguments[base_constants.EVENTS] = \
- TabularInput(file=f, sidecar=arguments.get(base_constants.JSON_SIDECAR, None),
+ TabularInput(file=f, sidecar=arguments.get(base_constants.SIDECAR, None),
name=secure_filename(f.filename))
return arguments
@@ -76,10 +78,11 @@ def process(arguments):
command = arguments.get(base_constants.COMMAND, None)
if command == base_constants.COMMAND_GENERATE_SIDECAR:
pass
- elif not hed_schema or not isinstance(hed_schema, hedschema.hed_schema.HedSchema):
+ elif not hed_schema or not \
+ isinstance(hed_schema, (hedschema.hed_schema.HedSchema, hedschema.hed_schema_group.HedSchemaGroup)):
raise HedFileError('BadHedSchema', "Please provide a valid HedSchema for event processing", "")
events = arguments.get(base_constants.EVENTS, None)
- sidecar = arguments.get(base_constants.JSON_SIDECAR, None)
+ sidecar = arguments.get(base_constants.SIDECAR, None)
remodel_operations = arguments.get(base_constants.REMODEL_OPERATIONS, None)
query = arguments.get(base_constants.QUERY, None)
columns_included = arguments.get(base_constants.COLUMNS_INCLUDED, None)
@@ -96,7 +99,8 @@ def process(arguments):
elif command == base_constants.COMMAND_GENERATE_SIDECAR:
results = generate_sidecar(events, arguments.get(base_constants.COLUMNS_SELECTED, None))
elif command == base_constants.COMMAND_REMODEL:
- results = remodel(hed_schema, events, sidecar, remodel_operations)
+ results = remodel(hed_schema, events, sidecar, remodel_operations,
+ include_summaries=arguments.get(base_constants.INCLUDE_SUMMARIES, False))
else:
raise HedFileError('UnknownEventsProcessingMethod', f'Command {command} is missing or invalid', '')
return results
@@ -116,7 +120,6 @@ def assemble(hed_schema, events, columns_included=None, expand_defs=True):
"""
- schema_version = hed_schema.version
results = validate(hed_schema, events)
if results['data']:
return results
@@ -127,7 +130,8 @@ def assemble(hed_schema, events, columns_included=None, expand_defs=True):
return {base_constants.COMMAND: base_constants.COMMAND_ASSEMBLE,
base_constants.COMMAND_TARGET: 'events',
'data': csv_string, 'output_display_name': file_name, 'definitions': DefinitionDict.get_as_strings(defs),
- 'schema_version': schema_version, 'msg_category': 'success', 'msg': 'Events file successfully expanded'}
+ 'schema_version': hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'success', 'msg': 'Events file successfully expanded'}
def generate_sidecar(events, columns_selected):
@@ -162,14 +166,15 @@ def generate_sidecar(events, columns_selected):
'msg': 'JSON sidecar generation from event file complete'}
-def remodel(hed_schema, events, sidecar, remodel_operations):
+def remodel(hed_schema, events, sidecar, remodel_operations, include_summaries=True):
""" Remodel a given events file.
Args:
hed_schema (HedSchema, HedSchemaGroup or None): A HED schema or HED schema group.
events (EventsInput): An events input object.
sidecar (Sidecar or None): A sidecar object.
- remodel_operations (dict): A dictionary with the name and list of operations in the remodeling file.
+ remodel_operations (dict): A dictionary with the name and list of operations in the remodeling file.
+ include_summaries (bool): If true and summaries exist, package event file and summaries in a zip file.
Returns:
dict: A dictionary pointing to results or errors.
@@ -177,10 +182,6 @@ def remodel(hed_schema, events, sidecar, remodel_operations):
"""
display_name = events.name
- if hed_schema:
- schema_version = hed_schema.version
- else:
- schema_version = None
remodel_name = remodel_operations['name']
operations = remodel_operations['operations']
operations_list, errors = Dispatcher.parse_operations(operations)
@@ -190,34 +191,34 @@ def remodel(hed_schema, events, sidecar, remodel_operations):
extension='.txt', append_datetime=True)
return {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
base_constants.COMMAND_TARGET: 'events',
- 'data': issue_str, "output_display_name": file_name,
- base_constants.SCHEMA_VERSION: schema_version, "msg_category": "warning",
+ 'data': issue_str, 'output_display_name': file_name,
+ 'msg_category': "warning",
'msg': f"Remodeling operation list for {display_name} had validation errors"}
df = events.dataframe
- dispatch = Dispatcher(operations, data_root=None, hed_versions=schema_version)
+ dispatch = Dispatcher(operations, data_root=None, hed_versions=hed_schema)
df = dispatch.prep_events(df)
for operation in dispatch.parsed_ops:
df = operation.do_op(dispatch, df, display_name, sidecar=sidecar)
df = df.fillna('n/a')
+ data = df.to_csv(None, sep='\t', index=False, header=True)
name_suffix = f"_remodeled_by_{remodel_name}"
file_name = generate_filename(display_name, name_suffix=name_suffix, extension='.tsv', append_datetime=True)
- if not dispatch.context_dict:
- data = df.to_csv(None, sep='\t', index=False, header=True)
- zip_data = None
- output_name = file_name
+ output_name = file_name
+
+ response = {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
+ base_constants.COMMAND_TARGET: 'events', 'data': '', "output_display_name": output_name,
+ base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.MSG_CATEGORY: 'success',
+ base_constants.MSG: f"Command parsing for {display_name} remodeling was successful"}
+ if dispatch.context_dict and include_summaries:
+ file_list = dispatch.get_context_summaries()
+ file_list.append({'file_name': output_name, 'file_format': '.tsv', 'file_type': 'tabular', 'content': data})
+ response[base_constants.FILE_LIST] = file_list
+ response[base_constants.ZIP_NAME] = generate_filename(display_name, name_suffix=name_suffix + '_zip',
+ extension='.zip', append_datetime=True)
else:
- output_name = generate_filename(display_name, name_suffix=name_suffix + '_zip',
- extension='.zip', append_datetime=True)
- archive = Dispatcher.archive_data_file(df, file_name)
- archive = dispatch.archive_context(archive=archive)
- # Dispatcher.save_archive(archive, 'd:/junk/temp.zip')
- zip_data = archive.getvalue()
- data = None
- return {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
- base_constants.COMMAND_TARGET: 'events', 'data': data, 'zip_data': zip_data,
- "output_display_name": output_name,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f"Command parsing for {display_name} remodeling was successful"}
+ response['data'] = data
+ return response
def search(hed_schema, events, query, columns_included=None):
@@ -233,7 +234,7 @@ def search(hed_schema, events, query, columns_included=None):
dict: A dictionary pointing to results or errors.
"""
- schema_version = hed_schema.version
+
results = validate(hed_schema, events)
if results['data']:
return results
@@ -253,7 +254,8 @@ def search(hed_schema, events, query, columns_included=None):
return {base_constants.COMMAND: base_constants.COMMAND_SEARCH,
base_constants.COMMAND_TARGET: 'events',
'data': csv_string, 'output_display_name': file_name,
- 'schema_version': schema_version, 'msg_category': 'success', 'msg': msg}
+ 'schema_version': hed_schema.get_formatted_version(as_string=True),
+ base_constants.MSG_CATEGORY: 'success', base_constants.MSG: msg}
def validate(hed_schema, events, sidecar=None, check_for_warnings=False):
@@ -270,32 +272,36 @@ def validate(hed_schema, events, sidecar=None, check_for_warnings=False):
"""
- schema_version = hed_schema.version
display_name = events.name
validator = HedValidator(hed_schema=hed_schema)
issue_str = ''
if sidecar:
issues = sidecar.validate_entries(validator, check_for_warnings=check_for_warnings)
+ issues = filter_issues(issues, check_for_warnings)
if issues:
issue_str = issue_str + get_printable_issue_string(issues, title="Sidecar definition errors:")
if not issue_str:
issues = events.validate_file(validator, check_for_warnings=check_for_warnings)
+ issues = filter_issues(issues, check_for_warnings)
if issues:
issue_str = get_printable_issue_string(issues, title="Event file errors:")
if issue_str:
+ data = issue_str
file_name = generate_filename(display_name, name_suffix='_validation_errors',
extension='.txt', append_datetime=True)
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'events',
- 'data': issue_str, "output_display_name": file_name,
- base_constants.SCHEMA_VERSION: schema_version, "msg_category": "warning",
- 'msg': f"Events file {display_name} had validation errors"}
+ category = 'warning'
+ msg = f"Events file {display_name} had validation errors"
else:
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'sidecar', 'data': '',
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f"Events file {display_name} had no validation errors"}
+ data = ''
+ file_name = display_name
+ category = 'success'
+ msg = f"Events file {display_name} had validation errors"
+
+ return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE, base_constants.COMMAND_TARGET: 'events',
+ 'data': data, "output_display_name": file_name,
+ base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
def validate_query(hed_schema, query):
@@ -310,20 +316,18 @@ def validate_query(hed_schema, query):
"""
- schema_version = hed_schema.version
if not query:
- display_name = 'empty_query'
- issue_str = "Empty query could not be processed."
- file_name = generate_filename(display_name, name_suffix='_validation_errors',
+ data = "Empty query could not be processed."
+ file_name = generate_filename('empty_query', name_suffix='_validation_errors',
extension='.txt', append_datetime=True)
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'query',
- 'data': issue_str, "output_display_name": file_name,
- base_constants.SCHEMA_VERSION: schema_version, "msg_category": "warning",
- 'msg': f"Query {display_name} had validation errors"}
+ category = 'warning'
+ msg = f"Empty query could not be processed"
else:
- display_name = 'Nice_query'
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'query', 'data': '',
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f"Events file {display_name} had no validation errors"}
+ data = ''
+ file_name = 'Nice_query'
+ category = 'success'
+ msg = f"Query had no validation errors"
+
+ return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE, base_constants.COMMAND_TARGET: 'query',
+ 'data': data, base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
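Side note (not part of the patch): remodel() now hands back a FILE_LIST of {'file_name', 'file_format', 'file_type', 'content'} entries plus a ZIP_NAME, leaving the zipping to the download layer. A standard-library illustration of how such a list could be bundled (an assumption about the packaging step, not necessarily how web_util does it, and it assumes the summary entries follow the same shape as the tabular entry appended above):

    import io
    import zipfile

    def zip_file_list(file_list):
        """Bundle {'file_name', 'content', ...} entries into an in-memory zip archive."""
        buffer = io.BytesIO()
        with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
            for entry in file_list:
                zf.writestr(entry['file_name'], entry['content'])
        buffer.seek(0)
        return buffer.getvalue()  # bytes suitable for a zip download response body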
diff --git a/hedweb/routes.py b/hedweb/routes.py
index e90373ec..ff8b0a6c 100644
--- a/hedweb/routes.py
+++ b/hedweb/routes.py
@@ -88,7 +88,7 @@ def schema_version_results():
f = request.files[base_constants.SCHEMA_PATH]
hed_schema = hedschema.from_string(f.stream.read(file_constants.BYTE_LIMIT).decode('ascii'),
file_type=secure_filename(f.filename))
- hed_info[base_constants.SCHEMA_VERSION] = hed_schema.version
+ hed_info[base_constants.SCHEMA_VERSION] = hed_schema.get_formatted_version(as_string=True)
return json.dumps(hed_info)
except Exception as ex:
return handle_error(ex)
diff --git a/hedweb/schema.py b/hedweb/schema.py
index 1dc4d545..d52fc1dd 100644
--- a/hedweb/schema.py
+++ b/hedweb/schema.py
@@ -128,7 +128,6 @@ def schema_convert(hed_schema, display_name):
"""
- schema_version = hed_schema.version
schema_format = os.path.splitext(display_name)[1]
if schema_format == file_constants.SCHEMA_XML_EXTENSION:
data = hed_schema.get_as_mediawiki_string()
@@ -141,7 +140,8 @@ def schema_convert(hed_schema, display_name):
return {'command': base_constants.COMMAND_CONVERT_SCHEMA,
base_constants.COMMAND_TARGET: 'schema',
'data': data, 'output_display_name': file_name,
- 'schema_version': schema_version, 'msg_category': 'success',
+ 'schema_version': hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'success',
'msg': 'Schema was successfully converted'}
@@ -157,7 +157,6 @@ def schema_validate(hed_schema, display_name):
"""
- schema_version = hed_schema.version
issues = hed_schema.check_compliance()
if issues:
issue_str = get_printable_issue_string(issues, f"Schema HED 3G compliance errors for {display_name}:")
@@ -165,11 +164,13 @@ def schema_validate(hed_schema, display_name):
return {'command': base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'schema',
'data': issue_str, 'output_display_name': file_name,
- 'schema_version': schema_version, 'msg_category': 'warning',
+ 'schema_version': hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'warning',
'msg': 'Schema is not HED 3G compliant'}
else:
return {'command': base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'schema',
'data': '', 'output_display_name': display_name,
- 'schema_version': schema_version, 'msg_category': 'success',
+ 'schema_version': hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'success',
'msg': 'Schema had no HED-3G validation errors'}
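Side note (not part of the patch): these hunks swap hed_schema.version for get_formatted_version(as_string=True) so that schema groups can report all of their versions. A tiny sketch, with '8.0.0' as a placeholder version:

    from hed import schema as hedschema

    hed_schema = hedschema.load_schema_version('8.0.0')      # load_schema_version also appears below in services.py
    print(hed_schema.get_formatted_version(as_string=True))  # formatted version string carried in the results dict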
diff --git a/hedweb/services.py b/hedweb/services.py
index 3590bf56..e536e977 100644
--- a/hedweb/services.py
+++ b/hedweb/services.py
@@ -29,7 +29,7 @@ def get_input_from_request(request):
arguments = get_service_info(service_request)
arguments[base_constants.SCHEMA] = get_input_schema(service_request)
get_column_parameters(arguments, service_request)
- get_remodeler(arguments, service_request)
+ get_remodel_parameters(arguments, service_request)
get_sidecar(arguments, service_request)
get_input_objects(arguments, service_request)
arguments[base_constants.QUERY] = service_request.get('query', None)
@@ -73,17 +73,18 @@ def get_sidecar(arguments, params):
"""
sidecar_list = []
- if base_constants.JSON_STRING in params and params[base_constants.JSON_STRING]:
- sidecar_list = [params[base_constants.JSON_STRING]]
- elif base_constants.JSON_LIST in params and params[base_constants.JSON_LIST]:
- sidecar_list = params[base_constants.JSON_LIST]
+ if base_constants.SIDECAR_STRING in params and params[base_constants.SIDECAR_STRING]:
+ sidecar_list = params[base_constants.SIDECAR_STRING]
+ if not isinstance(sidecar_list, list):
+ sidecar_list = [sidecar_list]
if sidecar_list:
file_list = []
for s_string in sidecar_list:
file_list.append(io.StringIO(s_string))
- arguments[base_constants.JSON_SIDECAR] = Sidecar(files=file_list, name="Merged_JSON_Sidecar")
+ schema = arguments.get('schema', None)
+ arguments[base_constants.SIDECAR] = Sidecar(files=file_list, name="Merged_Sidecar", hed_schema=schema)
else:
- arguments[base_constants.JSON_SIDECAR] = None
+ arguments[base_constants.SIDECAR] = None
def get_input_objects(arguments, params):
@@ -97,25 +98,26 @@ def get_input_objects(arguments, params):
"""
+ schema = arguments.get('schema', None)
if base_constants.EVENTS_STRING in params and params[base_constants.EVENTS_STRING]:
arguments[base_constants.EVENTS] = \
TabularInput(file=io.StringIO(params[base_constants.EVENTS_STRING]),
- sidecar=arguments.get(base_constants.JSON_SIDECAR, None), name='Events')
+ sidecar=arguments.get(base_constants.SIDECAR, None), name='Events', hed_schema=schema)
if base_constants.SPREADSHEET_STRING in params and params[base_constants.SPREADSHEET_STRING]:
tag_columns, prefix_dict = spreadsheet.get_prefix_dict(params)
has_column_names = arguments.get(base_constants.HAS_COLUMN_NAMES, None)
arguments[base_constants.SPREADSHEET] = \
SpreadsheetInput(file=io.StringIO(params[base_constants.SPREADSHEET_STRING]), file_type=".tsv",
tag_columns=tag_columns, has_column_names=has_column_names,
- column_prefix_dictionary=prefix_dict, name='spreadsheet.tsv')
+ column_prefix_dictionary=prefix_dict, name='spreadsheet.tsv', hed_schema=schema)
if base_constants.STRING_LIST in params and params[base_constants.STRING_LIST]:
s_list = []
for s in params[base_constants.STRING_LIST]:
- s_list.append(HedString(s))
+ s_list.append(HedString(s, hed_schema=schema))
arguments[base_constants.STRING_LIST] = s_list
-def get_remodeler(arguments, params):
+def get_remodel_parameters(arguments, params):
""" Update arguments with the remodeler information if any.
Args:
@@ -126,12 +128,9 @@ def get_remodeler(arguments, params):
"""
- remodel = None
- if base_constants.REMODEL_FILE in params:
- f = io.StringIO(base_constants.REMODEL_FILE)
- name = 'remodel_commands.json'
- remodel = {'name': name, 'operations': json.load(f)}
- arguments[base_constants.REMODEL_OPERATIONS] = remodel
+ if base_constants.REMODEL_STRING in params:
+ arguments[base_constants.REMODEL_OPERATIONS] = \
+ {'name': 'remodel_commands.json', 'operations': json.loads(params[base_constants.REMODEL_STRING])}
def get_service_info(params):
@@ -183,8 +182,9 @@ def get_input_schema(parameters):
schema_url = parameters[base_constants.SCHEMA_URL]
the_schema = hedschema.load_schema(schema_url)
elif base_constants.SCHEMA_VERSION in parameters and parameters[base_constants.SCHEMA_VERSION]:
- hed_file_path = hedschema.get_path_from_hed_version(parameters[base_constants.SCHEMA_VERSION])
- the_schema = hedschema.load_schema(hed_file_path)
+ # hed_file_path = hedschema.get_path_from_hed_version(parameters[base_constants.SCHEMA_VERSION])
+ versions = parameters[base_constants.SCHEMA_VERSION]
+ the_schema = hedschema.load_schema_version(versions)
except HedFileError:
the_schema = None
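As a rough illustration of the new service parameter handling above, the sketch below normalizes a sidecar_string value (a single string or a list of strings) into one merged Sidecar and parses remodel_string with json.loads, mirroring get_sidecar and get_remodel_parameters. The literal dictionary keys and the sample payload are assumptions for the example; the real code goes through base_constants and the incoming request.

import io
import json

from hed.models import Sidecar  # constructed with files=/name= as in the diff above

# Hypothetical service payload; sidecar_string may be a single string or a list of strings.
params = {
    "sidecar_string": ['{"event_type": {"HED": {"show_face": "Sensory-event"}}}'],
    "remodel_string": json.dumps([{"operation": "remove_columns",
                                   "parameters": {"remove_names": ["value"],
                                                  "ignore_missing": True}}]),
}
arguments = {}

# Normalize to a list and merge into a single Sidecar, as get_sidecar now does.
sidecar_list = params["sidecar_string"]
if not isinstance(sidecar_list, list):
    sidecar_list = [sidecar_list]
arguments["sidecar"] = Sidecar(files=[io.StringIO(s) for s in sidecar_list],
                               name="Merged_Sidecar")

# Parse the remodel operations, as get_remodel_parameters now does.
if "remodel_string" in params:
    arguments["remodel_operations"] = {"name": "remodel_commands.json",
                                       "operations": json.loads(params["remodel_string"])}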
diff --git a/hedweb/sidecar.py b/hedweb/sidecar.py
index dc655454..261f8d4c 100644
--- a/hedweb/sidecar.py
+++ b/hedweb/sidecar.py
@@ -9,9 +9,10 @@
from hed.errors import HedFileError, get_printable_issue_string
from hed.models import SpreadsheetInput, Sidecar
+from hed.schema.hed_schema_io import get_schema_versions
from hed.tools import df_to_hed, generate_filename, hed_to_df, merge_hed_dict
from hedweb.constants import base_constants, file_constants
-from hedweb.web_util import form_has_option, get_hed_schema_from_pull_down
+from hedweb.web_util import form_has_option, filter_issues, get_hed_schema_from_pull_down
app_config = current_app.config
@@ -27,7 +28,7 @@ def get_input_from_form(request):
"""
- arguments = {base_constants.SCHEMA: get_hed_schema_from_pull_down(request), base_constants.JSON_SIDECAR: None,
+ arguments = {base_constants.SCHEMA: get_hed_schema_from_pull_down(request), base_constants.SIDECAR: None,
base_constants.COMMAND: request.form.get(base_constants.COMMAND_OPTION, None),
base_constants.CHECK_FOR_WARNINGS:
form_has_option(request, base_constants.CHECK_FOR_WARNINGS, 'on'),
@@ -37,10 +38,10 @@ def get_input_from_form(request):
form_has_option(request, base_constants.INCLUDE_DESCRIPTION_TAGS, 'on'),
base_constants.SPREADSHEET_TYPE: file_constants.TSV_EXTENSION,
}
- if base_constants.JSON_FILE in request.files:
- f = request.files[base_constants.JSON_FILE]
+ if base_constants.SIDECAR_FILE in request.files:
+ f = request.files[base_constants.SIDECAR_FILE]
fb = io.StringIO(f.read(file_constants.BYTE_LIMIT).decode('ascii'))
- arguments[base_constants.JSON_SIDECAR] = Sidecar(files=fb, name=secure_filename(f.filename))
+ arguments[base_constants.SIDECAR] = Sidecar(files=fb, name=secure_filename(f.filename))
if base_constants.SPREADSHEET_FILE in request.files and \
request.files[base_constants.SPREADSHEET_FILE].filename:
filename = request.files[base_constants.SPREADSHEET_FILE].filename
@@ -71,10 +72,10 @@ def process(arguments):
pass
elif not hed_schema or not isinstance(hed_schema, hedschema.hed_schema.HedSchema):
raise HedFileError('BadHedSchema', "Please provide a valid HedSchema", "")
- sidecar = arguments.get(base_constants.JSON_SIDECAR, None)
+ sidecar = arguments.get(base_constants.SIDECAR, None)
spreadsheet = arguments.get(base_constants.SPREADSHEET, 'None')
if not sidecar:
- raise HedFileError('MissingJSONFile', "Please give a valid JSON file to process", "")
+ raise HedFileError('MissingSidecarFile', "Please give a valid JSON sidecar file to process", "")
check_for_warnings = arguments.get(base_constants.CHECK_FOR_WARNINGS, False)
expand_defs = arguments.get(base_constants.EXPAND_DEFS, False)
include_description_tags = arguments.get(base_constants.INCLUDE_DESCRIPTION_TAGS, False)
@@ -105,10 +106,6 @@ def sidecar_convert(hed_schema, sidecar, command=base_constants.COMMAND_TO_SHORT
"""
- schema_version = hed_schema.version
- # results = sidecar_validate(hed_schema, sidecar, check_for_warnings=False)
- # if results['data']:
- # return results
if command == base_constants.COMMAND_TO_LONG:
tag_form = 'long_tag'
else:
@@ -124,23 +121,22 @@ def sidecar_convert(hed_schema, sidecar, command=base_constants.COMMAND_TO_SHORT
# issues = ErrorHandler.filter_issues_by_severity(issues, ErrorSeverity.ERROR)
display_name = sidecar.name
+ issues = filter_issues(issues, False)
if issues:
- issue_str = get_printable_issue_string(issues, f"JSON conversion for {display_name} was unsuccessful")
+ data = get_printable_issue_string(issues, f"JSON conversion for {display_name} was unsuccessful")
file_name = generate_filename(display_name, name_suffix=f"_{tag_form}_conversion_errors",
extension='.txt', append_datetime=True)
- return {base_constants.COMMAND: command,
- base_constants.COMMAND_TARGET: 'sidecar',
- 'data': issue_str, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'warning',
- 'msg': f'JSON file {display_name} had validation errors'}
+ category = 'warning'
+ msg = f'Sidecar file {display_name} had validation errors'
else:
file_name = generate_filename(display_name, name_suffix=f"_{tag_form}", extension='.json', append_datetime=True)
data = sidecar.get_as_json_string()
- return {base_constants.COMMAND: command,
- base_constants.COMMAND_TARGET: 'sidecar',
- 'data': data, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f'JSON sidecar {display_name} was successfully converted'}
+ category = 'success'
+ msg = f'Sidecar file {display_name} was successfully converted'
+ return {base_constants.COMMAND: command, base_constants.COMMAND_TARGET: 'sidecar',
+ 'data': data, 'output_display_name': file_name,
+ base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ 'msg_category': category, 'msg': msg}
def sidecar_extract(sidecar):
@@ -210,21 +206,22 @@ def sidecar_validate(hed_schema, sidecar, check_for_warnings=False):
"""
- schema_version = hed_schema.version
display_name = sidecar.name
validator = HedValidator(hed_schema)
issues = sidecar.validate_entries(validator, check_for_warnings=check_for_warnings)
if issues:
- issue_str = get_printable_issue_string(issues, f"JSON dictionary {sidecar.name} validation errors")
+ data = get_printable_issue_string(issues, f"JSON dictionary {sidecar.name} validation errors")
file_name = generate_filename(display_name, name_suffix='validation_errors',
extension='.txt', append_datetime=True)
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'sidecar',
- 'data': issue_str, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'warning',
- 'msg': f'JSON sidecar {display_name} had validation errors'}
+ category = 'warning'
+ msg = f'JSON sidecar {display_name} had validation errors'
else:
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'sidecar', 'data': '',
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f'JSON file {display_name} had no validation errors'}
+ data = ''
+ file_name = display_name
+ category = 'success'
+ msg = f'JSON sidecar {display_name} had no validation errors'
+
+ return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE, base_constants.COMMAND_TARGET: 'sidecar',
+ 'data': data, 'output_display_name': file_name,
+ base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
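For orientation, here is a condensed sketch of the results dictionary that the refactored sidecar (and spreadsheet) functions now return; get_schema_versions and the key names come from the diff, while make_results itself and the plain-string keys standing in for base_constants are only illustrative.

from hed.schema.hed_schema_io import get_schema_versions  # same import as added above


def make_results(command, target, data, display_name, category, msg, hed_schema):
    """Illustrative helper: the common shape of hedweb results dictionaries."""
    return {
        "command": command,                    # e.g. 'validate' or 'to_long'
        "command_target": target,              # e.g. 'sidecar' or 'spreadsheet'
        "data": data,                          # issue text or converted content
        "output_display_name": display_name,   # suggested download file name
        "schema_version": get_schema_versions(hed_schema, as_string=True),
        "msg_category": category,              # 'success' or 'warning'
        "msg": msg,
    }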
diff --git a/hedweb/spreadsheet.py b/hedweb/spreadsheet.py
index 2f49cc6c..aa89e2fe 100644
--- a/hedweb/spreadsheet.py
+++ b/hedweb/spreadsheet.py
@@ -2,6 +2,7 @@
from flask import current_app
from werkzeug.utils import secure_filename
from hed import schema as hedschema
+from hed.schema.hed_schema_io import get_schema_versions
from hed.errors import get_printable_issue_string, HedFileError
from hed.models import SpreadsheetInput
from hed.tools import generate_filename
@@ -9,7 +10,7 @@
from hedweb.constants import base_constants, file_constants
from hedweb.columns import get_prefix_dict
-from hedweb.web_util import form_has_option, get_hed_schema_from_pull_down
+from hedweb.web_util import filter_issues, form_has_option, get_hed_schema_from_pull_down
app_config = current_app.config
@@ -96,7 +97,6 @@ def spreadsheet_convert(hed_schema, spreadsheet, command=base_constants.COMMAND_
"""
- schema_version = hed_schema.version
results = spreadsheet_validate(hed_schema, spreadsheet, check_for_warnings=check_for_warnings)
if results['data']:
return results
@@ -115,8 +115,9 @@ def spreadsheet_convert(hed_schema, spreadsheet, command=base_constants.COMMAND_
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'spreadsheet', 'data': '',
base_constants.SPREADSHEET: spreadsheet, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f'Spreadsheet {display_name} converted_successfully'}
+ base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.MSG_CATEGORY: 'success',
+ base_constants.MSG: f'Spreadsheet {display_name} converted successfully'}
def spreadsheet_validate(hed_schema, spreadsheet, check_for_warnings=False):
@@ -131,21 +132,25 @@ def spreadsheet_validate(hed_schema, spreadsheet, check_for_warnings=False):
dict: A dictionary containing results of validation in standard format.
"""
- schema_version = hed_schema.version
+
validator = HedValidator(hed_schema=hed_schema)
issues = spreadsheet.validate_file(validator, check_for_warnings=check_for_warnings)
display_name = spreadsheet.name
+ issues = filter_issues(issues, check_for_warnings)
if issues:
- issue_str = get_printable_issue_string(issues, f"Spreadsheet {display_name} validation errors")
+ data = get_printable_issue_string(issues, f"Spreadsheet {display_name} validation errors")
file_name = generate_filename(display_name, name_suffix='_validation_errors',
extension='.txt', append_datetime=True)
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'spreadsheet',
- 'data': issue_str, "output_display_name": file_name,
- base_constants.SCHEMA_VERSION: schema_version, "msg_category": "warning",
- 'msg': f"Spreadsheet {display_name} had validation errors"}
+ category = "warning"
+ msg = f"Spreadsheet {file_name} had validation errors"
else:
- return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
- base_constants.COMMAND_TARGET: 'spreadsheet', 'data': '',
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
- 'msg': f'Spreadsheet {display_name} had no validation errors'}
+ data = ''
+ file_name = display_name
+ category = 'success'
+ msg = f'Spreadsheet {display_name} had no validation errors'
+
+ return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
+ base_constants.COMMAND_TARGET: 'spreadsheet', 'data': data,
+ base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ "output_display_name": file_name,
+ base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
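The warning filtering that spreadsheet_validate now applies can be exercised on its own; the sketch below reuses the severity filter and the reporting calls shown in this diff, with summarize_validation being a hypothetical helper rather than part of hedweb.

from hed.errors import ErrorHandler, ErrorSeverity, get_printable_issue_string
from hed.tools import generate_filename


def summarize_validation(issues, display_name, check_for_warnings=False):
    """Hypothetical helper: drop warnings unless requested, then build the report pieces."""
    if not check_for_warnings:  # same rule as web_util.filter_issues below
        issues = ErrorHandler.filter_issues_by_severity(issues, ErrorSeverity.ERROR)
    if issues:
        data = get_printable_issue_string(issues, f"Spreadsheet {display_name} validation errors")
        file_name = generate_filename(display_name, name_suffix='_validation_errors',
                                      extension='.txt', append_datetime=True)
        return data, file_name, 'warning', f"Spreadsheet {display_name} had validation errors"
    return '', display_name, 'success', f"Spreadsheet {display_name} had no validation errors"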
diff --git a/hedweb/static/img/temp2.zip b/hedweb/static/img/temp2.zip
new file mode 100644
index 0000000000000000000000000000000000000000..6fe9ca67db312bb05cf9a29a14bb1ffbb4a653ea
GIT binary patch
literal 849
zcmWIWW@Zs#U|`^2=x%fg)p)*?;|`E#&BVZ93ZxzVeLbDylk;;*bMxXeQWH~Bi;Clo
z42+E84GrQAOyWc04b9>WjpB`r<1H
|ZP>%U<>@ZY#HBK#P+2&o$Rx
zh5zmPb5`cc@ndVAT23^1HAT7W;~fbh-l!Gw8EaJR9E|SAG<53d@d|3TR`DO%QTG4U
zqH9n0?Au)LFz?oHll(2`&xZVGzozo@J?D$L7bkrBxA1+rzr*Hb5-cxVbY2Etdg=CI
z=gT^V0F>Z*BWShpBQTim0fWnotl-Kj&d=+e2oEo<=S^0Hw{ssIVf@Iz;Tj1HuVb7W
zjknJF^)B1E<;5ljOV|JZZ|+Z>zpARv$vQ^jvi&|D2tCRi@JpFg%$asUjWy
z<{Pi(+UFO)+`r4mwfj+yLC2ZO(}^4BxPEKfvB&M{nv0Vj{acr7&M(BqdvJ4$vchHC
z%OBk?dCgq)HGkDI;{b0)CJ_eQsSX&DV9>w_qR2{v=vJV|6vP4sh6YA`pe3YR0gAT(
UZ&o&tIZQx!97vY{Qvm}504+^MA^-pY
literal 0
HcmV?d00001
diff --git a/hedweb/static/resources/services.json b/hedweb/static/resources/services.json
index 0314d547..bccd9ecd 100644
--- a/hedweb/static/resources/services.json
+++ b/hedweb/static/resources/services.json
@@ -10,10 +10,7 @@
"Description": "Validate a BIDS-style event file and JSON sidecar if provided. ",
"Parameters": [
"events_string",
- [
- "json_list",
- "json_string"
- ],
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -28,10 +25,7 @@
"Description": "Search a BIDS-style event file and return list of event numbers satisfying search.",
"Parameters": [
"events_string",
- [
- "json_list",
- "json_string"
- ],
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -47,10 +41,7 @@
"Parameters": [
"events_string",
"columns_included",
- [
- "json_list",
- "json_string"
- ],
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -76,10 +67,7 @@
"Parameters": [
"events_string",
"remodel_string",
- [
- "json_list",
- "json_string"
- ],
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -92,7 +80,7 @@
"sidecar_validate": {
"Description": "Validate a BIDS JSON sidecar (in string form) and return errors.",
"Parameters": [
- "json_string",
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -105,7 +93,7 @@
"sidecar_to_long": {
"Description": "Convert a JSON sidecar with all of its HED tags expressed in long form.",
"Parameters": [
- "json_string",
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -118,7 +106,7 @@
"sidecar_to_short": {
"Description": "Convert a JSON sidecar with all of its HED tags expressed in short form.",
"Parameters": [
- "json_string",
+ "sidecar_string",
[
"schema_string",
"schema_url",
@@ -131,14 +119,14 @@
"sidecar_extract_spreadsheet": {
"Description": "Convert the HED portion of a JSON sidecar to a 4-column spreadsheet.",
"Parameters": [
- "json_string"
+ "sidecar_string"
],
"Returns": "A string containing a 4-column tab-separated value spreadsheet extracted from the JSON."
},
"sidecar_merge_spreadsheet": {
"Description": "Merge the information in a 4-column spreadsheet into the HED portion of a JSON sidecar.",
"Parameters": [
- "json_string",
+ "sidecar_string",
"spreadsheet_string",
"include_description_tags"
],
@@ -248,13 +236,12 @@
"has_column_names": "If true, interpret the first row of file as column names.",
"hed_strings": "List of HED strings to be processed.",
"include_description_tag": "Include the Description/XXX tag in the tag string",
- "json_list": "A list of BIDS JSON sidecars as strings.",
- "json_string": "A JSON sidecar as a string.",
"query_list": "A list of query strings for searching.",
"remodel_string": "JSON remodel commands as a string",
"schema_string": "HED XML schema as a string.",
"schema_url": "A URL from which a HED schema can be downloaded.",
"schema_version": "Version of HED to used in processing.",
+ "sidecar_string": "A JSON sidecar as a string or a list of JSON sidecar strings.",
"spreadsheet_string": "A spreadsheet tsv as a string."
},
"returns": {
diff --git a/hedweb/strings.py b/hedweb/strings.py
index 992838bd..ebe987d2 100644
--- a/hedweb/strings.py
+++ b/hedweb/strings.py
@@ -81,7 +81,6 @@ def convert(hed_schema, string_list, command=base_constants.COMMAND_TO_SHORT, ch
"""
- schema_version = hed_schema.version
results = validate(hed_schema, string_list, check_for_warnings=check_for_warnings)
if results['data']:
return results
@@ -100,12 +99,14 @@ def convert(hed_schema, string_list, command=base_constants.COMMAND_TO_SHORT, ch
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'strings',
'data': conversion_errors, 'additional_info': string_list,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'warning',
+ base_constants.SCHEMA_VERSION: hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'warning',
'msg': 'Some strings had conversion errors, results of conversion in additional_info'}
else:
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'strings', 'data': strings,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
+ base_constants.SCHEMA_VERSION: hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'success',
'msg': 'Strings converted successfully'}
@@ -121,7 +122,6 @@ def validate(hed_schema, string_list, check_for_warnings=False):
dict: The results in standard form.
"""
- schema_version = hed_schema.version
hed_validator = HedValidator(hed_schema=hed_schema)
validation_errors = []
@@ -132,10 +132,12 @@ def validate(hed_schema, string_list, check_for_warnings=False):
if validation_errors:
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'strings', 'data': validation_errors,
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'warning',
+ base_constants.SCHEMA_VERSION: hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'warning',
'msg': 'Strings had validation errors'}
else:
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'strings', 'data': '',
- base_constants.SCHEMA_VERSION: schema_version, 'msg_category': 'success',
+ base_constants.SCHEMA_VERSION: hed_schema.get_formatted_version(as_string=True),
+ 'msg_category': 'success',
'msg': 'Strings validated successfully...'}
diff --git a/hedweb/templates/events.html b/hedweb/templates/events.html
index cbf23144..4ebf253a 100644
--- a/hedweb/templates/events.html
+++ b/hedweb/templates/events.html
@@ -1,6 +1,6 @@
{% extends "layout.html" %}
{% from "schema-pulldown.html" import create_schema_pulldown %}
-{% from "json-input.html" import create_json_input %}
+{% from "sidecar-input.html" import create_sidecar_input %}
{% from "remodel-input.html" import create_remodel_input %}
{% from "column-info.html" import create_column_info %}
{% from "actions.html" import create_actions %}
@@ -15,7 +15,7 @@ Process a BIDS-style event
{{ create_actions('Pick an action:',assemble=True,generate_sidecar=True,validate=True,remodel=True) }}
- {{ create_options('Check applicable options if any:',check_for_warnings=True,expand_defs=True) }}
+ {{ create_options('Check applicable options if any:',check_for_warnings=True,expand_defs=True,include_summaries=True) }}
Upload events file:
@@ -35,7 +35,7 @@
Upload events file:
{{ create_remodel_input('Upload remodel instructions (JSON):') }}
- {{ create_json_input('Upload BIDS-style JSON sidecar if needed:') }}
+ {{ create_sidecar_input('Upload BIDS-style JSON sidecar if needed:') }}
{{ create_schema_pulldown('Choose a HED schema version if needed:') }}
@@ -51,7 +51,7 @@
Process
{% include 'js/column-info.js' %}
{% include 'js/form-helpers.js' %}
{% include 'js/schema-pulldown.js' %}
- {% include 'js/json-input.js' %}
+ {% include 'js/sidecar-input.js' %}
{% include 'js/remodel-input.js' %}
{% include 'js/options.js' %}
{% include 'js/events-form.js' %}
diff --git a/hedweb/templates/js/events-form.js b/hedweb/templates/js/events-form.js
index f11c0f31..38e176f8 100644
--- a/hedweb/templates/js/events-form.js
+++ b/hedweb/templates/js/events-form.js
@@ -56,7 +56,7 @@ function clearForm() {
function clearFlashMessages() {
clearColumnInfoFlashMessages();
clearSchemaSelectFlashMessages();
- clearJsonInputFlashMessages();
+ clearSidecarInputFlashMessages();
clearRemodelInputFlashMessages();
flashMessageOnScreen('', 'success', 'events_flash');
flashMessageOnScreen('', 'success', 'events_submit_flash');
@@ -96,34 +96,38 @@ function setEventsTable(event_tag) {
function setOptions() {
if ($("#validate").is(":checked")) {
hideOption("expand_defs");
+ hideOption("include_summaries")
hideOption("use_hed");
showOption("check_for_warnings");
$("#remodel_input_section").hide();
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#schema_pulldown_section").show();
$("#options_section").show();
} else if ($("#assemble").is(":checked")) {
hideOption("check_for_warnings");
+ hideOption("include_summaries")
hideOption("use_hed");
showOption("expand_defs");
$("#remodel_input_section").hide();
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#schema_pulldown_section").show();
$("#options_section").show();
} else if ($("#generate_sidecar").is(":checked")) {
hideOption("check_for_warnings");
hideOption("expand_defs");
+ hideOption("include_summaries")
hideOption("use_hed");
$("#remodel_input_section").hide();
- $("#json_input_section").hide();
+ $("#sidecar_input_section").hide();
$("#schema_pulldown_section").hide();
$("#options_section").hide();
} else if ($("#remodel").is(":checked")) {
hideOption("check_for_warnings");
hideOption("expand_defs");
hideOption("use_hed");
- $("#options_section").hide();
- $("#json_input_section").show();
+ showOption("include_summaries")
+ $("#options_section").show();
+ $("#sidecar_input_section").show();
$("#remodel_input_section").show();
$("#schema_pulldown_section").show();
}
@@ -138,23 +142,29 @@ function submitForm() {
let formData = new FormData(eventsForm);
let prefix = 'issues';
let eventsFile = $('#events_file')[0].files[0].name;
+ let includeSummaries = $('#include_summaries').is(':checked')
let display_name = convertToResultsName(eventsFile, prefix)
clearFlashMessages();
flashMessageOnScreen('Event file is being processed ...', 'success',
'events_submit_flash')
- $.ajax({
- type: 'POST',
- url: "{{url_for('route_blueprint.events_results')}}",
- data: formData,
- contentType: false,
- processData: false,
- dataType: 'text',
- success: function (download, status, xhr) {
- getResponseSuccess(download, xhr, display_name, 'events_submit_flash')
- },
- error: function (xhr, status, errorThrown) {
- getResponseFailure(xhr, status, errorThrown, display_name, 'events_submit_flash')
- }
+ let postType = {
+ type: 'POST',
+ url: "{{url_for('route_blueprint.events_results')}}",
+ data: formData,
+ contentType: false,
+ processData: false,
+
+ success: function (download, status, xhr) {
+ getResponseSuccess(download, xhr, display_name, 'events_submit_flash')
+ },
+ error: function (xhr, status, errorThrown) {
+ getResponseFailure(xhr, status, errorThrown, display_name, 'events_submit_flash')
}
- )
+ }
+ if (includeSummaries){
+ postType["xhrFields"] = {
+ responseType: 'blob'
+ }
+ }
+ $.ajax(postType)
}
diff --git a/hedweb/templates/js/form-helpers.js b/hedweb/templates/js/form-helpers.js
index 5fdd2e00..6a59a2e9 100644
--- a/hedweb/templates/js/form-helpers.js
+++ b/hedweb/templates/js/form-helpers.js
@@ -186,12 +186,10 @@ function splitExt(filename) {
* @param {String} content_type - Type of file to create
*/
function triggerDownloadBlob(download_blob, display_name, content_type) {
- // const url = URL.createObjectURL(new Blob([download_blob]));
- const url = URL.createObjectURL(new Blob([download_blob], {type: content_type}))
+ const url = URL.createObjectURL(new Blob([download_blob], {type:content_type}));
const link = document.createElement('a');
link.href = url;
link.setAttribute('download', display_name);
- link.setAttribute('type', content_type)
document.body.appendChild(link);
link.click();
}
diff --git a/hedweb/templates/js/json-input.js b/hedweb/templates/js/json-input.js
deleted file mode 100644
index fc9eee68..00000000
--- a/hedweb/templates/js/json-input.js
+++ /dev/null
@@ -1,37 +0,0 @@
-
-
-/**
- * Sidecar event handler function. Checks if the file uploaded has a valid sidecar extension.
- */
-$('#json_file').on('change',function () {
- let jsonPath = $('#json_file').val();
- clearFlashMessages();
- if (cancelWasPressedInChromeFileUpload(jsonPath)) {
- clearForm();
- }
- else if (fileHasValidExtension(jsonPath, JSON_FILE_EXTENSIONS)) {
- updateFileLabel(jsonPath, '#json_display_name');
- } else {
- clearForm();
- flashMessageOnScreen('Please upload a JSON sidecar (.json)', 'error', 'json_flash');
- }
-});
-
-/**
- * Clears the sidecar file label.
- */
-function clearJsonFileLabel() {
- $('#json_display_name').text('');
-}
-
-/**
- * Resets the flash messages that aren't related to the form submission.
- */
-function clearJsonInputFlashMessages() {
- flashMessageOnScreen('', 'success', 'json_flash');
-}
-
-function getJsonFileLabel() {
- return $('#json_file')[0].files[0].name;
-}
-
diff --git a/hedweb/templates/js/sidecar-form.js b/hedweb/templates/js/sidecar-form.js
index 65578b22..f63520e5 100644
--- a/hedweb/templates/js/sidecar-form.js
+++ b/hedweb/templates/js/sidecar-form.js
@@ -14,7 +14,7 @@ $('#process_actions').change(function(){
* Submit the form on click if schema and json file specified.
*/
$('#sidecar_submit').on('click', function () {
- if (fileIsSpecified('#json_file', 'json_flash', 'JSON is not specified.' ) &&
+ if (fileIsSpecified('#sidecar_file', 'sidecar_flash', 'Sidecar file is not specified.' ) &&
schemaSpecifiedWhenOtherIsSelected()) {
submitForm();
}
@@ -29,7 +29,7 @@ function clearForm() {
clearWorksheet()
setOptions();
clearFlashMessages()
- clearJsonFileLabel();
+ clearSidecarFileLabel();
hideOtherSchemaVersionFileUpload()
}
@@ -37,7 +37,7 @@ function clearForm() {
* Clear the flash messages that aren't related to the form submission.
*/
function clearFlashMessages() {
- clearJsonInputFlashMessages();
+ clearSidecarInputFlashMessages();
clearSchemaSelectFlashMessages();
flashMessageOnScreen('', 'success', 'sidecar_submit_flash');
}
@@ -60,7 +60,7 @@ function setOptions() {
hideOption("expand_defs");
showOption("check_for_warnings");
hideOption("include_description_tags");
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#spreadsheet_input_section").hide();
$("#schema_pulldown_section").show();
$("#options_section").show();
@@ -68,7 +68,7 @@ function setOptions() {
hideOption("check_for_warnings");
showOption("expand_defs");
hideOption("include_description_tags");
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#spreadsheet_input_section").hide();
$("#schema_pulldown_section").show();
$("#options_section").show();
@@ -76,7 +76,7 @@ function setOptions() {
hideOption("check_for_warnings");
showOption("expand_defs");
hideOption("include_description_tags");
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#spreadsheet_input_section").hide();
$("#schema_pulldown_section").show();
$("#options_section").show();
@@ -84,7 +84,7 @@ function setOptions() {
hideOption("check_for_warnings");
hideOption("expand_defs");
hideOption("include_description_tags");
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#spreadsheet_input_section").hide();
$("#schema_pulldown_section").hide();
$("#options_section").hide();
@@ -92,7 +92,7 @@ function setOptions() {
hideOption("expand_defs");
hideOption("check_for_warnings");
showOption("include_description_tags");
- $("#json_input_section").show();
+ $("#sidecar_input_section").show();
$("#spreadsheet_input_section").show();
$("#schema_pulldown_section").hide();
$("#options_section").show();
diff --git a/hedweb/templates/js/sidecar-input.js b/hedweb/templates/js/sidecar-input.js
new file mode 100644
index 00000000..0fb0acb9
--- /dev/null
+++ b/hedweb/templates/js/sidecar-input.js
@@ -0,0 +1,37 @@
+
+
+/**
+ * Sidecar event handler function. Checks if the file uploaded has a valid sidecar extension.
+ */
+$('#sidecar_file').on('change',function () {
+ let sidecarPath = $('#sidecar_file').val();
+ clearFlashMessages();
+ if (cancelWasPressedInChromeFileUpload(sidecarPath)) {
+ clearForm();
+ }
+ else if (fileHasValidExtension(sidecarPath, JSON_FILE_EXTENSIONS)) {
+ updateFileLabel(sidecarPath, '#sidecar_display_name');
+ } else {
+ clearForm();
+ flashMessageOnScreen('Please upload a JSON sidecar (.json)', 'error', 'sidecar_flash');
+ }
+});
+
+/**
+ * Clears the sidecar file label.
+ */
+function clearSidecarFileLabel() {
+ $('#sidecar_display_name').text('');
+}
+
+/**
+ * Resets the flash messages that aren't related to the form submission.
+ */
+function clearSidecarInputFlashMessages() {
+ flashMessageOnScreen('', 'success', 'sidecar_flash');
+}
+
+function getSidecarFileLabel() {
+ return $('#sidecar_file')[0].files[0].name;
+}
+
diff --git a/hedweb/templates/js/string-form.js b/hedweb/templates/js/string-form.js
index a0ee6938..0a375f42 100644
--- a/hedweb/templates/js/string-form.js
+++ b/hedweb/templates/js/string-form.js
@@ -71,11 +71,6 @@ function setOptions() {
* Checks to see if a hedstring has been specified.
*/
function stringIsSpecified() {
-/* let jsonFile = $('#json_file');
- if (jsonFile[0].files.length === 0) {
- flashMessageOnScreen('JSON is not specified.', 'error', 'json_flash');
- return false;
- }*/
return true;
}
diff --git a/hedweb/templates/json-input.html b/hedweb/templates/json-input.html
deleted file mode 100644
index a3fbafb9..00000000
--- a/hedweb/templates/json-input.html
+++ /dev/null
@@ -1,14 +0,0 @@
-{% macro create_json_input(title) %}
-
-{% endmacro %}
\ No newline at end of file
diff --git a/hedweb/templates/options.html b/hedweb/templates/options.html
index eef6c74a..248712e4 100644
--- a/hedweb/templates/options.html
+++ b/hedweb/templates/options.html
@@ -1,4 +1,4 @@
-{% macro create_options(title,check_for_warnings=False,expand_defs=False,include_description_tags=False) %}
+{% macro create_options(title,check_for_warnings=False,expand_defs=False,include_description_tags=False,include_summaries=False) %}
{{ title }}
@@ -22,6 +22,13 @@
{{ title }}
{% endif %}
+
+ {% if include_summaries %}
+
+
+
+
+ {% endif %}
{% endmacro %}
\ No newline at end of file
diff --git a/hedweb/templates/sidecar-input.html b/hedweb/templates/sidecar-input.html
new file mode 100644
index 00000000..f4049532
--- /dev/null
+++ b/hedweb/templates/sidecar-input.html
@@ -0,0 +1,14 @@
+{% macro create_sidecar_input(title) %}
+
+{% endmacro %}
\ No newline at end of file
diff --git a/hedweb/templates/sidecar.html b/hedweb/templates/sidecar.html
index 21f05390..5017546f 100644
--- a/hedweb/templates/sidecar.html
+++ b/hedweb/templates/sidecar.html
@@ -1,5 +1,5 @@
{% extends "layout.html" %}
-{% from "json-input.html" import create_json_input %}
+{% from "sidecar-input.html" import create_sidecar_input %}
{% from "schema-pulldown.html" import create_schema_pulldown %}
{% from "actions.html" import create_actions %}
{% from "options.html" import create_options %}
@@ -17,7 +17,7 @@ Process BIDS-style JSON sidecar
check_for_warnings=True,expand_defs=True,include_description_tags=True) }}
- {{ create_json_input('Upload BIDS-style JSON sidecar:') }}
+ {{ create_sidecar_input('Upload BIDS-style JSON sidecar:') }}
{{ create_spreadsheet_input('Upload 4-column HED spreadsheet:',has_column_names_option=False) }}
@@ -34,7 +34,7 @@ Process:
{% include 'js/column-info.js' %}
{% include 'js/form-helpers.js' %}
{% include 'js/options.js' %}
- {% include 'js/json-input.js' %}
+ {% include 'js/sidecar-input.js' %}
{% include 'js/spreadsheet-input.js' %}
{% include 'js/schema-pulldown.js' %}
{% include 'js/sidecar-form.js' %}
diff --git a/hedweb/web_util.py b/hedweb/web_util.py
index f1fadd84..41ddc4da 100644
--- a/hedweb/web_util.py
+++ b/hedweb/web_util.py
@@ -1,14 +1,14 @@
import io
import json
import os
-import base64
+import zipfile
from urllib.parse import urlparse
from flask import current_app, Response, make_response, send_file
from werkzeug.utils import secure_filename
from hed import schema as hedschema
-from hed.errors import HedFileError
+from hed.errors import HedFileError, ErrorSeverity, ErrorHandler
from hedweb.constants import base_constants, file_constants
app_config = current_app.config
@@ -28,6 +28,13 @@ def file_extension_is_valid(filename, accepted_extensions=None):
return not accepted_extensions or os.path.splitext(filename.lower())[1] in accepted_extensions
+def filter_issues(issues, check_for_warnings):
+ """ Filter an issues list by severity level to allow warnings. """
+ if not check_for_warnings:
+ issues = ErrorHandler.filter_issues_by_severity(issues, ErrorSeverity.ERROR)
+ return issues
+
+
def form_has_file(request, file_field, valid_extensions=None):
""" Return True if a file with valid extension is in the request.
@@ -104,9 +111,7 @@ def generate_download_file_from_text(results, file_header=None):
download_text = results.get('data', '')
if not download_text:
raise HedFileError('EmptyDownloadText', "No download text given", "")
- headers = {'Content-Disposition': f"attachment filename={display_name}",
- 'Category': results[base_constants.MSG_CATEGORY],
- 'Message': results[base_constants.MSG]}
+
def generate():
if file_header:
yield file_header
@@ -178,49 +183,19 @@ def generate_download_zip_file(results):
"""
- archive = results['zip_data']
- response = make_response()
- response.data = archive
- response.headers['Content-type'] = 'zip'
- response.headers['Content-Disposition'] = 'attachment; filename=tempA.zip'
- response.headers['Category'] = results[base_constants.MSG_CATEGORY]
+ file_list = results[base_constants.FILE_LIST]
+ archive = io.BytesIO()
+ with zipfile.ZipFile(archive, mode="a", compression=zipfile.ZIP_DEFLATED) as zf:
+ for item in file_list:
+ zf.writestr(item['file_name'], str.encode(item['content'], 'utf-8'))
+ archive.seek(0)
+ zip_name = results.get('zip_name', results['output_display_name'])
+ response = send_file(archive, as_attachment=True, download_name=zip_name)
response.headers['Message'] = results[base_constants.MSG]
- response.mimetype = 'application/zip'
- return response
- # archive = results['zip_data']
- # with open('d:/junk/temp2.zip', 'wb') as fp:
- # fp.write(archive)
- # buffer = io.BytesIO(archive)
- # response = make_response()
- # buffer.seek(0)
- # buflen = len(archive)
- # response.data = buffer.read()
- # response.headers['Content-Disposition'] = 'attachment; filename=temp.zip'
- # response.headers['Category'] = results[base_constants.MSG_CATEGORY]
- # response.headers['Message'] = results[base_constants.MSG]
- # response.headers['Content-Type'] = 'application/zip; charset=utf-8'
- # response.headers['Content-Length'] = buflen
- # response.mimetype = 'application/zip'
- # return response
- # fileobj = io.BytesIO()
- # with zipfile.ZipFile(fileobj, 'w') as zip_file:
- # zip_info = zipfile.ZipInfo(FILEPATH)
- # zip_info.date_time = time.localtime(time.time())[:6]
- # zip_info.compress_type = zipfile.ZIP_DEFLATED
- # with open(FILEPATH, 'rb') as fd:
- # zip_file.writestr(zip_info, fd.read())
- # fileobj.seek(0)
- #
- # response = make_response(fileobj.read())
- # response.headers.set('Content-Type', 'zip')
- # response.headers.set('Content-Disposition', 'attachment', filename='%s.zip' % os.path.basename(FILEPATH))
-
- # archive = results['zip_data']
- # with open('d:/junk/temp2.zip', 'wb') as fp:
- # fp.write(archive)
- # response = send_file('d:/junk/junk3.zip', mimetype='application/zip', as_attachment=True, attachment_filename='junk3.zip')
+ response.headers['Category'] = results[base_constants.MSG_CATEGORY]
return response
+
def get_hed_schema_from_pull_down(request):
""" Create a HedSchema object from form pull-down box.
@@ -235,7 +210,6 @@ def get_hed_schema_from_pull_down(request):
if base_constants.SCHEMA_VERSION not in request.form:
raise HedFileError("NoSchemaError", "Must provide a valid schema or schema version", "")
elif request.form[base_constants.SCHEMA_VERSION] != base_constants.OTHER_VERSION_OPTION:
-
hed_file_path = hedschema.get_path_from_hed_version(request.form[base_constants.SCHEMA_VERSION])
hed_schema = hedschema.load_schema(hed_file_path)
elif request.form[base_constants.SCHEMA_VERSION] == \
@@ -248,6 +222,14 @@ def get_hed_schema_from_pull_down(request):
return hed_schema
+# def get_hed_versions(hed_schema):
+# if hed_schema:
+# hed_versions = hed_schema.get_formatted_version(as_string=False)
+# else:
+# hed_versions = None
+# return hed_versions
+
+
def handle_error(ex, hed_info=None, title=None, return_as_str=True):
""" Handle an error by returning a dictionary or simple string.
@@ -314,10 +296,10 @@ def package_results(results):
"""
- if results.get('data', None):
- return generate_download_file_from_text(results)
- elif results.get('zip_data', None):
+ if results.get(base_constants.FILE_LIST, None):
return generate_download_zip_file(results)
+ elif results.get('data', None):
+ return generate_download_file_from_text(results)
elif not results.get('spreadsheet', None):
return generate_text_response(results)
else:
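The rewritten generate_download_zip_file builds the archive entirely in memory before handing it to Flask's send_file. A standalone sketch of that zip-packing step (without the Flask response) is shown below.

import io
import zipfile


def build_zip_archive(file_list):
    """Pack [{'file_name': ..., 'content': ...}, ...] entries into an in-memory zip,
    mirroring the loop in generate_download_zip_file."""
    archive = io.BytesIO()
    with zipfile.ZipFile(archive, mode="a", compression=zipfile.ZIP_DEFLATED) as zf:
        for item in file_list:
            zf.writestr(item["file_name"], item["content"].encode("utf-8"))
    archive.seek(0)
    return archive


# Example: two summary files packaged for download.
buffer = build_zip_archive([{"file_name": "summary.txt", "content": "All events processed."},
                            {"file_name": "issues.txt", "content": ""}])
print(len(buffer.getvalue()), "bytes in archive")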
diff --git a/tests/data/rename_example.json b/tests/data/rename_example_temp_rmdl.json
similarity index 81%
rename from tests/data/rename_example.json
rename to tests/data/rename_example_temp_rmdl.json
index 881917f7..2c06041f 100644
--- a/tests/data/rename_example.json
+++ b/tests/data/rename_example_temp_rmdl.json
@@ -1,6 +1,6 @@
[
{
- "command": "rename_columns",
+ "operation": "rename_columns",
"description": "Create separate response event from response time column.",
"parameters": {
"column_mapping": {"sex": "face_gender"},
diff --git a/tests/data/splitevents_example.json b/tests/data/splitevents_example_temp_rmdl.json
similarity index 100%
rename from tests/data/splitevents_example.json
rename to tests/data/splitevents_example_temp_rmdl.json
diff --git a/tests/test_events.py b/tests/test_events.py
index 2d1a3da1..8f3be4e3 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -24,13 +24,14 @@ def test_get_input_from_events_form(self):
from hed.schema import HedSchema
from hedweb.events import get_events_form_input
with self.app.test:
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.tsv')
- with open(json_path, 'rb') as fp:
+ with open(sidecar_path, 'rb') as fp:
with open(events_path, 'rb') as fpe:
- environ = create_environ(data={base_constants.JSON_FILE: fp, base_constants.SCHEMA_VERSION: '8.0.0',
- base_constants.EVENTS_FILE: fpe, base_constants.EXPAND_DEFS: 'on',
- base_constants.COMMAND_OPTION: base_constants.COMMAND_ASSEMBLE})
+ environ = create_environ(data={base_constants.SIDECAR_FILE: fp,
+ base_constants.SCHEMA_VERSION: '8.0.0',
+ base_constants.EVENTS_FILE: fpe, base_constants.EXPAND_DEFS: 'on',
+ base_constants.COMMAND_OPTION: base_constants.COMMAND_ASSEMBLE})
request = Request(environ)
arguments = get_events_form_input(request)
self.assertIsInstance(arguments[base_constants.EVENTS], TabularInput,
@@ -52,11 +53,11 @@ def test_events_process_empty_file(self):
def test_events_process_invalid(self):
from hedweb.events import process
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.tsv')
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events_bad.json')
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events_bad.json')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- json_sidecar = Sidecar(files=json_path, name='bids_events_bad')
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ sidecar = Sidecar(files=sidecar_path, name='bids_events_bad')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
arguments = {base_constants.EVENTS: events, base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.EXPAND_DEFS: True,
base_constants.CHECK_FOR_WARNINGS: True, base_constants.SCHEMA: hed_schema}
@@ -73,8 +74,8 @@ def test_events_process_valid(self):
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- json_sidecar = Sidecar(files=json_path, name='bids_json')
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ sidecar = Sidecar(files=json_path, name='bids_json')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
arguments = {base_constants.EVENTS: events, base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.EXPAND_DEFS: True,
base_constants.CHECK_FOR_WARNINGS: True, base_constants.SCHEMA: hed_schema}
@@ -90,11 +91,11 @@ def test_events_assemble_invalid(self):
from hedweb.events import assemble
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.tsv')
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events_bad.json')
- json_sidecar = Sidecar(files=json_path, name='bids_events_bad')
+ sidecar = Sidecar(files=json_path, name='bids_events_bad')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
with self.app.app_context():
results = assemble(hed_schema, events, expand_defs=True)
self.assertTrue('data' in results,
@@ -108,8 +109,8 @@ def test_events_assemble_valid(self):
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- json_sidecar = Sidecar(files=json_path, name='bids_json')
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ sidecar = Sidecar(files=json_path, name='bids_json')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
with self.app.app_context():
results = assemble(hed_schema, events, expand_defs=True)
self.assertTrue(results['data'],
@@ -139,11 +140,11 @@ def test_search_invalid(self):
from hedweb.events import search
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.tsv')
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
- json_sidecar = Sidecar(files=json_path, name='bids_sidecar')
+ sidecar = Sidecar(files=json_path, name='bids_sidecar')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
with self.app.app_context():
results = search(hed_schema, events, query="")
self.assertTrue('data' in results, 'make_query results should have a data key when errors')
@@ -156,8 +157,8 @@ def test_events_search_valid(self):
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- json_sidecar = Sidecar(files=json_path, name='bids_json')
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ sidecar = Sidecar(files=json_path, name='bids_json')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
with self.app.app_context():
results = search(hed_schema, events, query="Sensory-event")
self.assertTrue(results['data'],
@@ -171,8 +172,8 @@ def test_events_validate_invalid(self):
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events_bad.json')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- json_sidecar = Sidecar(files=json_path, name='bids_events_bad')
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ sidecar = Sidecar(files=json_path, name='bids_events_bad')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
with self.app.app_context():
results = validate(hed_schema, events)
self.assertTrue(results['data'],
@@ -184,8 +185,8 @@ def test_events_validate_valid(self):
from hedweb.events import validate
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.tsv')
json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
- json_sidecar = Sidecar(files=json_path, name='bids_events')
- events = TabularInput(file=events_path, sidecar=json_sidecar, name='bids_events')
+ sidecar = Sidecar(files=json_path, name='bids_events')
+ events = TabularInput(file=events_path, sidecar=sidecar, name='bids_events')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
diff --git a/tests/test_routes/test_routes_events.py b/tests/test_routes/test_routes_events.py
index 86227a4e..ee7aedd3 100644
--- a/tests/test_routes/test_routes_events.py
+++ b/tests/test_routes/test_routes_events.py
@@ -17,12 +17,12 @@ def test_events_results_empty_data(self):
self.assertFalse(response.data, "The response data for empty events request is empty")
def test_events_results_assemble_valid(self):
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.tsv')
- with open(json_path, 'r') as sc:
+ with open(sidecar_path, 'r') as sc:
x = sc.read()
- json_buffer = io.BytesIO(bytes(x, 'utf-8'))
+ sidecar_buffer = io.BytesIO(bytes(x, 'utf-8'))
with open(events_path, 'r') as sc:
y = sc.read()
@@ -31,7 +31,7 @@ def test_events_results_assemble_valid(self):
with self.app.app_context():
input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_ASSEMBLE,
- 'json_file': (json_buffer, 'bids_events.json'),
+ 'sidecar_file': (sidecar_buffer, 'bids_events.json'),
'events_file': (events_buffer, 'bids_events.tsv'),
'expand_defs': 'on',
base_constants.CHECK_FOR_WARNINGS: 'on'}
@@ -41,7 +41,7 @@ def test_events_results_assemble_valid(self):
self.assertEqual("success", headers_dict["Category"],
"The valid events file should assemble successfully")
self.assertTrue(response.data, "The assembled events file should not be empty")
- json_buffer.close()
+ sidecar_buffer.close()
events_buffer.close()
def test_events_results_assemble_invalid(self):
@@ -59,7 +59,7 @@ def test_events_results_assemble_invalid(self):
with self.app.app_context():
input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_ASSEMBLE,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.EVENTS_FILE: (events_buffer, 'bids_events.tsv'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
@@ -146,7 +146,7 @@ def test_events_results_validate_valid(self):
with self.app.app_context():
input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.EVENTS_FILE: (events_buffer, 'bids_events.tsv'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
@@ -175,7 +175,7 @@ def test_events_results_validate_invalid(self):
with self.app.app_context():
input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.EVENTS_FILE: (events_buffer, 'events_file'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
diff --git a/tests/test_routes/test_routes_sidecar.py b/tests/test_routes/test_routes_sidecar.py
index 735e27d5..09f51294 100644
--- a/tests/test_routes/test_routes_sidecar.py
+++ b/tests/test_routes/test_routes_sidecar.py
@@ -18,13 +18,13 @@ def test_sidecar_results_empty_data(self):
def test_sidecar_results_to_long_valid(self):
with self.app.app_context():
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
- with open(json_path, 'r') as sc:
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
+ with open(sidecar_path, 'r') as sc:
x = sc.read()
- json_buffer = io.BytesIO(bytes(x, 'utf-8'))
+ sidecar_buffer = io.BytesIO(bytes(x, 'utf-8'))
input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_TO_LONG,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (sidecar_buffer, 'bids_events.json'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data', data=input_data)
self.assertTrue(isinstance(response, Response),
@@ -34,17 +34,17 @@ def test_sidecar_results_to_long_valid(self):
self.assertEqual("success", headers_dict["Category"],
"The valid sidecar should convert to long successfully")
self.assertTrue(response.data, "The converted to long sidecar should not be empty")
- json_buffer.close()
+ sidecar_buffer.close()
def test_sidecar_results_to_long_invalid(self):
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
- with open(json_path, 'r') as sc:
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
+ with open(sidecar_path, 'r') as sc:
x = sc.read()
- json_buffer = io.BytesIO(bytes(x, 'utf-8'))
+ sidecar_buffer = io.BytesIO(bytes(x, 'utf-8'))
with self.app.app_context():
input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_TO_LONG,
- base_constants.JSON_FILE: (json_buffer, 'HED7.2.0.xml'),
+ base_constants.SIDECAR_FILE: (sidecar_buffer, 'HED7.2.0.xml'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data', data=input_data)
@@ -56,7 +56,7 @@ def test_sidecar_results_to_long_invalid(self):
"Conversion of an invalid sidecar to long generates a warning")
self.assertTrue(response.data,
"The response data for invalid conversion to long should have error messages")
- json_buffer.close()
+ sidecar_buffer.close()
def test_sidecar_results_to_short_valid(self):
with self.app.app_context():
@@ -72,7 +72,7 @@ def test_sidecar_results_to_short_valid(self):
input_data = {base_constants.SCHEMA_VERSION: 'Other',
base_constants.SCHEMA_PATH: (schema_buffer, 'HED8.0.0.xml'),
base_constants.COMMAND_OPTION: base_constants.COMMAND_TO_SHORT,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data', data=input_data)
self.assertTrue(isinstance(response, Response),
@@ -93,7 +93,7 @@ def test_sidecar_results_validate_valid(self):
input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data', data=input_data)
self.assertTrue(isinstance(response, Response),
@@ -120,7 +120,7 @@ def test_sidecar_results_validate_valid_other(self):
input_data = {base_constants.SCHEMA_VERSION: 'Other',
base_constants.SCHEMA_PATH: (schema_buffer, 'HED8.0.0.xml'),
base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data', data=input_data)
self.assertTrue(isinstance(response, Response),
@@ -141,7 +141,7 @@ def test_sidecar_results_to_short_invalid(self):
input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_TO_SHORT,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data', data=input_data)
self.assertTrue(isinstance(response, Response),
@@ -162,7 +162,7 @@ def test_sidecar_results_validate_invalid(self):
json_buffer = io.BytesIO(bytes(x, 'utf-8'))
input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
- base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
+ base_constants.SIDECAR_FILE: (json_buffer, 'bids_events.json'),
base_constants.CHECK_FOR_WARNINGS: 'on'}
response = self.app.test.post('/sidecar_submit', content_type='multipart/form-data',
data=input_data)
diff --git a/tests/test_services.py b/tests/test_services.py
index ff07cdde..5ccfeff3 100644
--- a/tests/test_services.py
+++ b/tests/test_services.py
@@ -22,16 +22,16 @@ def test_get_input_from_service_request(self):
from hed.schema import HedSchema
from hedweb.services import get_input_from_request
with self.app.test:
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
- with open(json_path, 'rb') as fp:
- json_string = fp.read().decode('ascii')
- json_data = {base_constants.JSON_STRING: json_string, base_constants.CHECK_FOR_WARNINGS: 'on',
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
+ with open(sidecar_path, 'rb') as fp:
+ sidecar_string = fp.read().decode('ascii')
+ json_data = {base_constants.SIDECAR_STRING: sidecar_string, base_constants.CHECK_FOR_WARNINGS: 'on',
base_constants.SCHEMA_VERSION: '8.0.0', base_constants.SERVICE: 'sidecar_validate'}
environ = create_environ(json=json_data)
request = Request(environ)
arguments = get_input_from_request(request)
- self.assertIn(base_constants.JSON_SIDECAR, arguments, "get_input_from_request should have a json sidecar")
- self.assertIsInstance(arguments[base_constants.JSON_SIDECAR], Sidecar,
+ self.assertIn(base_constants.SIDECAR, arguments, "get_input_from_request should have a json sidecar")
+ self.assertIsInstance(arguments[base_constants.SIDECAR], Sidecar,
"get_input_from_request should contain a sidecar")
self.assertIsInstance(arguments[base_constants.SCHEMA], HedSchema,
"get_input_from_request should have a HED schema")
@@ -40,6 +40,27 @@ def test_get_input_from_service_request(self):
self.assertTrue(arguments[base_constants.CHECK_FOR_WARNINGS],
"get_input_from_request should have check_warnings true when on")
+ def test_get_remodel_parameters(self):
+ from hedweb.services import get_remodel_parameters
+ remodel_file = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'data/simple_reorder_remdl.json'))
+ with open(remodel_file, 'r') as fp:
+ json_obj = json.load(fp)
+ params = {'remodel_string': json.dumps(json_obj)}
+ arguments = {}
+ get_remodel_parameters(arguments, params)
+ self.assertTrue(arguments)
+ self.assertIn('remodel_operations', arguments)
+ self.assertEqual(len(arguments['remodel_operations']['operations']), 2)
+
+ def test_get_remodel_parameters_empty(self):
+ from hedweb.services import get_remodel_parameters
+ params = {}
+ arguments = {}
+ get_remodel_parameters(arguments, params)
+ self.assertFalse(arguments)
+ self.assertNotIn('remodel_operations', arguments)
+
def test_services_process_empty(self):
from hedweb.services import process
with self.app.app_context():
@@ -67,7 +88,7 @@ def test_process_services_sidecar(self):
json_sidecar = models.Sidecar(files=fb, name='JSON_Sidecar')
arguments = {base_constants.SERVICE: 'sidecar_validate', base_constants.SCHEMA: hed_schema,
base_constants.COMMAND: 'validate', base_constants.COMMAND_TARGET: 'sidecar',
- base_constants.JSON_SIDECAR: json_sidecar}
+ base_constants.SIDECAR: json_sidecar}
with self.app.app_context():
response = process(arguments)
self.assertFalse(response['error_type'],
@@ -103,24 +124,24 @@ def test_services_get_sidecar(self):
data_upper = json.load(f)
with open(sidecar_path_lower2) as f:
data_lower2 = json.load(f)
- params2 = {base_constants.JSON_LIST: [json.dumps(data_upper), json.dumps(data_lower2)]}
+ params2 = {base_constants.SIDECAR_STRING: [json.dumps(data_upper), json.dumps(data_lower2)]}
arguments2 = {}
get_sidecar(arguments2, params2)
- self.assertIn(base_constants.JSON_SIDECAR, arguments2, 'get_sidecar arguments should have a sidecar')
- self.assertIsInstance(arguments2[base_constants.JSON_SIDECAR], Sidecar)
- sidecar2 = arguments2[base_constants.JSON_SIDECAR]
+ self.assertIn(base_constants.SIDECAR, arguments2, 'get_sidecar arguments should have a sidecar')
+ self.assertIsInstance(arguments2[base_constants.SIDECAR], Sidecar)
+ sidecar2 = arguments2[base_constants.SIDECAR]
self.assertIn('event_type', data_upper, "get_sidecar upper has key event_type")
self.assertNotIn('event_type', data_lower2, "get_sidecar lower2 does not have event_type")
self.assertIn('event_type', sidecar2.loaded_dict, "get_sidecar merged sidecar has event_type")
with open(sidecar_path_lower3) as f:
data_lower3 = json.load(f)
- params3 = {base_constants.JSON_LIST: [json.dumps(data_upper), json.dumps(data_lower3)]}
+ params3 = {base_constants.SIDECAR_STRING: [json.dumps(data_upper), json.dumps(data_lower3)]}
arguments3 = {}
get_sidecar(arguments3, params3)
- self.assertIn(base_constants.JSON_SIDECAR, arguments3, 'get_sidecar arguments should have a sidecar')
- self.assertIsInstance(arguments3[base_constants.JSON_SIDECAR], Sidecar)
- sidecar3 = arguments3[base_constants.JSON_SIDECAR]
+ self.assertIn(base_constants.SIDECAR, arguments3, 'get_sidecar arguments should have a sidecar')
+ self.assertIsInstance(arguments3[base_constants.SIDECAR], Sidecar)
+ sidecar3 = arguments3[base_constants.SIDECAR]
self.assertIn('event_type', data_upper, "get_sidecar upper has key event_type")
self.assertNotIn('event_type', data_lower3, "get_sidecar lower3 does not have event_type")
self.assertIn('event_type', sidecar3.loaded_dict, "get_sidecar merged sidecar has event_type")
diff --git a/tests/test_sidecar.py b/tests/test_sidecar.py
index 935aac8b..511d7fac 100644
--- a/tests/test_sidecar.py
+++ b/tests/test_sidecar.py
@@ -20,14 +20,14 @@ def test_generate_input_from_sidecar_form(self):
from hed.schema import HedSchema
from hedweb.sidecar import get_input_from_form
with self.app.test:
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
- with open(json_path, 'rb') as fp:
- environ = create_environ(data={base_constants.JSON_FILE: fp, base_constants.SCHEMA_VERSION: '8.0.0',
- base_constants.COMMAND_OPTION: base_constants.COMMAND_TO_LONG})
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
+ with open(sidecar_path, 'rb') as fp:
+ environ = create_environ(data={base_constants.SIDECAR_FILE: fp, base_constants.SCHEMA_VERSION: '8.0.0',
+ base_constants.COMMAND_OPTION: base_constants.COMMAND_TO_LONG})
request = Request(environ)
arguments = get_input_from_form(request)
- self.assertIsInstance(arguments[base_constants.JSON_SIDECAR], Sidecar,
+ self.assertIsInstance(arguments[base_constants.SIDECAR], Sidecar,
"generate_input_from_sidecar_form should have a JSON dictionary in sidecar list")
self.assertIsInstance(arguments[base_constants.SCHEMA], HedSchema,
"generate_input_from_sidecar_form should have a HED schema")
@@ -41,18 +41,18 @@ def test_sidecar_process_empty_file(self):
from hed.errors.exceptions import HedFileError
with self.assertRaises(HedFileError):
with self.app.app_context():
- arguments = {'json_path': ''}
+ arguments = {'sidecar_path': ''}
process(arguments)
def test_sidecar_process_invalid(self):
from hedweb.sidecar import process
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events_bad.json')
- json_sidecar = models.Sidecar(files=json_path, name='bids_events_bad')
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events_bad.json')
+ json_sidecar = models.Sidecar(files=sidecar_path, name='bids_events_bad')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- arguments = {base_constants.SCHEMA: hed_schema, base_constants.JSON_SIDECAR: json_sidecar,
- base_constants.JSON_DISPLAY_NAME: 'bids_events_bad',
+ arguments = {base_constants.SCHEMA: hed_schema, base_constants.SIDECAR: json_sidecar,
+ base_constants.SIDECAR_DISPLAY_NAME: 'bids_events_bad',
base_constants.COMMAND: base_constants.COMMAND_TO_SHORT}
with self.app.app_context():
results = process(arguments)
@@ -69,8 +69,8 @@ def test_sidecar_process_valid_to_short(self):
json_sidecar = models.Sidecar(files=json_path, name='bids_events')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- arguments = {base_constants.SCHEMA: hed_schema, base_constants.JSON_SIDECAR: json_sidecar,
- base_constants.JSON_DISPLAY_NAME: 'bids_events',
+ arguments = {base_constants.SCHEMA: hed_schema, base_constants.SIDECAR: json_sidecar,
+ base_constants.SIDECAR_DISPLAY_NAME: 'bids_events',
base_constants.EXPAND_DEFS: False,
base_constants.COMMAND: base_constants.COMMAND_TO_SHORT}
@@ -87,8 +87,8 @@ def test_sidecar_process_valid_to_short_defs_expanded(self):
json_sidecar = models.Sidecar(files=json_path, name='bids_events')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- arguments = {base_constants.SCHEMA: hed_schema, base_constants.JSON_SIDECAR: json_sidecar,
- base_constants.JSON_DISPLAY_NAME: 'bids_events',
+ arguments = {base_constants.SCHEMA: hed_schema, base_constants.SIDECAR: json_sidecar,
+ base_constants.SIDECAR_DISPLAY_NAME: 'bids_events',
base_constants.EXPAND_DEFS: True,
base_constants.COMMAND: base_constants.COMMAND_TO_SHORT}
@@ -105,8 +105,8 @@ def test_sidecar_process_valid_to_long(self):
json_sidecar = models.Sidecar(files=json_path, name='bids_events')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- arguments = {base_constants.SCHEMA: hed_schema, base_constants.JSON_SIDECAR: json_sidecar,
- base_constants.JSON_DISPLAY_NAME: 'bids_events',
+ arguments = {base_constants.SCHEMA: hed_schema, base_constants.SIDECAR: json_sidecar,
+ base_constants.SIDECAR_DISPLAY_NAME: 'bids_events',
base_constants.EXPAND_DEFS: False,
base_constants.COMMAND: base_constants.COMMAND_TO_LONG}
@@ -123,8 +123,8 @@ def test_sidecar_process_valid_to_long_defs_expanded(self):
json_sidecar = models.Sidecar(files=json_path, name='bids_events')
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/HED8.0.0.xml')
hed_schema = hedschema.load_schema(schema_path)
- arguments = {base_constants.SCHEMA: hed_schema, base_constants.JSON_SIDECAR: json_sidecar,
- base_constants.JSON_DISPLAY_NAME: 'bids_events',
+ arguments = {base_constants.SCHEMA: hed_schema, base_constants.SIDECAR: json_sidecar,
+ base_constants.SIDECAR_DISPLAY_NAME: 'bids_events',
base_constants.EXPAND_DEFS: True,
base_constants.COMMAND: base_constants.COMMAND_TO_LONG}
diff --git a/tests/test_web_util.py b/tests/test_web_util.py
index dcf7db18..8c29e3f0 100644
--- a/tests/test_web_util.py
+++ b/tests/test_web_util.py
@@ -13,16 +13,16 @@ class Test(TestWebBase):
def test_form_has_file(self):
from hedweb.web_util import form_has_file
with self.app.test as _:
- json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
- with open(json_path, 'rb') as fp:
- environ = create_environ(data={'json_file': fp})
+ sidecar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/bids_events.json')
+ with open(sidecar_path, 'rb') as fp:
+ environ = create_environ(data={'sidecar_file': fp})
request = Request(environ)
- self.assertTrue(form_has_file(request, 'json_file'), "Form has file when no extension requirements")
+ self.assertTrue(form_has_file(request, 'sidecar_file'), "Form has file when no extension requirements")
self.assertFalse(form_has_file(request, 'temp'), "Form does not have file when form name is wrong")
- self.assertFalse(form_has_file(request, 'json_file', file_constants.SPREADSHEET_EXTENSIONS),
+ self.assertFalse(form_has_file(request, 'sidecar_file', file_constants.SPREADSHEET_EXTENSIONS),
"Form does not have file when extension is wrong")
- self.assertTrue(form_has_file(request, 'json_file', [".json"]),
+ self.assertTrue(form_has_file(request, 'sidecar_file', [".json"]),
"Form has file when extensions and form field match")
def test_form_has_option(self):
From 95e83af73dbc2d505a5a432f1f153a0dda78ea04 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Thu, 13 Oct 2022 07:20:31 -0500
Subject: [PATCH 12/25] Updated the imports for get_schema_versions
---
hedweb/events.py | 10 +++-------
hedweb/sidecar.py | 5 ++---
hedweb/spreadsheet.py | 5 ++---
3 files changed, 7 insertions(+), 13 deletions(-)
diff --git a/hedweb/events.py b/hedweb/events.py
index e9fb40a5..14280872 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -6,7 +6,6 @@
from hed import schema as hedschema
from hed.errors import get_printable_issue_string, HedFileError
from hed.models import DefinitionDict, Sidecar, TabularInput
-from hed.schema.hed_schema_io import get_schema_versions
from hed.tools import assemble_hed, Dispatcher, TabularSummary, generate_filename, \
generate_sidecar_entry, search_tabular
from hed.validator import HedValidator
@@ -207,7 +206,7 @@ def remodel(hed_schema, events, sidecar, remodel_operations, include_summaries=T
response = {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
base_constants.COMMAND_TARGET: 'events', 'data': '', "output_display_name": output_name,
- base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
base_constants.MSG_CATEGORY: 'success',
base_constants.MSG: f"Command parsing for {display_name} remodeling was successful"}
if dispatch.context_dict and include_summaries:
@@ -300,7 +299,7 @@ def validate(hed_schema, events, sidecar=None, check_for_warnings=False):
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE, base_constants.COMMAND_TARGET: 'events',
'data': data, "output_display_name": file_name,
- base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
@@ -318,16 +317,13 @@ def validate_query(hed_schema, query):
if not query:
data = "Empty query could not be processed."
- file_name = generate_filename('empty_query', name_suffix='_validation_errors',
- extension='.txt', append_datetime=True)
category = 'warning'
msg = f"Empty query could not be processed"
else:
data = ''
- file_name = 'Nice_query'
category = 'success'
msg = f"Query had no validation errors"
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE, base_constants.COMMAND_TARGET: 'query',
- 'data': data, base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ 'data': data, base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
diff --git a/hedweb/sidecar.py b/hedweb/sidecar.py
index 261f8d4c..492c61c8 100644
--- a/hedweb/sidecar.py
+++ b/hedweb/sidecar.py
@@ -9,7 +9,6 @@
from hed.errors import HedFileError, get_printable_issue_string
from hed.models import SpreadsheetInput, Sidecar
-from hed.schema.hed_schema_io import get_schema_versions
from hed.tools import df_to_hed, generate_filename, hed_to_df, merge_hed_dict
from hedweb.constants import base_constants, file_constants
from hedweb.web_util import form_has_option, filter_issues, get_hed_schema_from_pull_down
@@ -135,7 +134,7 @@ def sidecar_convert(hed_schema, sidecar, command=base_constants.COMMAND_TO_SHORT
msg = f'Sidecar file {display_name} was successfully converted'
return {base_constants.COMMAND: command, base_constants.COMMAND_TARGET: 'sidecar',
'data': data, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
'msg_category': category, 'msg': msg}
@@ -223,5 +222,5 @@ def sidecar_validate(hed_schema, sidecar, check_for_warnings=False):
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE, base_constants.COMMAND_TARGET: 'sidecar',
'data': data, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
diff --git a/hedweb/spreadsheet.py b/hedweb/spreadsheet.py
index aa89e2fe..53ae423e 100644
--- a/hedweb/spreadsheet.py
+++ b/hedweb/spreadsheet.py
@@ -2,7 +2,6 @@
from flask import current_app
from werkzeug.utils import secure_filename
from hed import schema as hedschema
-from hed.schema.hed_schema_io import get_schema_versions
from hed.errors import get_printable_issue_string, HedFileError
from hed.models import SpreadsheetInput
from hed.tools import generate_filename
@@ -115,7 +114,7 @@ def spreadsheet_convert(hed_schema, spreadsheet, command=base_constants.COMMAND_
return {base_constants.COMMAND: command,
base_constants.COMMAND_TARGET: 'spreadsheet', 'data': '',
base_constants.SPREADSHEET: spreadsheet, 'output_display_name': file_name,
- base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
base_constants.MSG_CATEGORY: 'success',
base_constants.MSG: f'Spreadsheet {display_name} converted_successfully'}
@@ -151,6 +150,6 @@ def spreadsheet_validate(hed_schema, spreadsheet, check_for_warnings=False):
return {base_constants.COMMAND: base_constants.COMMAND_VALIDATE,
base_constants.COMMAND_TARGET: 'spreadsheet', 'data': data,
- base_constants.SCHEMA_VERSION: get_schema_versions(hed_schema, as_string=True),
+ base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
"output_display_name": file_name,
base_constants.MSG_CATEGORY: category, base_constants.MSG: msg}
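The three hunks in this patch converge on the same call pattern: the schema version string is obtained through the `hed.schema` package instead of the deep `hed.schema.hed_schema_io` import. A minimal sketch of that pattern, assuming only what the hunks show (the schema path below is illustrative):

```python
# Minimal sketch of the import style adopted in this patch (the path is illustrative).
from hed import schema as hedschema

hed_schema = hedschema.load_schema("data/HED8.0.0.xml")

# get_schema_versions is reached through the hed.schema package rather than
# hed.schema.hed_schema_io, matching the import cleanup above.
version_string = hedschema.get_schema_versions(hed_schema, as_string=True)
print(version_string)  # e.g. "8.0.0"
```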
From 4e0d8b8f5a43d3cb047990c9556e3602e9c9d40f Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Thu, 13 Oct 2022 09:08:51 -0500
Subject: [PATCH 13/25] Updated httpd.conf for deployment
---
deploy_hed/httpd.conf | 1 +
1 file changed, 1 insertion(+)
diff --git a/deploy_hed/httpd.conf b/deploy_hed/httpd.conf
index 1b358209..85573f99 100644
--- a/deploy_hed/httpd.conf
+++ b/deploy_hed/httpd.conf
@@ -13,3 +13,4 @@ LoadModule authz_core_module /usr/lib/apache2/modules/mod_authz_core.so
#LoadModule unixd_module /usr/lib/apache2/modules/mod_unixd.so
LoadModule wsgi_module /usr/local/lib/python3.9/site-packages/mod_wsgi/server/mod_wsgi-py39.cpython-39-x86_64-linux-gnu.so
WSGIScriptAlias / /var/www/hedtools/web.wsgi
+WSGIApplicationGroup %{GLOBAL}
From da5f60ae611fd039cc8f582d5ff917c9248fda26 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Thu, 13 Oct 2022 09:21:42 -0500
Subject: [PATCH 14/25] Updated the hedtools_dev configuration
---
deploy_hed_dev/httpd.conf | 1 +
1 file changed, 1 insertion(+)
diff --git a/deploy_hed_dev/httpd.conf b/deploy_hed_dev/httpd.conf
index 1b358209..85573f99 100644
--- a/deploy_hed_dev/httpd.conf
+++ b/deploy_hed_dev/httpd.conf
@@ -13,3 +13,4 @@ LoadModule authz_core_module /usr/lib/apache2/modules/mod_authz_core.so
#LoadModule unixd_module /usr/lib/apache2/modules/mod_unixd.so
LoadModule wsgi_module /usr/local/lib/python3.9/site-packages/mod_wsgi/server/mod_wsgi-py39.cpython-39-x86_64-linux-gnu.so
WSGIScriptAlias / /var/www/hedtools/web.wsgi
+WSGIApplicationGroup %{GLOBAL}
From 2b5ea912aefc5dee4637067aa96ec29d259f9b5a Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Fri, 14 Oct 2022 10:50:53 -0500
Subject: [PATCH 15/25] Modified the deployment scripts to use docker rotating
logs
---
deploy_hed/deploy.sh | 2 +-
deploy_hed_dev/deploy.sh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/deploy_hed/deploy.sh b/deploy_hed/deploy.sh
index 950afc7f..75c7027f 100644
--- a/deploy_hed/deploy.sh
+++ b/deploy_hed/deploy.sh
@@ -77,7 +77,7 @@ docker rm -f $CONTAINER_NAME
run_new_container()
{
echo "Running new container $CONTAINER_NAME ..."
-docker run --restart=always --name $CONTAINER_NAME -d -p 127.0.0.1:$HOST_PORT:$CONTAINER_PORT $IMAGE_NAME
+docker run --restart=always --name $CONTAINER_NAME -d -p 127.0.0.1:$HOST_PORT:$CONTAINER_PORT --log-opt max-size=50m $IMAGE_NAME
}
cleanup_directory()
diff --git a/deploy_hed_dev/deploy.sh b/deploy_hed_dev/deploy.sh
index bb296a05..61c216dc 100644
--- a/deploy_hed_dev/deploy.sh
+++ b/deploy_hed_dev/deploy.sh
@@ -77,7 +77,7 @@ docker rm -f $CONTAINER_NAME
run_new_container()
{
echo "Running new container $CONTAINER_NAME ..."
-docker run --restart=always --name $CONTAINER_NAME -d -p 127.0.0.1:$HOST_PORT:$CONTAINER_PORT $IMAGE_NAME
+docker run --restart=always --name $CONTAINER_NAME -d -p 127.0.0.1:$HOST_PORT:$CONTAINER_PORT --log-opt max-size=50m $IMAGE_NAME
}
cleanup_directory()
From fc3528f843bf9bae4139d5f8e6db8580b8869710 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Thu, 22 Dec 2022 09:36:05 -0600
Subject: [PATCH 16/25] Updated remodel test datasets with new format
---
hedweb/events.py | 4 ++--
hedweb/templates/actions.html | 2 +-
tests/data/bad_reorder_remdl.json | 10 +++++-----
tests/data/simple_reorder_rmdl.json | 2 +-
tests/data/splitevents_example_temp_rmdl.json | 4 ++--
5 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/hedweb/events.py b/hedweb/events.py
index 14280872..a53c9cbb 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -195,10 +195,10 @@ def remodel(hed_schema, events, sidecar, remodel_operations, include_summaries=T
'msg': f"Remodeling operation list for {display_name} had validation errors"}
df = events.dataframe
dispatch = Dispatcher(operations, data_root=None, hed_versions=hed_schema)
- df = dispatch.prep_events(df)
+ df = dispatch.prep_data(df)
for operation in dispatch.parsed_ops:
df = operation.do_op(dispatch, df, display_name, sidecar=sidecar)
- df = df.fillna('n/a')
+ df = dispatch.post_proc_data(df)
data = df.to_csv(None, sep='\t', index=False, header=True)
name_suffix = f"_remodeled_by_{remodel_name}"
file_name = generate_filename(display_name, name_suffix=name_suffix, extension='.tsv', append_datetime=True)
diff --git a/hedweb/templates/actions.html b/hedweb/templates/actions.html
index 61507730..a7abbb09 100644
--- a/hedweb/templates/actions.html
+++ b/hedweb/templates/actions.html
@@ -78,7 +78,7 @@ {{ title }}
-
+
{% endif %}
diff --git a/tests/data/bad_reorder_remdl.json b/tests/data/bad_reorder_remdl.json
index 0ac7dfbd..ad83d84c 100644
--- a/tests/data/bad_reorder_remdl.json
+++ b/tests/data/bad_reorder_remdl.json
@@ -7,7 +7,7 @@
}
},
{
- "command": "remove_columns",
+ "operation": "remove_columns",
"description": "Get rid of the sample and the value columns",
"parameters": {
"remove_names": [
@@ -18,17 +18,17 @@
}
},
{
- "command": "unknown_command",
+ "operation": "unknown_command",
"description": "bad command",
"parameters": {
"ignore_missing": true
}
},
{
- "command": "reorder_columns",
+ "operation": "reorder_columns",
"description": "Order columns so that response_time and trial_type come after onset and duration",
"parameters": {
- "column_order": [
+ "column_names": [
"onset",
"duration"
],
@@ -37,7 +37,7 @@
}
},
{
- "command": "remove_columns",
+ "operation": "remove_columns",
"description": "bad parameters",
"parameters": {
"ignore_missing": true
diff --git a/tests/data/simple_reorder_rmdl.json b/tests/data/simple_reorder_rmdl.json
index 6b5b4c63..50e2849c 100644
--- a/tests/data/simple_reorder_rmdl.json
+++ b/tests/data/simple_reorder_rmdl.json
@@ -3,7 +3,7 @@
"operation": "remove_columns",
"description": "Get rid of the sample and the value columns",
"parameters": {
- "remove_names": ["sample", "value"],
+ "column_names": ["sample", "value"],
"ignore_missing": true
}
},
diff --git a/tests/data/splitevents_example_temp_rmdl.json b/tests/data/splitevents_example_temp_rmdl.json
index b24965e8..6b45c187 100644
--- a/tests/data/splitevents_example_temp_rmdl.json
+++ b/tests/data/splitevents_example_temp_rmdl.json
@@ -1,6 +1,6 @@
[
{
- "command": "rename_columns",
+ "operation": "rename_columns",
"description": "Create separate response event from response time column.",
"parameters": {
"column_mapping": {
@@ -10,7 +10,7 @@
}
},
{
- "command": "split_event",
+ "operation": "split_event",
"description": "Create separate response event from response time column.",
"parameters": {
"anchor_column": "event_type",
From 4353ef64ef57b77fc52ad863b793cb3d7c359a01 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Tue, 31 Jan 2023 08:42:34 -0600
Subject: [PATCH 17/25] Updated the single file remodeling for new format
---
hedweb/events.py | 6 ++--
hedweb/schema.py | 41 ++++++++++++++++++++++---
tests/test_routes/test_routes_schema.py | 2 +-
3 files changed, 40 insertions(+), 9 deletions(-)
diff --git a/hedweb/events.py b/hedweb/events.py
index a53c9cbb..b1323cc1 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -195,15 +195,15 @@ def remodel(hed_schema, events, sidecar, remodel_operations, include_summaries=T
'msg': f"Remodeling operation list for {display_name} had validation errors"}
df = events.dataframe
dispatch = Dispatcher(operations, data_root=None, hed_versions=hed_schema)
- df = dispatch.prep_data(df)
+
for operation in dispatch.parsed_ops:
+ df = dispatch.prep_data(df)
df = operation.do_op(dispatch, df, display_name, sidecar=sidecar)
- df = dispatch.post_proc_data(df)
+ df = dispatch.post_proc_data(df)
data = df.to_csv(None, sep='\t', index=False, header=True)
name_suffix = f"_remodeled_by_{remodel_name}"
file_name = generate_filename(display_name, name_suffix=name_suffix, extension='.tsv', append_datetime=True)
output_name = file_name
-
response = {base_constants.COMMAND: base_constants.COMMAND_REMODEL,
base_constants.COMMAND_TARGET: 'events', 'data': '', "output_display_name": output_name,
base_constants.SCHEMA_VERSION: hedschema.get_schema_versions(hed_schema, as_string=True),
diff --git a/hedweb/schema.py b/hedweb/schema.py
index d52fc1dd..b187a8ab 100644
--- a/hedweb/schema.py
+++ b/hedweb/schema.py
@@ -5,7 +5,7 @@
from werkzeug.utils import secure_filename
from hed import schema as hedschema
-from hed.errors import get_exception_issue_string, get_printable_issue_string
+from hed.errors import get_printable_issue_string
from hed.errors import HedFileError
from hed.tools import generate_filename
from hedweb.web_util import form_has_file, form_has_option, form_has_url
@@ -38,9 +38,9 @@ def get_schema(arguments):
else:
file_found = False
except HedFileError as e:
- issues = e.issues
+ issues.append({'code': e.args[0], 'message': e.args[1]})
if not file_found:
- raise HedFileError("NoSchemaProvided", "Must provide a loadable schema", "")
+ raise HedFileError("SCHEMA_NOT_FOUND", "Must provide a loadable schema", "")
return hed_schema, issues
@@ -76,7 +76,7 @@ def get_input_from_form(request):
arguments[base_constants.SCHEMA_FILE_TYPE] = basename(url_parsed.path)
arguments[base_constants.SCHEMA_DISPLAY_NAME] = basename(url_parsed.path)
else:
- raise HedFileError("NoSchemaProvided", "Must provide a loadable schema", "")
+ raise HedFileError("SCHEMA_NOT_FOUND", "Must provide a loadable schema", "")
return arguments
@@ -96,7 +96,7 @@ def process(arguments):
display_name = arguments.get('schema_display_name', 'unknown_source')
hed_schema, issues = get_schema(arguments)
if issues:
- issue_str = get_exception_issue_string(issues, f"Schema for {display_name} had these errors")
+ issue_str = get_issue_string(issues, f"Schema for {display_name} had these errors")
file_name = generate_filename(arguments[base_constants.SCHEMA_DISPLAY_NAME],
name_suffix='schema__errors', extension='.txt')
return {'command': arguments[base_constants.COMMAND],
@@ -174,3 +174,34 @@ def schema_validate(hed_schema, display_name):
'schema_version': hed_schema.get_formatted_version(as_string=True),
'msg_category': 'success',
'msg': 'Schema had no HED-3G validation errors'}
+
+
+def get_issue_string(issues, title=None):
+ """ Return a string with issues list flatted into single string, one issue per line.
+
+ Parameters:
+ issues (list): A list of strings or issue dictionaries to print.
+ title (str or None): An optional title that will always show up first if present.
+
+ Returns:
+ str: A str containing printable version of the issues or ''.
+
+ """
+
+ issue_str = ''
+ if issues:
+ issue_list = []
+ for issue in issues:
+ if isinstance(issue, str):
+ issue_list.append(f"ERROR: {issue}.")
+ else:
+ this_str = f"{issue['message']}"
+ if 'code' in issue:
+ this_str = f"{issue['code']}:" + this_str
+ if 'line_number' in issue:
+ this_str = this_str + f"\n\tLine number {issue['line_number']}: {issue.get('line', '')} "
+ issue_list.append(this_str)
+ issue_str += '\n' + '\n'.join(issue_list)
+ if title:
+ issue_str = title + '\n' + issue_str
+ return issue_str
diff --git a/tests/test_routes/test_routes_schema.py b/tests/test_routes/test_routes_schema.py
index e48bce2c..28dbea43 100644
--- a/tests/test_routes/test_routes_schema.py
+++ b/tests/test_routes/test_routes_schema.py
@@ -126,7 +126,7 @@ def test_schema_results_validate_mediawiki_invalid(self):
self.assertEqual(200, response.status_code, 'Validation of a invalid mediawiki has a response')
headers_dict = dict(response.headers)
self.assertEqual("warning", headers_dict["Category"],
- "A schema that cannot be loaded should return an a warning")
+ "A schema that cannot be loaded should return an error")
self.assertTrue(response.data, "The response data for invalid mediawiki validation should not be empty")
self.assertTrue(headers_dict['Message'],
"The error message for invalid mediawiki conversion should not be empty")
From 909c8fae04f27a0b449dc3a663aab2c85db39540 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Fri, 3 Feb 2023 14:17:42 -0600
Subject: [PATCH 18/25] Updated the dispatcher call for remodeling
---
hedweb/events.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hedweb/events.py b/hedweb/events.py
index b1323cc1..006b64f2 100644
--- a/hedweb/events.py
+++ b/hedweb/events.py
@@ -210,7 +210,7 @@ def remodel(hed_schema, events, sidecar, remodel_operations, include_summaries=T
base_constants.MSG_CATEGORY: 'success',
base_constants.MSG: f"Command parsing for {display_name} remodeling was successful"}
if dispatch.context_dict and include_summaries:
- file_list = dispatch.get_context_summaries()
+ file_list = dispatch.get_summaries()
file_list.append({'file_name': output_name, 'file_format': '.tsv', 'file_type': 'tabular', 'content': data})
response[base_constants.FILE_LIST] = file_list
response[base_constants.ZIP_NAME] = generate_filename(display_name, name_suffix=name_suffix + '_zip',
From d5eb81924275598a3feb3160a8e92eb2475ab923 Mon Sep 17 00:00:00 2001
From: Kay Robbins <1189050+VisLab@users.noreply.github.com>
Date: Sat, 11 Feb 2023 12:49:50 -0600
Subject: [PATCH 19/25] Updated the README
---
README.md | 25 ++++++++++++++++++++++++-
deploy_hed/Dockerfile | 2 +-
2 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index c834827c..106afd42 100644
--- a/README.md
+++ b/README.md
@@ -72,4 +72,27 @@ to your `deploy_hed` directory.
```
The `deploy.sh` script will download the latest versions of the `hed-python`
-and the `hed-web` repositories and deploy.
\ No newline at end of file
+and the `hed-web` repositories and deploy.
+
+### Branches and versions
+
+The `hed-python` repository
+
+| Branch | Meaning | Synchronized with |
+| ------ | -------- | ------------------ |
+| stable | Tagged as a released version - will not change. | `stable@hed-python`