From c85d9916dda917304f578e7ff2b9c4c59ba6fa40 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 14:19:13 +0100 Subject: [PATCH 01/74] minor --- services/web/client/source/class/osparc/desktop/StudyBrowser.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/desktop/StudyBrowser.js index d7b88a81fd3..8bb765fa070 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowser.js @@ -445,7 +445,7 @@ qx.Class.define("osparc.desktop.StudyBrowser", { uuid: study.uuid, studyTitle: study.name, icon: study.thumbnail || "@FontAwesome5Solid/flask/50", - creator: study.prjOwner ? "Created by: " + study.prjOwner + "" : null, + creator: study.prjOwner ? study.prjOwner : null, lastChangeDate: study.lastChangeDate ? new Date(study.lastChangeDate) : null, tags }); From bfd544942492bb7a6336ec09ac982395c683ae38 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 14:19:57 +0100 Subject: [PATCH 02/74] Show "Today" and "Yesterday" --- .../osparc/desktop/StudyBrowserListItem.js | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index 6aa1cacce15..ce90482d9b0 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -37,7 +37,9 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { // create a date format like "Oct. 19, 2018 11:31 AM" this._dateFormat = new qx.util.format.DateFormat( - qx.locale.Date.getDateFormat("medium") + " " + + qx.locale.Date.getDateFormat("medium") + ); + this._timeFormat = new qx.util.format.DateFormat( qx.locale.Date.getTimeFormat("short") ); @@ -92,6 +94,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { members: { // eslint-disable-line qx-rules/no-refs-in-members _dateFormat: null, + _timeFormat: null, _forwardStates: { focused : true, hovered : true, @@ -175,8 +178,16 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { _applylastChangeDate: function(value, old) { let label = this.getChildControl("lastChangeDate"); if (value) { - const dateStr = this._dateFormat.format(value); - label.setValue("Last change: " + dateStr + ""); + let dateStr = null; + if (value.getDate() === (new Date()).getDate()) { + dateStr = this.tr("Today"); + } else if (value.getDate() === (new Date()).getDate() - 1) { + dateStr = this.tr("Yesterday"); + } else { + dateStr = this._dateFormat.format(value); + } + const timeStr = this._timeFormat.format(value); + label.setValue(dateStr + " " + timeStr); } else { label.resetValue(); } @@ -257,6 +268,8 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { destruct : function() { this._dateFormat.dispose(); this._dateFormat = null; + this._timeFormat.dispose(); + this._timeFormat = null; this.removeListener("pointerover", this._onPointerOver, this); this.removeListener("pointerout", this._onPointerOut, this); } From 008ed5cd4de291888d98c88190141602dedbe1ea Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 14:20:15 +0100 Subject: [PATCH 03/74] MenuButton on studySheet --- .../osparc/desktop/StudyBrowserListItem.js | 89 +++++++++++++++---- 1 file changed, 71 insertions(+), 18 deletions(-) diff --git 
a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index ce90482d9b0..0d1bc5b0aab 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -29,7 +29,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { implement : [qx.ui.form.IModel, osparc.component.filter.IFilterable], include : [qx.ui.form.MModelProperty, osparc.component.filter.MFilterable], - construct: function() { + construct: function(hasMenu = true) { this.base(arguments); this.set({ width: 210 @@ -43,10 +43,22 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { qx.locale.Date.getTimeFormat("short") ); - let layout = new qx.ui.layout.VBox(5).set({ + this._setLayout(new qx.ui.layout.Canvas()); + + let mainLayout = this.__mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ alignY: "middle" + })); + this._add(mainLayout, { + top: 0, + right: 0, + bottom: 0, + left: 0 }); - this._setLayout(layout); + + if (hasMenu !== null) { + const menu = this.__getMenu(); + this.setMenu(menu); + } this.addListener("pointerover", this._onPointerOver, this); this.addListener("pointerout", this._onPointerOut, this); @@ -63,6 +75,14 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { init : "pb-listitem" }, + /** The menu instance to show when tapping on the button */ + menu: { + check : "qx.ui.menu.Menu", + nullable : true, + apply : "_applyMenu", + event : "changeMenu" + }, + uuid: { check: "String", apply : "_applyUuid" @@ -102,10 +122,24 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { dragover : true }, + __mainLayout: null, + // overridden _createChildControlImpl: function(id) { let control; switch (id) { + case "menu-button": + control = new qx.ui.form.MenuButton().set({ + width: 30, + height: 30, + icon: "@FontAwesome5Solid/ellipsis-v/16", + focusable: false + }); + this._add(control, { + top: 0, + right: 0 + }); + break; case "studyTitle": control = new qx.ui.basic.Label(this.getStudyTitle()).set({ margin: [5, 0], @@ -113,17 +147,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_title"); - this._addAt(control, 0); - break; - case "icon": - control = new qx.ui.basic.Image(this.getIcon()).set({ - anonymous: true, - scale: true, - allowStretchX: true, - allowStretchY: true, - height: 120 - }); - this._addAt(control, 1); + this.__mainLayout.addAt(control, 0); break; case "creator": control = new qx.ui.basic.Label(this.getCreator()).set({ @@ -132,7 +156,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_creator"); - this._addAt(control, 2); + this.__mainLayout.addAt(control, 1); break; case "lastChangeDate": control = new qx.ui.basic.Label().set({ @@ -141,18 +165,47 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_lastChangeDate"); - this._addAt(control, 3); + this.__mainLayout.addAt(control, 2); + break; + case "icon": + control = new qx.ui.basic.Image(this.getIcon()).set({ + anonymous: true, + scale: true, + allowStretchX: true, + allowStretchY: true, + height: 120 + }); + this.__mainLayout.addAt(control, 3); break; case "tags": control = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 3)); - 
this._addAt(control, 4); + this.__mainLayout.addAt(control, 4); break; } return control || this.base(arguments, id); }, - // overriden + __getMenu: function() { + const menu = new qx.ui.menu.Menu().set({ + position: "bottom-right" + }); + const undoButton = new qx.ui.menu.Button("Undo"); + const redoButton = new qx.ui.menu.Button("Redo"); + menu.add(undoButton); + menu.add(redoButton); + return menu; + }, + + _applyMenu: function(value, old) { + const menuButton = this.getChildControl("menu-button"); + if (value) { + menuButton.setMenu(value); + } + menuButton.setVisibility(value ? "visible" : "excluded"); + }, + + // overridden _applyUuid: function(value, old) { osparc.utils.Utils.setIdToWidget(this, "studyBrowserListItem_"+value); }, From a4a2cf0a9b4c8a63c281bb3a8a74e18ca6e29bcb Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 15:24:44 +0100 Subject: [PATCH 04/74] minor --- .../osparc/desktop/StudyBrowserListItem.js | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index 0d1bc5b0aab..eaf9efc5783 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -36,10 +36,10 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { }); // create a date format like "Oct. 19, 2018 11:31 AM" - this._dateFormat = new qx.util.format.DateFormat( + this.__dateFormat = new qx.util.format.DateFormat( qx.locale.Date.getDateFormat("medium") ); - this._timeFormat = new qx.util.format.DateFormat( + this.__timeFormat = new qx.util.format.DateFormat( qx.locale.Date.getTimeFormat("short") ); @@ -113,8 +113,9 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { }, members: { // eslint-disable-line qx-rules/no-refs-in-members - _dateFormat: null, - _timeFormat: null, + __dateFormat: null, + __timeFormat: null, + _forwardStates: { focused : true, hovered : true, @@ -237,9 +238,9 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { } else if (value.getDate() === (new Date()).getDate() - 1) { dateStr = this.tr("Yesterday"); } else { - dateStr = this._dateFormat.format(value); + dateStr = this.__dateFormat.format(value); } - const timeStr = this._timeFormat.format(value); + const timeStr = this.__timeFormat.format(value); label.setValue(dateStr + " " + timeStr); } else { label.resetValue(); @@ -319,10 +320,10 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { }, destruct : function() { - this._dateFormat.dispose(); - this._dateFormat = null; - this._timeFormat.dispose(); - this._timeFormat = null; + this.__dateFormat.dispose(); + this.__dateFormat = null; + this.__timeFormat.dispose(); + this.__timeFormat = null; this.removeListener("pointerover", this._onPointerOver, this); this.removeListener("pointerout", this._onPointerOut, this); } From 446d8f8038ec7e31fe4f4afa63f3fa10351e2b80 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 15:25:19 +0100 Subject: [PATCH 05/74] menu comes as an argument --- .../source/class/osparc/desktop/StudyBrowser.js | 17 ++++++++++++++++- .../osparc/desktop/StudyBrowserListItem.js | 16 ++-------------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/desktop/StudyBrowser.js index 8bb765fa070..5f93e54b244 100644 --- 
a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowser.js @@ -437,11 +437,13 @@ qx.Class.define("osparc.desktop.StudyBrowser", { }, __createStudyItem: function(study, isTemplate) { + const menu = this.__getStudyItemMenu(); + const tags = study.tags ? osparc.store.Store.getInstance().getTags().filter(tag => study.tags.includes(tag.id)) : []; - const item = new osparc.desktop.StudyBrowserListItem().set({ + const item = new osparc.desktop.StudyBrowserListItem(menu).set({ uuid: study.uuid, studyTitle: study.name, icon: study.thumbnail || "@FontAwesome5Solid/flask/50", @@ -488,6 +490,19 @@ qx.Class.define("osparc.desktop.StudyBrowser", { return item; }, + __getStudyItemMenu: function() { + const menu = new qx.ui.menu.Menu().set({ + position: "bottom-right" + }); + const moreInfoButton = new qx.ui.menu.Button(this.tr("More info")); + const selectButton = new qx.ui.menu.Button(this.tr("Select")); + const deleteButton = new qx.ui.menu.Button(this.tr("Delete")); + menu.add(moreInfoButton); + menu.add(selectButton); + menu.add(deleteButton); + return menu; + }, + __getStudyData: function(id, isTemplate) { const matchesId = study => study.uuid === id; return isTemplate ? this.__templateStudies.find(matchesId) : this.__userStudies.find(matchesId); diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index eaf9efc5783..0c142477d50 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -29,7 +29,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { implement : [qx.ui.form.IModel, osparc.component.filter.IFilterable], include : [qx.ui.form.MModelProperty, osparc.component.filter.MFilterable], - construct: function(hasMenu = true) { + construct: function(menu) { this.base(arguments); this.set({ width: 210 @@ -55,8 +55,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { left: 0 }); - if (hasMenu !== null) { - const menu = this.__getMenu(); + if (menu !== null) { this.setMenu(menu); } @@ -187,17 +186,6 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { return control || this.base(arguments, id); }, - __getMenu: function() { - const menu = new qx.ui.menu.Menu().set({ - position: "bottom-right" - }); - const undoButton = new qx.ui.menu.Button("Undo"); - const redoButton = new qx.ui.menu.Button("Redo"); - menu.add(undoButton); - menu.add(redoButton); - return menu; - }, - _applyMenu: function(value, old) { const menuButton = this.getChildControl("menu-button"); if (value) { From 615298e1c6a403aa5716531e33ae2846d3e0448a Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 15:27:40 +0100 Subject: [PATCH 06/74] minor refactoring --- .../class/osparc/desktop/StudyBrowser.js | 68 +++++++++++-------- 1 file changed, 38 insertions(+), 30 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/desktop/StudyBrowser.js index 5f93e54b244..1aaceb7435a 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowser.js @@ -454,39 +454,14 @@ qx.Class.define("osparc.desktop.StudyBrowser", { item.subscribeToFilterGroup("studyBrowser"); - item.addListener("dbltap", e => { - const studyData = this.__getStudyData(item.getUuid(), isTemplate); - if 
(isTemplate) { - this.__createStudyBtnClkd(studyData); - } else { - this.__startStudy(studyData); - } - }); - item.addListener("execute", () => { - // Selection logic - if (item.getValue()) { - if (isTemplate) { - this.__userStudyContainer.resetSelection(); - this.__templateStudyContainer.selectOne(item); - } else { - this.__templateStudyContainer.resetSelection(); - } - this.__itemSelected(item.getUuid(), isTemplate); - } else if (isTemplate) { - this.__itemSelected(null); - this.__templateDeleteButton.exclude(); - } else { - const selection = this.__userStudyContainer.getSelection(); - if (selection.length) { - this.__itemSelected(selection[0].getUuid()); - } else { - this.__studiesDeleteButton.exclude(); - this.__itemSelected(null); - } - } + this.__itemClicked(item, isTemplate); }, this); + item.addListener("dbltap", () => { + this.__itemDblClicked(item, isTemplate); + }); + return item; }, @@ -508,6 +483,39 @@ qx.Class.define("osparc.desktop.StudyBrowser", { return isTemplate ? this.__templateStudies.find(matchesId) : this.__userStudies.find(matchesId); }, + __itemClicked: function(item, isTemplate) { + // Selection logic + if (item.getValue()) { + if (isTemplate) { + this.__userStudyContainer.resetSelection(); + this.__templateStudyContainer.selectOne(item); + } else { + this.__templateStudyContainer.resetSelection(); + } + this.__itemSelected(item.getUuid(), isTemplate); + } else if (isTemplate) { + this.__itemSelected(null); + this.__templateDeleteButton.exclude(); + } else { + const selection = this.__userStudyContainer.getSelection(); + if (selection.length) { + this.__itemSelected(selection[0].getUuid()); + } else { + this.__studiesDeleteButton.exclude(); + this.__itemSelected(null); + } + } + }, + + __itemDblClicked: function(item, isTemplate) { + const studyData = this.__getStudyData(item.getUuid(), isTemplate); + if (isTemplate) { + this.__createStudyBtnClkd(studyData); + } else { + this.__startStudy(studyData); + } + }, + __itemSelected: function(studyId, isTemplate = false) { if (studyId === null) { if (this.__userStudyContainer) { From ee810470a57dfd340bac999e4ccf0ac5bd40d384 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 15:27:47 +0100 Subject: [PATCH 07/74] minor --- .../client/source/class/osparc/desktop/StudyBrowserListItem.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index 0c142477d50..fbe5fe96cf5 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -65,7 +65,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { events: { /** (Fired by {@link qx.ui.form.List}) */ - "action" : "qx.event.type.Event" + "action": "qx.event.type.Event" }, properties: { From f4d92207548d62bc753c803b21bafa09129d9523 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 9 Mar 2020 17:24:47 +0100 Subject: [PATCH 08/74] Toggled items show a check icon --- .../osparc/desktop/StudyBrowserListItem.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index fbe5fe96cf5..30314506acc 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ 
b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -61,6 +61,8 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { this.addListener("pointerover", this._onPointerOver, this); this.addListener("pointerout", this._onPointerOut, this); + + this.addListener("changeValue", this.__onToggleChange, this); }, events: { @@ -140,6 +142,18 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { right: 0 }); break; + case "tick-selected": + control = new qx.ui.form.MenuButton().set({ + width: 30, + height: 30, + icon: "@FontAwesome5Solid/check-circle/16", + focusable: false + }); + this._add(control, { + top: 0, + right: 0 + }); + break; case "studyTitle": control = new qx.ui.basic.Label(this.getStudyTitle()).set({ margin: [5, 0], @@ -252,6 +266,10 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { } }, + __onToggleChange: function(e) { + this.getChildControl("tick-selected").setVisibility(e.getData() ? "visible" : "excluded"); + }, + /** * Event handler for the pointer over event. */ From 0d6f7709d95ac187d3f49db7828a0ed76a5c1346 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 16:16:04 +0100 Subject: [PATCH 09/74] Update sleepers.js --- tests/e2e/tutorials/sleepers.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/e2e/tutorials/sleepers.js b/tests/e2e/tutorials/sleepers.js index 508f1105012..7bb81afc1bf 100644 --- a/tests/e2e/tutorials/sleepers.js +++ b/tests/e2e/tutorials/sleepers.js @@ -6,6 +6,7 @@ const tutorialBase = require('./tutorialBase'); const args = process.argv.slice(2); if (args.length < 1) { + console.log('More arguments expected'); process.exit(1); } const url = args[0]; @@ -38,7 +39,9 @@ async function runTutorial () { ]; await tutorial.checkResults(outFiles.length); - await tutorial.removeStudy(); + if (!newUser) { + await tutorial.removeStudy(); + } await tutorial.logOut(); await tutorial.close(); } From da45bd113b9fe984b90f2a843ff8e0b203719018 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 16:20:00 +0100 Subject: [PATCH 10/74] merge master --- .travis.yml | 22 - .vscode-template/settings.json | 8 +- Makefile | 9 +- README.md | 1 + .../schemas/node-meta-v0.0.1-converted.yaml | 6 + api/tests/Makefile | 2 +- api/tests/requirements.txt | 27 +- ci/github/system-testing/e2e.bash | 12 +- ci/travis/system-testing/e2e.bash | 9 +- packages/postgres-database/Makefile | 71 +- packages/postgres-database/docker/Makefile | 5 +- .../postgres-database/requirements/_base.in | 5 +- .../postgres-database/requirements/_base.txt | 6 +- .../requirements/_migration.in | 3 +- .../requirements/_migration.txt | 22 +- .../postgres-database/requirements/_test.in | 2 +- .../postgres-database/requirements/_test.txt | 82 +- .../postgres-database/requirements/prod.txt | 11 + .../scripts/remove_comp_tasks_duplicates.sql | 3 + .../src/simcore_postgres_database/cli.py | 21 +- .../models/comp_tasks.py | 9 +- .../models/user_to_projects.py | 6 +- packages/postgres-database/tests/conftest.py | 22 +- .../tests/test_delete_projects_and_users.py | 58 +- packages/s3wrapper/requirements/_base.in | 2 +- packages/s3wrapper/requirements/_base.txt | 4 +- packages/s3wrapper/requirements/_test.txt | 40 +- packages/service-library/Makefile | 44 +- .../service-library/requirements/_base.txt | 28 +- .../service-library/requirements/_test.txt | 10 +- packages/service-library/setup.py | 40 +- .../src/servicelib/__init__.py | 2 +- .../src/servicelib/aiopg_utils.py | 53 +- .../src/servicelib/application.py | 3 +-
.../src/servicelib/application_keys.py | 10 +- .../src/servicelib/application_setup.py | 65 +- .../src/servicelib/client_session.py | 14 +- .../src/servicelib/config_schema_utils.py | 7 +- .../src/servicelib/decorators.py | 9 +- .../src/servicelib/jsonschema_specs.py | 8 +- .../src/servicelib/jsonschema_validation.py | 1 + .../src/servicelib/monitoring.py | 67 +- .../src/servicelib/observer.py | 9 +- .../service-library/src/servicelib/openapi.py | 33 +- .../src/servicelib/openapi_servers.py | 3 - .../src/servicelib/openapi_validation.py | 33 +- .../src/servicelib/openapi_wrappers.py | 14 +- .../src/servicelib/request_keys.py | 4 +- .../src/servicelib/requests_utils.py | 10 +- .../src/servicelib/resources.py | 7 +- .../src/servicelib/rest_codecs.py | 3 +- .../src/servicelib/rest_middlewares.py | 42 +- .../src/servicelib/rest_models.py | 13 +- .../src/servicelib/rest_oas.py | 8 +- .../src/servicelib/rest_responses.py | 36 +- .../src/servicelib/rest_routing.py | 40 +- .../src/servicelib/rest_utils.py | 29 +- .../src/servicelib/rest_validators.py | 34 +- .../service-library/src/servicelib/tracing.py | 17 +- .../service-library/src/servicelib/utils.py | 38 +- packages/service-library/tests/conftest.py | 6 +- .../tests/test_application_setup.py | 47 +- .../service-library/tests/test_decorators.py | 7 +- .../tests/test_openapi_validation.py | 32 +- .../service-library/tests/test_package.py | 8 +- .../tests/test_rest_middlewares.py | 33 +- .../tests/test_rest_routing.py | 28 +- .../service-library/tests/test_sandbox.py | 11 +- packages/service-library/tests/tutils.py | 34 +- .../tests/with_postgres/conftest.py | 22 +- .../tests/with_postgres/test_aiopg_utils.py | 98 ++- packages/simcore-sdk/Makefile | 23 +- packages/simcore-sdk/requirements/_test.txt | 24 +- ...equirements.sh => check_requirements.bash} | 2 +- .../{code-climate.sh => code-climate.bash} | 3 +- scripts/common.Makefile | 120 +++ scripts/openapi/oas_resolver/Dockerfile | 2 +- scripts/{shellcheck => shellcheck.bash} | 0 ...ents.sh => upgrade_test_requirements.bash} | 4 +- scripts/{url-encoder.sh => url-encoder.bash} | 0 services/catalog/Dockerfile | 12 +- services/catalog/Makefile | 66 +- services/catalog/requirements/_test.txt | 16 +- .../catalog/src/simcore_service_catalog/db.py | 1 + services/director/Dockerfile | 16 +- services/director/Makefile | 50 +- services/director/requirements/_base.in | 1 + services/director/requirements/_base.txt | 18 +- services/director/requirements/_test.in | 1 - services/director/requirements/_test.txt | 76 +- .../api/v0/openapi.yaml | 15 + .../api/v0/schemas/node-meta-v0.0.1.json | 8 + .../registry_proxy.py | 2 + .../director/tests/fixtures/fake_services.py | 129 ++- .../tests/test_registry_cache_task.py | 40 +- services/docker-compose.yml | 8 +- services/sidecar/Dockerfile | 2 +- services/sidecar/requirements/_base.in | 2 +- services/sidecar/requirements/_base.txt | 22 +- services/sidecar/requirements/_test.txt | 86 +- services/storage/Dockerfile | 12 +- services/storage/Makefile | 51 +- services/storage/docker/healthcheck.py | 17 +- services/storage/requirements/_base.in | 4 +- services/storage/requirements/_base.txt | 60 +- services/storage/requirements/_test.txt | 158 ++-- services/storage/setup.cfg | 2 - services/storage/setup.py | 52 +- .../src/simcore_service_storage/__init__.py | 1 - .../simcore_service_storage/__version__.py | 37 +- .../api/v0/schemas/node-meta-v0.0.1.json | 8 + .../simcore_service_storage/application.py | 26 +- .../src/simcore_service_storage/cli.py | 9 +- 
.../src/simcore_service_storage/cli_config.py | 13 +- .../simcore_service_storage/config_schema.py | 51 +- .../src/simcore_service_storage/datcore.py | 172 ++-- .../datcore_wrapper.py | 34 +- .../storage/src/simcore_service_storage/db.py | 49 +- .../src/simcore_service_storage/db_tokens.py | 18 +- .../src/simcore_service_storage/dsm.py | 391 +++++++--- .../src/simcore_service_storage/handlers.py | 253 +++--- .../src/simcore_service_storage/models.py | 66 +- .../src/simcore_service_storage/resources.py | 5 +- .../src/simcore_service_storage/rest.py | 8 +- .../simcore_service_storage/rest_config.py | 8 +- .../simcore_service_storage/rest_models.py | 2 +- .../simcore_service_storage/rest_routes.py | 95 ++- .../storage/src/simcore_service_storage/s3.py | 18 +- .../src/simcore_service_storage/settings.py | 30 +- .../src/simcore_service_storage/utils.py | 10 +- services/storage/tests/_test_rawdatcore.py | 19 +- services/storage/tests/conftest.py | 214 ++--- .../storage/tests/helpers/utils_assert.py | 27 +- .../storage/tests/helpers/utils_project.py | 10 +- services/storage/tests/test_configs.py | 66 +- services/storage/tests/test_datcore.py | 5 +- services/storage/tests/test_dsm.py | 353 ++++++--- services/storage/tests/test_package.py | 7 +- services/storage/tests/test_resources.py | 19 +- services/storage/tests/test_rest.py | 212 +++-- services/storage/tests/utils.py | 94 ++- services/web/Dockerfile | 12 +- services/web/client/source/boot/index.html | 2 + .../osparc/component/export/ExportGroup.js | 246 ++++++ .../filter/group/ServiceFilterGroup.js | 10 +- .../class/osparc/component/form/Auto.js | 137 +--- .../component/form/ToggleButtonContainer.js | 2 +- .../component/form/renderer/PropForm.js | 307 ++++---- .../component/form/renderer/PropFormBase.js | 171 ++++ .../component/form/renderer/PropFormEditor.js | 211 +++++ .../osparc/component/metadata/ServiceInfo.js | 2 +- .../osparc/component/node/BaseNodeView.js | 77 +- .../osparc/component/node/GroupNodeView.js | 45 +- .../class/osparc/component/node/NodeView.js | 22 +- .../osparc/component/widget/InputsMapper.js | 8 +- .../osparc/component/widget/NodeInOut.js | 2 +- .../osparc/component/widget/NodesTree.js | 7 +- .../component/widget/PersistentIframe.js | 40 +- .../osparc/component/workbench/NodeUI.js | 6 +- .../component/workbench/ServiceCatalog.js | 13 +- .../osparc/component/workbench/SvgWidget.js | 8 + .../osparc/component/workbench/WorkbenchUI.js | 20 +- .../source/class/osparc/data/Converters.js | 14 + .../source/class/osparc/data/Permissions.js | 1 + .../source/class/osparc/data/Resources.js | 44 +- .../source/class/osparc/data/model/Node.js | 298 +++++-- .../class/osparc/data/model/Workbench.js | 75 +- .../source/class/osparc/desktop/MainPage.js | 6 +- .../class/osparc/desktop/NavigationBar.js | 12 +- .../class/osparc/desktop/ServiceBrowser.js | 185 +++-- .../osparc/desktop/ServiceBrowserListItem.js | 25 +- .../class/osparc/desktop/StudyBrowser.js | 19 +- .../osparc/desktop/StudyBrowserListItem.js | 9 - .../class/osparc/desktop/StudyEditor.js | 52 +- .../osparc/desktop/preferences/Preferences.js | 7 + .../preferences/pages/ExperimentalPage.js | 8 +- .../source/class/osparc/dev/fake/Data.js | 150 ---- .../client/source/class/osparc/store/Store.js | 41 +- .../source/class/osparc/ui/hint/Hint.js | 3 +- .../class/osparc/ui/window/Confirmation.js | 39 + .../source/class/osparc/utils/Services.js | 58 +- .../client/source/class/osparc/utils/Utils.js | 11 +- services/web/server/Makefile | 59 +- services/web/server/docker/healthcheck.py | 13 
+- services/web/server/requirements/_test.txt | 172 ++-- services/web/server/setup.py | 43 +- .../simcore_service_webserver/__version__.py | 2 +- .../activity/__init__.py | 28 +- .../activity/config.py | 16 +- .../activity/handlers.py | 92 +-- .../api/v0/openapi.yaml | 5 + .../api/v0/schemas/node-meta-v0.0.1.json | 8 + .../simcore_service_webserver/application.py | 25 +- .../application_config.py | 104 ++- .../application_proxy.py | 38 +- .../src/simcore_service_webserver/cli.py | 20 +- .../simcore_service_webserver/cli_config.py | 22 +- .../simcore_service_webserver/computation.py | 27 +- .../computation_api.py | 133 ++-- .../computation_comp_tasks_listening_task.py | 21 +- .../computation_config.py | 2 +- .../computation_handlers.py | 31 +- .../computation_models.py | 8 +- .../computation_subscribe.py | 37 +- .../data/s4l_converter.py | 58 +- .../src/simcore_service_webserver/db.py | 62 +- .../simcore_service_webserver/db_config.py | 18 +- .../simcore_service_webserver/db_models.py | 26 +- .../director/__init__.py | 34 +- .../director/config.py | 27 +- .../director/director_api.py | 57 +- .../director/director_exceptions.py | 3 + .../director/handlers.py | 12 +- .../src/simcore_service_webserver/email.py | 27 +- .../simcore_service_webserver/email_config.py | 19 +- .../login/__init__.py | 45 +- .../simcore_service_webserver/login/cfg.py | 107 +-- .../simcore_service_webserver/login/config.py | 21 +- .../login/confirmation.py | 29 +- .../login/decorators.py | 8 +- .../login/handlers.py | 270 ++++--- .../login/registration.py | 78 +- .../simcore_service_webserver/login/routes.py | 30 +- .../login/settings.py | 2 +- .../simcore_service_webserver/login/sql.py | 64 +- .../login/storage.py | 51 +- .../simcore_service_webserver/login/utils.py | 52 +- .../projects/__init__.py | 33 +- .../projects/config.py | 4 +- .../projects/nodes_handlers.py | 15 +- .../projects/projects_access.py | 20 +- .../projects/projects_api.py | 239 ++++-- .../projects/projects_db.py | 3 +- .../projects/projects_exceptions.py | 12 +- .../projects/projects_fakes.py | 17 +- .../projects/projects_handlers.py | 364 +++++---- .../projects/projects_models.py | 10 +- .../projects/projects_utils.py | 62 +- .../resource_manager/__init__.py | 17 +- .../resource_manager/config.py | 33 +- .../resource_manager/garbage_collector.py | 40 +- .../resource_manager/redis.py | 9 +- .../resource_manager/registry.py | 49 +- .../resource_manager/websocket_manager.py | 90 ++- .../simcore_service_webserver/resources.py | 6 +- .../src/simcore_service_webserver/rest.py | 28 +- .../simcore_service_webserver/rest_config.py | 14 +- .../simcore_service_webserver/rest_models.py | 13 +- .../simcore_service_webserver/rest_routes.py | 24 +- .../reverse_proxy/__init__.py | 19 +- .../reverse_proxy/abc.py | 2 +- .../handlers/aiohttp_client_extension.py | 184 +++-- .../reverse_proxy/handlers/generic.py | 47 +- .../reverse_proxy/handlers/paraview.py | 80 +- .../reverse_proxy/routing.py | 27 +- .../src/simcore_service_webserver/security.py | 5 +- .../security_access_model.py | 50 +- .../simcore_service_webserver/security_api.py | 32 +- .../security_authorization.py | 26 +- .../security_permissions.py | 2 +- .../security_roles.py | 107 ++- .../src/simcore_service_webserver/session.py | 16 +- .../session_config.py | 6 +- .../socketio/__init__.py | 8 +- .../socketio/config.py | 10 +- .../socketio/events.py | 32 +- .../socketio/handlers.py | 47 +- .../socketio/handlers_utils.py | 46 +- .../src/simcore_service_webserver/statics.py | 22 +- 
.../src/simcore_service_webserver/storage.py | 9 +- .../simcore_service_webserver/storage_api.py | 47 +- .../storage_config.py | 20 +- .../storage_handlers.py | 32 +- .../storage_routes.py | 84 +- .../studies_access.py | 97 ++- .../simcore_service_webserver/tag_handlers.py | 60 +- .../src/simcore_service_webserver/tags.py | 28 +- .../tracing/__init__.py | 18 +- .../src/simcore_service_webserver/users.py | 28 +- .../users_handlers.py | 87 +-- services/web/server/tests/conftest.py | 19 +- .../tests/data/static/resource/.gitkeep | 0 .../web/server/tests/helpers/utils_assert.py | 21 +- .../web/server/tests/helpers/utils_docker.py | 60 +- .../server/tests/helpers/utils_environs.py | 65 +- .../web/server/tests/helpers/utils_login.py | 40 +- .../server/tests/helpers/utils_projects.py | 52 +- .../web/server/tests/helpers/utils_tokens.py | 22 +- .../tests/integration/computation/conftest.py | 17 +- .../computation/test_computation.py | 183 +++-- .../integration/computation/test_rabbit.py | 151 ++-- .../web/server/tests/integration/conftest.py | 52 +- .../integration/fixtures/celery_service.py | 7 +- .../integration/fixtures/docker_compose.py | 122 +-- .../integration/fixtures/docker_registry.py | 16 +- .../integration/fixtures/docker_swarm.py | 54 +- .../integration/fixtures/postgres_service.py | 6 +- .../integration/fixtures/rabbit_service.py | 1 + .../integration/fixtures/redis_service.py | 5 +- .../integration/fixtures/websocket_client.py | 11 +- .../integration/test_project_workflow.py | 106 ++- .../TODO - integration-proxy/conftest.py | 109 +-- .../test_application_proxy.py | 107 ++- .../web/server/tests/sandbox/jupyter-proxy.py | 45 +- .../server/tests/sandbox/paraview-proxy.py | 55 +- .../web/server/tests/sandbox/reverse_proxy.py | 9 +- services/web/server/tests/unit/conftest.py | 18 +- .../web/server/tests/unit/test_activity.py | 96 ++- .../web/server/tests/unit/test_configs.py | 86 +- .../web/server/tests/unit/test_consistency.py | 16 +- .../web/server/tests/unit/test_package.py | 12 +- .../server/tests/unit/test_projects_models.py | 26 +- .../server/tests/unit/test_projects_utils.py | 23 +- .../web/server/tests/unit/test_resources.py | 16 +- services/web/server/tests/unit/test_rest.py | 61 +- .../server/tests/unit/test_reverse_proxy.py | 117 +-- .../tests/unit/test_security_access_model.py | 111 +-- .../tests/unit/test_template_projects.py | 27 +- .../server/tests/unit/with_dbs/conftest.py | 102 ++- .../unit/with_dbs/test_access_to_studies.py | 106 +-- .../tests/unit/with_dbs/test_change_email.py | 41 +- .../unit/with_dbs/test_change_password.py | 74 +- .../web/server/tests/unit/with_dbs/test_db.py | 9 +- .../unit/with_dbs/test_guests_management.py | 10 +- .../server/tests/unit/with_dbs/test_login.py | 53 +- .../server/tests/unit/with_dbs/test_logout.py | 12 +- .../tests/unit/with_dbs/test_projects.py | 734 ++++++++++++------ .../server/tests/unit/with_dbs/test_redis.py | 8 +- .../unit/with_dbs/test_redis_registry.py | 89 ++- .../tests/unit/with_dbs/test_registration.py | 156 ++-- .../unit/with_dbs/test_reset_password.py | 76 +- .../unit/with_dbs/test_resource_manager.py | 304 +++++--- .../tests/unit/with_dbs/test_storage.py | 176 +++-- .../server/tests/unit/with_dbs/test_users.py | 219 +++--- tests/e2e/Makefile | 12 + tests/e2e/requirements/Makefile | 2 +- tests/e2e/requirements/requirements.txt | 14 +- .../setup_env_insecure_registry.bash} | 1 - tests/e2e/tutorials/tutorialBase.js | 28 +- tests/e2e/utils/responsesQueue.js | 37 +- tests/e2e/utils/utils.js | 3 +- 
tests/swarm-deploy/Makefile | 16 +- tests/swarm-deploy/requirements.txt | 26 +- tests/swarm-deploy/test_service_images.py | 26 + tests/system-load/Makefile | 37 - tests/system-load/ManualTests.md | 12 - tests/system-load/data/projects.csv | 10 - tests/system-load/data/users.csv | 2 - tests/system-load/locust_files/basic.py | 48 -- tests/system-load/locust_files/published.py | 57 -- tests/system-load/requirements.txt | 2 - 351 files changed, 9558 insertions(+), 6642 deletions(-) create mode 100644 packages/postgres-database/requirements/prod.txt rename scripts/{check_requirements.sh => check_requirements.bash} (64%) rename scripts/{code-climate.sh => code-climate.bash} (96%) mode change 100755 => 100644 create mode 100644 scripts/common.Makefile rename scripts/{shellcheck => shellcheck.bash} (100%) mode change 100755 => 100644 rename scripts/{upgrade_test_requirements.sh => upgrade_test_requirements.bash} (67%) mode change 100755 => 100644 rename scripts/{url-encoder.sh => url-encoder.bash} (100%) mode change 100755 => 100644 create mode 100644 services/web/client/source/class/osparc/component/export/ExportGroup.js create mode 100644 services/web/client/source/class/osparc/component/form/renderer/PropFormBase.js create mode 100644 services/web/client/source/class/osparc/component/form/renderer/PropFormEditor.js create mode 100644 services/web/client/source/class/osparc/ui/window/Confirmation.js delete mode 100644 services/web/server/tests/data/static/resource/.gitkeep rename tests/e2e/{setup_env_insecure_registry => scripts/setup_env_insecure_registry.bash} (96%) mode change 100755 => 100644 create mode 100644 tests/swarm-deploy/test_service_images.py delete mode 100644 tests/system-load/Makefile delete mode 100644 tests/system-load/ManualTests.md delete mode 100644 tests/system-load/data/projects.csv delete mode 100644 tests/system-load/data/users.csv delete mode 100644 tests/system-load/locust_files/basic.py delete mode 100644 tests/system-load/locust_files/published.py delete mode 100644 tests/system-load/requirements.txt diff --git a/.travis.yml b/.travis.yml index d52d65032ca..88767de1ac1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -153,28 +153,6 @@ jobs: after_failure: - unbuffer bash ci/travis/unit-testing/director.bash after_failure - # test python, sidecar ---------------------------------------------------------------------- - # TODO: activate when sidecar tests are READY! 
- # - stage: build / unit-testing - # name: sidecar - # language: python - # python: - # - "3.6" - # sudo: required - # cache: pip - # before_install: - # - sudo bash ci/travis/unit-testing/sidecar.bash before_install - # install: - # - bash ci/travis/unit-testing/sidecar.bash install - # before_script: - # - bash ci/travis/unit-testing/sidecar.bash before_script - # script: - # - bash ci/travis/unit-testing/sidecar.bash script - # after_success: - # - bash ci/travis/unit-testing/sidecar.bash after_success - # after_failure: - # - bash ci/travis/unit-testing/sidecar.bash after_failure - # test python, service-library ---------------------------------------------------------------------- - stage: build / unit-testing name: service-library diff --git a/.vscode-template/settings.json b/.vscode-template/settings.json index cc88f3c9e7e..719d1639a04 100644 --- a/.vscode-template/settings.json +++ b/.vscode-template/settings.json @@ -8,7 +8,8 @@ ".env*": "ini", "Dockerfile*": "dockerfile", "**/requirements/*.txt": "pip-requirements", - "**/requirements/*.in": "pip-requirements" + "**/requirements/*.in": "pip-requirements", + "*Makefile": "makefile" }, "files.eol": "\n", "files.insertFinalNewline": true, @@ -28,7 +29,8 @@ }, "python.testing.pyTestEnabled": true, "autoDocstring.docstringFormat": "sphinx", - "shellcheck.executablePath": "${workspaceFolder}/scripts/shellcheck", + "shellcheck.executablePath": "${workspaceFolder}/scripts/shellcheck.bash", "shellcheck.run": "onSave", - "shellcheck.enableQuickFix": true + "shellcheck.enableQuickFix": true, + "python.formatting.provider": "black" } diff --git a/Makefile b/Makefile index 383c32d848d..c116bd38d81 100644 --- a/Makefile +++ b/Makefile @@ -126,6 +126,11 @@ $(CLIENT_WEB_OUTPUT): -mkdir $(if $(IS_WIN),,-p) $(CLIENT_WEB_OUTPUT) +.PHONY: shell +shell: + docker run -it local/$(target):production /bin/sh + + ## docker SWARM ------------------------------- # # - All resolved configuration are named as .stack-${name}-*.yml to distinguish from docker-compose files which can be parametrized @@ -335,9 +340,9 @@ openapi-specs: ## bundles and validates openapi specifications and schemas of AL .PHONY: code-analysis code-analysis: .codeclimate.yml ## runs code-climate analysis # Validates $< - ./scripts/code-climate.sh validate-config + ./scripts/code-climate.bash validate-config # Running analysis - ./scripts/code-climate.sh analyze + ./scripts/code-climate.bash analyze .PHONY: info info-images info-swarm info-tools diff --git a/README.md b/README.md index 94b1570c637..b3e7af8eaab 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ [![storage]](https://microbadger.com/images/itisfoundation/storage "More on itisfoundation/storage:staging-latest image") --> + [Requires.io]:https://img.shields.io/requires/github/ITISFoundation/osparc-simcore.svg [travis-ci]:https://travis-ci.org/ITISFoundation/osparc-simcore.svg?branch=master diff --git a/api/specs/common/schemas/node-meta-v0.0.1-converted.yaml b/api/specs/common/schemas/node-meta-v0.0.1-converted.yaml index a5135b53878..acb8244240d 100644 --- a/api/specs/common/schemas/node-meta-v0.0.1-converted.yaml +++ b/api/specs/common/schemas/node-meta-v0.0.1-converted.yaml @@ -18,6 +18,12 @@ properties: description: distinctive name for the node based on the docker registry path pattern: '^(simcore)/(services)/(comp|dynamic)(/[^\s/]+)+$' example: simcore/services/comp/itis/sleeper + integration-version: + type: string + description: integration version number + pattern: >- + 
^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + example: 1.0.0 version: type: string description: semantic version number diff --git a/api/tests/Makefile b/api/tests/Makefile index 08e39b6b3d7..3547ff49b7f 100644 --- a/api/tests/Makefile +++ b/api/tests/Makefile @@ -6,7 +6,7 @@ VENV_DIR ?= $(abspath $(ROOT_DIR)/.venv) .PHONY: all all: install tests -%.txt: %.in +requirements.txt: requirements.in # pip compiling $< @$(VENV_DIR)/bin/pip-compile --output-file $@ $< diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt index 318cbff53b2..057e81747ba 100644 --- a/api/tests/requirements.txt +++ b/api/tests/requirements.txt @@ -10,16 +10,18 @@ attrs==19.3.0 # via aiohttp, jsonschema, openapi-core, pytest chardet==3.0.4 # via aiohttp coverage==4.5.1 idna-ssl==1.1.0 # via aiohttp -idna==2.8 # via idna-ssl, yarl -importlib-metadata==1.3.0 # via jsonschema, pluggy, pytest -isodate==0.6.0 # via openapi-core -jsonschema==3.2.0 # via openapi-spec-validator +idna==2.9 # via idna-ssl, yarl +importlib-metadata==1.5.0 # via jsonschema, pluggy, pytest +isodate==0.6.0 # via openapi-schema-validator +jsonschema==3.2.0 # via openapi-schema-validator, openapi-spec-validator lazy-object-proxy==1.4.3 # via openapi-core -more-itertools==8.0.2 # via pytest, zipp -multidict==4.7.3 # via aiohttp, yarl -openapi-core==0.12.0 +more-itertools==8.2.0 # via openapi-core, pytest +multidict==4.7.5 # via aiohttp, yarl +openapi-core==0.13.3 +openapi-schema-validator==0.1.1 # via openapi-core openapi-spec-validator==0.2.8 # via openapi-core -packaging==20.0 # via pytest, pytest-sugar +packaging==20.3 # via pytest, pytest-sugar +parse==1.15.0 # via openapi-core pluggy==0.13.1 # via pytest py==1.8.1 # via pytest pyparsing==2.4.6 # via packaging @@ -28,15 +30,16 @@ pytest-aiohttp==0.3.0 pytest-cov==2.8.1 pytest-instafail==0.4.1.post0 pytest-sugar==0.9.2 -pytest==5.3.2 +pytest==5.3.5 pyyaml==5.3 # via openapi-spec-validator -six==1.13.0 # via isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, pyrsistent -strict-rfc3339==0.7 # via openapi-core +six==1.14.0 # via isodate, jsonschema, openapi-core, openapi-schema-validator, openapi-spec-validator, packaging, pyrsistent +strict-rfc3339==0.7 # via openapi-schema-validator termcolor==1.1.0 # via pytest-sugar typing-extensions==3.7.4.1 # via aiohttp wcwidth==0.1.8 # via pytest +werkzeug==1.0.0 # via openapi-core yarl==1.4.2 # via aiohttp -zipp==0.6.0 # via importlib-metadata +zipp==3.1.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/ci/github/system-testing/e2e.bash b/ci/github/system-testing/e2e.bash index 7f7ef032446..2fc91bb7332 100755 --- a/ci/github/system-testing/e2e.bash +++ b/ci/github/system-testing/e2e.bash @@ -14,10 +14,12 @@ install() { echo "--------------- getting simcore docker images..." make pull-version || ( (make pull-cache || true) && make build tag-version) make info-images + # configure simcore for testing with a private registry - bash tests/e2e/setup_env_insecure_registry - # start simcore - make up-version + bash tests/e2e/scripts/setup_env_insecure_registry.bash + + # start simcore and set log-level to debug + export LOG_LEVEL=INFO; make up-version echo "-------------- installing test framework..." # create a python venv and activate @@ -34,6 +36,7 @@ install() { echo "--------------- transfering the images to the local registry..." 
make transfer-images-to-registry echo "--------------- injecting templates in postgres db..." + make pg-db-tables make inject-templates-in-db popd } @@ -53,6 +56,9 @@ recover_artifacts() { (docker service logs --timestamps simcore_storage > simcore_logs/storage.log) || true (docker service logs --timestamps simcore_sidecar > simcore_logs/sidecar.log) || true (docker service logs --timestamps simcore_catalog > simcore_logs/catalog.log) || true + + # stack config + (cp .stack-simcore-version.yml simcore_logs/) || true } clean_up() { diff --git a/ci/travis/system-testing/e2e.bash b/ci/travis/system-testing/e2e.bash index 1cddf0ff10d..9cf070ad4ea 100644 --- a/ci/travis/system-testing/e2e.bash +++ b/ci/travis/system-testing/e2e.bash @@ -24,13 +24,15 @@ before_script() { make pull-version || ( (make pull-cache || true) && make build tag-version) make info-images # configure simcore for testing with a private registry - bash tests/e2e/setup_env_insecure_registry - # start simcore - make up-version + bash tests/e2e/scripts/setup_env_insecure_registry.bash + + # start simcore and set log-level to debug + export LOG_LEVEL=INFO; make up-version echo "-------------- installing test framework..." # create a python venv and activate make .venv + # shellcheck disable=SC1091 source .venv/bin/activate bash ci/helpers/ensure_python_pip.bash pushd tests/e2e; @@ -42,6 +44,7 @@ before_script() { echo "--------------- transfering the images to the local registry..." make transfer-images-to-registry echo "--------------- injecting templates in postgres db..." + make pg-db-tables make inject-templates-in-db popd } diff --git a/packages/postgres-database/Makefile b/packages/postgres-database/Makefile index 66cf24e7180..9b1bfee4a1f 100644 --- a/packages/postgres-database/Makefile +++ b/packages/postgres-database/Makefile @@ -1,26 +1,17 @@ # -# TODO: under development +# Targets for DEVELOPMENT of postgres-database # -.DEFAULT_GOAL := help +include ../../scripts/common.Makefile REPO_BASE_DIR = $(abspath $(CURDIR)/../../) -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) --directory ${REPO_BASE_DIR} $@ - - .PHONY: requirements requirements: ## compiles pip requirements (.in -> .txt) @$(MAKE) --directory requirements all -.check-venv-active: - # checking whether virtual environment was activated - @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" - .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: requirements .check-venv-active ## install app in development/production or CI mode +install-dev install-prod install-ci: requirements _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode pip-sync requirements/$(subst install-,,$@).txt @@ -31,20 +22,6 @@ tests: ## runs unit tests @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests -.PHONY: version-patch version-minor -version-patch version-minor version-major: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - .PHONY: setup-commit setup-commit: install-dev up-pg ## sets up a database to create a new commit into migration history # discovering @@ -57,38 +34,24 @@ setup-commit: 
install-dev up-pg ## sets up a database to create a new commit int @echo "To add new commit, sc-pg review -m \"Some message\" " -_docker_compose_config := tests/docker-compose.yml - -.PHONY: up-pg down-pg +.PHONY: migrate +DOT_ENV_FILE = $(abspath $(REPO_BASE_DIR)/.env) -up-pg: ## starts pg server - docker-compose -f $(_docker_compose_config) up -d +migrate: $(DOT_ENV_FILE) ## basic migration update (use ONLY for development purposes) + # Discovering postpgres server + @export $(shell grep -v '^#' $< | xargs -0); sc-pg discover + # Upgraded to head + @sc-pg upgrade + @sc-pg clean -down-pg: ## stops pg server - docker-compose -f $(_docker_compose_config) down -.PHONY: autoformat -autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] - # auto formatting with black - @python3 -m black --verbose \ - --exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration)/" \ - $(CURDIR) +.PHONY: up-pg down-pg +DOCKER_COMPOSE_CONFIG := tests/docker-compose.yml -.PHONY: clean -clean: down ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf - +up-pg: $(DOCKER_COMPOSE_CONFIG) ## starts pg server + docker-compose -f $< up -d -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '$(notdir $(CURDIR))':" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" +down-pg: $(DOCKER_COMPOSE_CONFIG) ## stops pg server + docker-compose -f $< down diff --git a/packages/postgres-database/docker/Makefile b/packages/postgres-database/docker/Makefile index fc29722cdb2..6f6439a2ca4 100644 --- a/packages/postgres-database/docker/Makefile +++ b/packages/postgres-database/docker/Makefile @@ -53,9 +53,12 @@ upgrade: ## migrate the postgres database down: ## stop migration service @docker service rm $(APP_NAME) + +DOCKER_IMAGES = $(shell docker images */$(APP_NAME):* -q) + .PHONY: clean clean: ## clean all created images - -@docker image rm -f $(shell docker images */$(APP_NAME):* -q) + $(if $(DOCKER_IMAGES),@docker image rm -f $(DOCKER_IMAGES),$(info No image to delete)) .PHONY: help # thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in index da6c74026c1..3310b6c2b46 100644 --- a/packages/postgres-database/requirements/_base.in +++ b/packages/postgres-database/requirements/_base.in @@ -1,9 +1,6 @@ # # Specifies third-party dependencies for 'simcore-postgres-database' # - -# psycopg2-binary # enforces binary version -# sqlalchemy>=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164 -sqlalchemy[postgresql_psycopg2binary]>=1.3.3 +sqlalchemy[postgresql_psycopg2binary]>=1.3.3 # binary + Vulnerability yarl diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index 574e3ed80aa..6e035fda46d 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile # To update, run: # -# make 
_base.txt +# pip-compile --output-file=_base.txt _base.in # idna==2.8 # via yarl multidict==4.5.2 # via yarl psycopg2-binary==2.8.4 # via sqlalchemy -sqlalchemy[postgresql_psycopg2binary]==1.3.5 -yarl==1.3.0 +sqlalchemy[postgresql_psycopg2binary]==1.3.5 # via -r requirements/_base.in +yarl==1.4.2 # via -r requirements/_base.in diff --git a/packages/postgres-database/requirements/_migration.in b/packages/postgres-database/requirements/_migration.in index 331a5cf7fb7..acb21655f92 100644 --- a/packages/postgres-database/requirements/_migration.in +++ b/packages/postgres-database/requirements/_migration.in @@ -4,7 +4,8 @@ # frozen specs -r _base.txt -certifi==2019.6.16 # added contraint to fit pre-installation of jupyter/base-notebook:python-3.7.3 (cannot uninstall) +certifi==2019.11.28 # added contraint to fit pre-installation of jupyter/base-notebook:python-3.7.3 (cannot uninstall) +urllib3>=1.25.8 # Vulnerability alembic click diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt index 7cad6b7b0f5..10baa69c427 100644 --- a/packages/postgres-database/requirements/_migration.txt +++ b/packages/postgres-database/requirements/_migration.txt @@ -4,22 +4,22 @@ # # pip-compile --output-file=_migration.txt _migration.in # -alembic==1.0.8 -certifi==2019.6.16 +alembic==1.4.1 # via -r requirements/_migration.in +certifi==2019.11.28 # via -r requirements/_migration.in, requests chardet==3.0.4 # via requests -click==7.0 -docker==4.0.2 -idna==2.8 +click==7.1.1 # via -r requirements/_migration.in +docker==4.2.0 # via -r requirements/_migration.in +idna==2.8 # via -r requirements/_base.txt, requests, yarl mako==1.0.12 # via alembic markupsafe==1.1.1 # via mako -multidict==4.5.2 -psycopg2-binary==2.8.4 +multidict==4.5.2 # via -r requirements/_base.txt, yarl +psycopg2-binary==2.8.4 # via -r requirements/_base.txt, sqlalchemy python-dateutil==2.8.0 # via alembic python-editor==1.0.4 # via alembic requests==2.22.0 # via docker six==1.12.0 # via docker, python-dateutil, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.5 -tenacity==6.0.0 -urllib3==1.25.3 # via requests +sqlalchemy[postgresql_psycopg2binary]==1.3.5 # via -r requirements/_base.txt, alembic +tenacity==6.1.0 # via -r requirements/_migration.in +urllib3==1.25.8 # via -r requirements/_migration.in, requests websocket-client==0.56.0 # via docker -yarl==1.3.0 +yarl==1.4.2 # via -r requirements/_base.txt diff --git a/packages/postgres-database/requirements/_test.in b/packages/postgres-database/requirements/_test.in index 3fa36b45493..d590dc2e27b 100644 --- a/packages/postgres-database/requirements/_test.in +++ b/packages/postgres-database/requirements/_test.in @@ -11,7 +11,7 @@ aiopg[sa] faker # testing -coverage==4.5.1 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 +coverage==5.0.3 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 pytest pytest-cov pytest-aiohttp # incompatible with pytest-asyncio. 
See https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index d700dd93b00..ff21efd507c 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -5,63 +5,59 @@ # pip-compile --output-file=_test.txt _test.in # aiohttp==3.6.2 # via pytest-aiohttp -aiopg[sa]==1.0.0 -alembic==1.0.8 +aiopg[sa]==1.0.0 # via -r requirements/_test.in +alembic==1.4.1 # via -r requirements/_migration.txt appdirs==1.4.3 # via black astroid==2.3.3 # via pylint async-timeout==3.0.1 # via aiohttp attrs==19.3.0 # via aiohttp, black, pytest, pytest-docker -black==19.10b0 -bump2version==1.0.0 -certifi==2019.6.16 -chardet==3.0.4 -click==7.0 -coverage==4.5.1 -coveralls==1.10.0 -docker==4.0.2 +black==19.10b0 # via -r requirements/../../../requirements.txt +bump2version==1.0.0 # via -r requirements/../../../requirements.txt +certifi==2019.11.28 # via -r requirements/_migration.txt, requests +chardet==3.0.4 # via -r requirements/_migration.txt, aiohttp, requests +click==7.1.1 # via -r requirements/_migration.txt, black, pip-tools +coverage==5.0.3 # via -r requirements/_test.in, coveralls, pytest-cov +coveralls==1.11.1 # via -r requirements/_test.in +docker==4.2.0 # via -r requirements/_migration.txt docopt==0.6.2 # via coveralls -faker==4.0.1 -idna-ssl==1.1.0 # via aiohttp -idna==2.8 -importlib-metadata==1.3.0 # via pluggy, pytest +faker==4.0.2 # via -r requirements/_test.in +idna==2.8 # via -r requirements/_migration.txt, requests, yarl isort==4.3.21 # via pylint lazy-object-proxy==1.4.3 # via astroid -mako==1.0.12 -markupsafe==1.1.1 +mako==1.0.12 # via -r requirements/_migration.txt, alembic +markupsafe==1.1.1 # via -r requirements/_migration.txt, mako mccabe==0.6.1 # via pylint -more-itertools==8.0.2 # via pytest, zipp -multidict==4.5.2 -packaging==20.0 # via pytest +more-itertools==8.2.0 # via pytest +multidict==4.5.2 # via -r requirements/_migration.txt, aiohttp, yarl +packaging==20.3 # via pytest pathspec==0.7.0 # via black -pip-tools==4.4.1 +pip-tools==4.5.1 # via -r requirements/../../../requirements.txt pluggy==0.13.1 # via pytest -psycopg2-binary==2.8.4 +psycopg2-binary==2.8.4 # via -r requirements/_migration.txt, aiopg, sqlalchemy py==1.8.1 # via pytest -pyaml==19.12.0 -pylint==2.4.4 +pyaml==20.3.1 # via -r requirements/_test.in +pylint==2.4.4 # via -r requirements/../../../requirements.txt, -r requirements/_test.in pyparsing==2.4.6 # via packaging -pytest-aiohttp==0.3.0 -pytest-cov==2.8.1 -pytest-docker==0.7.2 -pytest-instafail==0.4.1.post0 -pytest-runner==5.2 -pytest==5.3.2 -python-dateutil==2.8.0 -python-editor==1.0.4 +pytest-aiohttp==0.3.0 # via -r requirements/_test.in +pytest-cov==2.8.1 # via -r requirements/_test.in +pytest-docker==0.7.2 # via -r requirements/_test.in +pytest-instafail==0.4.1.post0 # via -r requirements/_test.in +pytest-runner==5.2 # via -r requirements/_test.in +pytest==5.3.5 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail +python-dateutil==2.8.0 # via -r requirements/_migration.txt, alembic, faker +python-editor==1.0.4 # via -r requirements/_migration.txt, alembic pyyaml==5.3 # via pyaml -regex==2020.2.18 # via black -requests==2.22.0 -rope==0.16.0 -six==1.12.0 -sqlalchemy[postgresql_psycopg2binary]==1.3.5 -tenacity==6.0.0 +regex==2020.2.20 # via black +requests==2.22.0 # via -r requirements/_migration.txt, coveralls, docker +rope==0.16.0 # via -r 
requirements/../../../requirements.txt
+six==1.12.0 # via -r requirements/_migration.txt, astroid, docker, packaging, pip-tools, python-dateutil, tenacity, websocket-client
+sqlalchemy[postgresql_psycopg2binary]==1.3.5 # via -r requirements/_migration.txt, aiopg, alembic
+tenacity==6.1.0 # via -r requirements/_migration.txt
 text-unidecode==1.3 # via faker
 toml==0.10.0 # via black
-typed-ast==1.4.0 # via astroid, black
-typing-extensions==3.7.4.1 # via aiohttp
-urllib3==1.25.3
+typed-ast==1.4.1 # via black
+urllib3==1.25.8 # via -r requirements/_migration.txt, requests
 wcwidth==0.1.8 # via pytest
-websocket-client==0.56.0
+websocket-client==0.56.0 # via -r requirements/_migration.txt, docker
 wrapt==1.11.2 # via astroid
-yarl==1.3.0
-zipp==0.6.0 # via importlib-metadata
+yarl==1.4.2 # via -r requirements/_migration.txt, aiohttp
diff --git a/packages/postgres-database/requirements/prod.txt b/packages/postgres-database/requirements/prod.txt
new file mode 100644
index 00000000000..eecfe292a7c
--- /dev/null
+++ b/packages/postgres-database/requirements/prod.txt
@@ -0,0 +1,11 @@
+# Shortcut to install 'simcore-postgres-database'
+#
+# Usage:
+# pip install -r requirements/prod.txt
+#
+
+# installs requirements first
+-r _base.txt
+-r _migration.txt
+
+.[migration]
diff --git a/packages/postgres-database/scripts/remove_comp_tasks_duplicates.sql b/packages/postgres-database/scripts/remove_comp_tasks_duplicates.sql
index 711ae71bc11..569113801f5 100644
--- a/packages/postgres-database/scripts/remove_comp_tasks_duplicates.sql
+++ b/packages/postgres-database/scripts/remove_comp_tasks_duplicates.sql
@@ -5,6 +5,7 @@ ORDER BY
 CREATE TABLE comp_tasks_temp (LIKE comp_tasks INCLUDING ALL);
+
 ALTER TABLE comp_tasks_temp ADD FOREIGN KEY ("project_id") REFERENCES "comp_pipeline" ("project_id");
 INSERT INTO comp_tasks_temp
 SELECT
@@ -14,6 +15,8 @@ FROM
 ORDER BY
 project_id;
+ALTER SEQUENCE comp_tasks_task_id_seq OWNED BY comp_tasks_temp.task_id;
+
 DROP TABLE comp_tasks;
 ALTER TABLE comp_tasks_temp
diff --git a/packages/postgres-database/src/simcore_postgres_database/cli.py b/packages/postgres-database/src/simcore_postgres_database/cli.py
index 7ebd1a6a4fd..d809aff174f 100644
--- a/packages/postgres-database/src/simcore_postgres_database/cli.py
+++ b/packages/postgres-database/src/simcore_postgres_database/cli.py
@@ -109,6 +109,8 @@ def main():
     """ Simplified CLI for database migration with alembic """
+
+
 @main.command()
 @click.option("--user", "-u")
 @click.option("--password", "-p")
@@ -116,21 +118,21 @@
 @click.option("--port", type=int)
 @click.option("--database", "-d")
 def discover(**cli_inputs):
-    """ Discovers databases and stores configs in ~/.simcore_postgres_database.json """
+    """ Discovers databases and caches configs in ~/.simcore_postgres_database.json (except if --no-cache)"""
     # NOTE: Do not add defaults to user, password so we get a chance to ping urls
     # TODO: if multiple candidates online, then query user to select
     click.echo(f"Discovering database ...")
     cli_cfg = {key: value for key, value in cli_inputs.items() if value is not None}

-    def test_cached():
+    def _test_cached():
         """Tests cached configuration """
         cfg = _load_cache() or {}
         if cfg:
             cfg.update(cli_cfg)  # overrides
         return cfg

-    def test_env():
+    def _test_env():
         """Tests environ variables """
         cfg = {
             "user": os.getenv("POSTGRES_USER"),
@@ -142,15 +144,15 @@
         cfg.update(cli_cfg)
         return cfg

-    def test_swarm():
+    def _test_swarm():
         """Tests published port in swarm from host """
-        cfg = deepcopy(cli_cfg)
+        cfg = 
_test_env() cfg["host"] = "127.0.0.1" cfg["port"] = _get_service_published_port(cli_cfg.get("host", DEFAULT_HOST)) cfg.setdefault("database", DEFAULT_DB) return cfg - for test in [test_cached, test_env, test_swarm]: + for test in [_test_cached, _test_env, _test_swarm]: try: click.echo("-> {0.__name__}: {0.__doc__}".format(test)) @@ -161,14 +163,13 @@ def test_swarm(): click.echo(" ping {0.__name__}: {1} ...".format(test, url)) raise_if_not_responsive(url) - with open(discovered_cache, "w") as fh: json.dump(cfg, fh, sort_keys=True, indent=4) click.echo(f"Saved config at{discovered_cache}: {cfg}") click.secho( f"{test.__name__} succeeded: {url} is online", - blink=True, + blink=False, bold=True, fg="green", ) @@ -180,7 +181,7 @@ def test_swarm(): click.echo("<- {0.__name__} failed : {1}".format(test, inline_msg)) _reset_cache() - click.secho("Sorry, database not found !!", blink=True, bold=True, fg="red") + click.secho("Sorry, database not found !!", blink=False, bold=True, fg="red") @main.command() @@ -205,8 +206,6 @@ def clean(): # Bypasses alembic CLI into a reduced version ------------ -# TODO: systematic bypass?? - @main.command() @click.option("-m", "message") diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py index 5ed0f0aa3ba..b1fd8bbdf00 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py @@ -10,9 +10,9 @@ class NodeClass(enum.Enum): - COMPUTATIONAL="COMPUTATIONAL" - INTERACTIVE="INTERACTIVE" - FRONTEND="FRONTEND" + COMPUTATIONAL = "COMPUTATIONAL" + INTERACTIVE = "INTERACTIVE" + FRONTEND = "FRONTEND" comp_tasks = sa.Table( @@ -37,6 +37,5 @@ class NodeClass(enum.Enum): sa.Column("submit", sa.DateTime), sa.Column("start", sa.DateTime), sa.Column("end", sa.DateTime), - - sa.UniqueConstraint('project_id', 'node_id', name='project_node_uniqueness'), + sa.UniqueConstraint("project_id", "node_id", name="project_node_uniqueness"), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py index eacd05d0ea5..38f7dbf2b1c 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py @@ -15,12 +15,8 @@ nullable=False, ), sa.Column( - "project_id", - sa.BigInteger, - sa.ForeignKey(projects.c.id), - nullable=False, + "project_id", sa.BigInteger, sa.ForeignKey(projects.c.id), nullable=False, ), - # TODO: do not ondelete=cascase for project_id or it will delete SHARED PROJECT # add instead sa.UniqueConstraint('user_id', 'project_id', name='user_project_uniqueness'), # diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py index d3e68121e61..f45d746514a 100644 --- a/packages/postgres-database/tests/conftest.py +++ b/packages/postgres-database/tests/conftest.py @@ -10,40 +10,42 @@ import aiopg.sa -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip, docker_compose_file) -> str: # container environment with open(docker_compose_file) as fh: config = yaml.safe_load(fh) - environ = config['services']['postgres']['environment'] + environ = config["services"]["postgres"]["environment"] dsn = 
"postgresql://{user}:{password}@{host}:{port}/{database}".format( - user=environ['POSTGRES_USER'], - password=environ['POSTGRES_PASSWORD'], + user=environ["POSTGRES_USER"], + password=environ["POSTGRES_PASSWORD"], host=docker_ip, - port=docker_services.port_for('postgres', 5432), - database=environ['POSTGRES_DB'], + port=docker_services.port_for("postgres", 5432), + database=environ["POSTGRES_DB"], ) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(dsn), - timeout=30.0, - pause=0.1, + check=lambda: is_postgres_responsive(dsn), timeout=30.0, pause=0.1, ) return dsn + from typing import Union, Coroutine, Callable + @pytest.fixture def make_engine(postgres_service): dsn = postgres_service + def maker(is_async=True) -> Union[Coroutine, Callable]: return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn) + return maker -def is_postgres_responsive(dsn)-> bool: +def is_postgres_responsive(dsn) -> bool: """Check if something responds to ``url`` """ try: engine = sa.create_engine(dsn) diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py index a10d483ee2d..366a6e79226 100644 --- a/packages/postgres-database/tests/test_delete_projects_and_users.py +++ b/packages/postgres-database/tests/test_delete_projects_and_users.py @@ -9,6 +9,7 @@ import faker import pytest +import sqlalchemy as sa from aiopg.sa.result import ResultProxy, RowProxy from simcore_postgres_database.models.base import metadata @@ -18,25 +19,27 @@ fake = faker.Faker() + def random_user(**overrides): data = dict( - name = fake.name(), - email = fake.email(), - password_hash = fake.numerify(text='#'*5), - status = UserStatus.ACTIVE, - created_ip=fake.ipv4() + name=fake.name(), + email=fake.email(), + password_hash=fake.numerify(text="#" * 5), + status=UserStatus.ACTIVE, + created_ip=fake.ipv4(), ) data.update(overrides) return data + def random_project(**overrides): data = dict( - uuid = uuid4(), - name = fake.word(), - description= fake.sentence(), - prj_owner = fake.email(), - workbench = {}, - published = False + uuid=uuid4(), + name=fake.word(), + description=fake.sentence(), + prj_owner=fake.email(), + workbench={}, + published=False, ) data.update(overrides) return data @@ -59,15 +62,38 @@ async def start(): await conn.execute(projects.insert().values(**random_project())) await conn.execute(projects.insert().values(**random_project())) - await conn.execute(user_to_projects.insert().values(user_id=1, project_id=1)) - await conn.execute(user_to_projects.insert().values(user_id=1, project_id=2)) - await conn.execute(user_to_projects.insert().values(user_id=2, project_id=3)) + await conn.execute( + user_to_projects.insert().values(user_id=1, project_id=1) + ) + await conn.execute( + user_to_projects.insert().values(user_id=1, project_id=2) + ) + await conn.execute( + user_to_projects.insert().values(user_id=2, project_id=3) + ) return engine return loop.run_until_complete(start()) + +async def test_count_users(engine): + async with engine.acquire() as conn: + users_count = await conn.scalar(users.count()) + assert users_count == 3 + + users_count = await conn.scalar( + sa.select([sa.func.count()]).where(users.c.name == "A") + ) + assert users_count == 1 + + users_count = await conn.scalar( + sa.select([sa.func.count()]).where(users.c.name == "UNKNOWN NAME") + ) + assert users_count == 0 + + @pytest.mark.skip(reason="UNDER DEV") async def test_view(engine): async 
with engine.acquire() as conn: @@ -92,8 +118,8 @@ async def test_view(engine): assert len(rows) == 3 # effect of cascade is that relation deletes as well - res = await conn.execute(user_to_projects.select()) + res = await conn.execute(user_to_projects.select()) rows = await res.fetchall() assert len(rows) == 1 - assert not any( row[user_to_projects.c.user_id]==1 for row in rows ) + assert not any(row[user_to_projects.c.user_id] == 1 for row in rows) diff --git a/packages/s3wrapper/requirements/_base.in b/packages/s3wrapper/requirements/_base.in index e6e939fdf64..eee06c2ad64 100644 --- a/packages/s3wrapper/requirements/_base.in +++ b/packages/s3wrapper/requirements/_base.in @@ -1,5 +1,5 @@ # # Specifies third-party dependencies for 's3wrapper' # - +urllib3>=1.25.8 minio diff --git a/packages/s3wrapper/requirements/_base.txt b/packages/s3wrapper/requirements/_base.txt index 39d97c53df5..55455e7e483 100644 --- a/packages/s3wrapper/requirements/_base.txt +++ b/packages/s3wrapper/requirements/_base.txt @@ -5,8 +5,8 @@ # pip-compile --output-file=_base.txt _base.in # certifi==2019.3.9 # via minio -minio==4.0.16 +minio==4.0.16 # via -r _base.in python-dateutil==2.8.0 # via minio pytz==2019.1 # via minio six==1.12.0 # via python-dateutil -urllib3==1.25.2 # via minio +urllib3==1.25.8 # via -r _base.in, minio diff --git a/packages/s3wrapper/requirements/_test.txt b/packages/s3wrapper/requirements/_test.txt index bb5c71c87d7..e81928f8b62 100644 --- a/packages/s3wrapper/requirements/_test.txt +++ b/packages/s3wrapper/requirements/_test.txt @@ -6,33 +6,33 @@ # astroid==2.3.3 # via pylint attrs==19.3.0 # via pytest, pytest-docker -certifi==2019.3.9 +certifi==2019.3.9 # via -r _base.txt, minio, requests chardet==3.0.4 # via requests -coverage==4.5.1 -coveralls==1.10.0 +coverage==4.5.1 # via -r _test.in, coveralls, pytest-cov +coveralls==1.11.1 # via -r _test.in docopt==0.6.2 # via coveralls -idna==2.8 # via requests -importlib-metadata==1.3.0 # via pluggy, pytest +idna==2.9 # via requests +importlib-metadata==1.5.0 # via pluggy, pytest isort==4.3.21 # via pylint lazy-object-proxy==1.4.3 # via astroid mccabe==0.6.1 # via pylint -minio==4.0.16 -more-itertools==8.0.2 # via pytest, zipp -packaging==20.0 # via pytest +minio==4.0.16 # via -r _base.txt +more-itertools==8.2.0 # via pytest +packaging==20.3 # via pytest pluggy==0.13.1 # via pytest py==1.8.1 # via pytest -pylint==2.4.4 +pylint==2.4.4 # via -r _test.in pyparsing==2.4.6 # via packaging -pytest-cov==2.8.1 -pytest-docker==0.6.1 -pytest-runner==5.2 -pytest==5.3.2 -python-dateutil==2.8.0 -pytz==2019.1 -requests==2.22.0 -six==1.12.0 -typed-ast==1.4.0 # via astroid -urllib3==1.25.2 +pytest-cov==2.8.1 # via -r _test.in +pytest-docker==0.7.2 # via -r _test.in +pytest-runner==5.2 # via -r _test.in +pytest==5.3.5 # via -r _test.in, pytest-cov +python-dateutil==2.8.0 # via -r _base.txt, minio +pytz==2019.1 # via -r _base.txt, minio +requests==2.23.0 # via -r _test.in, coveralls +six==1.12.0 # via -r _base.txt, astroid, packaging, python-dateutil +typed-ast==1.4.1 # via astroid +urllib3==1.25.8 # via -r _base.txt, minio, requests wcwidth==0.1.8 # via pytest wrapt==1.11.2 # via astroid -zipp==0.6.0 # via importlib-metadata +zipp==3.1.0 # via importlib-metadata diff --git a/packages/service-library/Makefile b/packages/service-library/Makefile index 7ae88f2e620..37d89b948a9 100644 --- a/packages/service-library/Makefile +++ b/packages/service-library/Makefile @@ -1,54 +1,18 @@ # -# TODO: under development +# Targets for DEVELOPMENT of Service Library # 
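# NOTE: the generic recipes removed below (devenv, version-*, info, clean, help)
# presumably move into scripts/common.Makefile (the include added below; its
# contents are not shown in this patch), so each package keeps only its
# package-specific install-* and tests targets.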
-.DEFAULT_GOAL := help +include ../../scripts/common.Makefile REPO_BASE_DIR = $(abspath $(CURDIR)/../../) -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) --directory ${REPO_BASE_DIR} $@ - .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: ## install app in development/production or CI mode +install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests tests: ## runs unit tests # running unit tests @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests - - -.PHONY: version-patch version-minor -version-patch version-minor version-major: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '$(notdir $(CURDIR))':" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 0d0babbc191..b47dd5e2d8e 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -4,30 +4,30 @@ # # pip-compile --output-file=_base.txt _base.in # -aiohttp==3.6.2 # via -r requirements/_base.in (line 10), aiozipkin -aiopg[sa]==1.0.0 # via -r requirements/_base.in (line 11) -aiozipkin==0.6.0 # via -r requirements/_base.in (line 12) +aiohttp==3.6.2 # via -r requirements/_base.in, aiozipkin +aiopg[sa]==1.0.0 # via -r requirements/_base.in +aiozipkin==0.6.0 # via -r requirements/_base.in async-timeout==3.0.1 # via aiohttp -attrs==19.3.0 # via -r requirements/_base.in (line 19), aiohttp, jsonschema, openapi-core +attrs==19.3.0 # via -r requirements/_base.in, aiohttp, jsonschema, openapi-core chardet==3.0.4 # via aiohttp idna==2.8 # via yarl isodate==0.6.0 # via openapi-core -jsonschema==3.2.0 # via -r requirements/_base.in (line 15), openapi-spec-validator +jsonschema==3.2.0 # via -r requirements/_base.in, openapi-spec-validator lazy-object-proxy==1.4.1 # via openapi-core multidict==4.5.2 # via aiohttp, yarl -openapi-core==0.12.0 # via -r requirements/_base.in (line 16) +openapi-core==0.12.0 # via -r requirements/_base.in openapi-spec-validator==0.2.7 # via openapi-core -prometheus-client==0.7.1 # via -r requirements/_base.in (line 17) -psycopg2-binary==2.8.4 # via -r requirements/_base.in (line 7), aiopg, sqlalchemy +prometheus-client==0.7.1 # via -r requirements/_base.in +psycopg2-binary==2.8.4 # via -r requirements/_base.in, aiopg, 
sqlalchemy pyrsistent==0.15.2 # via jsonschema -pyyaml==5.3 # via -r requirements/_base.in (line 6), openapi-spec-validator +pyyaml==5.3 # via -r requirements/_base.in, openapi-spec-validator six==1.12.0 # via isodate, jsonschema, openapi-core, openapi-spec-validator, pyrsistent, tenacity -sqlalchemy[postgresql_psycopg2binary]==1.3.4 # via -r requirements/_base.in (line 5), aiopg +sqlalchemy[postgresql_psycopg2binary]==1.3.4 # via -r requirements/_base.in, aiopg strict-rfc3339==0.7 # via openapi-core -tenacity==6.1.0 # via -r requirements/_base.in (line 18) -trafaret==2.0.2 # via -r requirements/_base.in (line 20) -ujson==1.35 # via -r requirements/_base.in (line 13) -werkzeug==1.0.0 # via -r requirements/_base.in (line 14) +tenacity==6.1.0 # via -r requirements/_base.in +trafaret==2.0.2 # via -r requirements/_base.in +ujson==2.0.2 # via -r requirements/_base.in +werkzeug==1.0.0 # via -r requirements/_base.in yarl==1.3.0 # via aiohttp # The following packages are considered to be unsafe in a requirements file: diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index ff1a866fd30..329fcd16170 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -21,11 +21,11 @@ isort==4.3.21 # via pylint jsonschema==3.2.0 # via -r requirements/_base.txt, openapi-spec-validator lazy-object-proxy==1.4.1 # via -r requirements/_base.txt, astroid, openapi-core mccabe==0.6.1 # via pylint -more-itertools==8.0.2 # via pytest +more-itertools==8.2.0 # via pytest multidict==4.5.2 # via -r requirements/_base.txt, aiohttp, yarl openapi-core==0.12.0 # via -r requirements/_base.txt openapi-spec-validator==0.2.7 # via -r requirements/_base.txt, openapi-core -packaging==20.0 # via pytest, pytest-sugar +packaging==20.3 # via pytest, pytest-sugar pluggy==0.13.1 # via pytest prometheus-client==0.7.1 # via -r requirements/_base.txt psycopg2-binary==2.8.4 # via -r requirements/_base.txt, aiopg, sqlalchemy @@ -42,15 +42,15 @@ pytest-runner==5.2 # via -r requirements/_test.in pytest-sugar==0.9.2 # via -r requirements/_test.in pytest==5.3.5 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar pyyaml==5.3 # via -r requirements/_base.txt, openapi-spec-validator -requests==2.22.0 # via coveralls +requests==2.23.0 # via coveralls six==1.12.0 # via -r requirements/_base.txt, astroid, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, pyrsistent, tenacity sqlalchemy[postgresql_psycopg2binary]==1.3.4 # via -r requirements/_base.txt, aiopg strict-rfc3339==0.7 # via -r requirements/_base.txt, openapi-core tenacity==6.1.0 # via -r requirements/_base.txt termcolor==1.1.0 # via pytest-sugar trafaret==2.0.2 # via -r requirements/_base.txt -ujson==1.35 # via -r requirements/_base.txt -urllib3==1.25.7 # via requests +ujson==2.0.2 # via -r requirements/_base.txt +urllib3==1.25.8 # via requests wcwidth==0.1.8 # via pytest werkzeug==1.0.0 # via -r requirements/_base.txt wrapt==1.11.2 # via astroid diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py index ef256bb5be9..96ddbbb4e98 100644 --- a/packages/service-library/setup.py +++ b/packages/service-library/setup.py @@ -7,38 +7,40 @@ here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -def read_reqs( reqs_path: Path): - return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE) +def read_reqs(reqs_path: Path): + return 
re.findall(r"(^[^#-][\w]+[-~>=<.\w]+)", reqs_path.read_text(), re.MULTILINE) -install_requirements = read_reqs( here / "requirements" / "_base.in" ) # WEAK requirements +install_requirements = read_reqs( + here / "requirements" / "_base.in" +) # WEAK requirements -test_requirements = read_reqs( here / "requirements" / "_test.txt" ) # STRONG requirements +test_requirements = read_reqs( + here / "requirements" / "_test.txt" +) # STRONG requirements -readme = Path( here / "README.rst" ).read_text() +readme = Path(here / "README.rst").read_text() setup( - name='simcore-service-library', - version='0.1.0', + name="simcore-service-library", + version="0.1.0", author="Pedro Crespo (pcrespov)", description="Core service library for simcore (or servicelib)", classifiers=[ - 'Development Status :: 2 - Pre-Alpha', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Natural Language :: English', - 'Programming Language :: Python :: 3.6', + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3.6", ], long_description=readme, license="MIT license", install_requires=install_requirements, - packages=find_packages(where='src'), - package_dir={'': 'src'}, + packages=find_packages(where="src"), + package_dir={"": "src"}, include_package_data=True, - test_suite='tests', + test_suite="tests", tests_require=test_requirements, - extras_require= { - 'test': test_requirements - }, - zip_safe=False + extras_require={"test": test_requirements}, + zip_safe=False, ) diff --git a/packages/service-library/src/servicelib/__init__.py b/packages/service-library/src/servicelib/__init__.py index c3c50323628..9e1daa52652 100644 --- a/packages/service-library/src/servicelib/__init__.py +++ b/packages/service-library/src/servicelib/__init__.py @@ -2,4 +2,4 @@ """ -__version__ = '0.1.0' +__version__ = "0.1.0" diff --git a/packages/service-library/src/servicelib/aiopg_utils.py b/packages/service-library/src/servicelib/aiopg_utils.py index 6c3f81b654e..424b4f6c5a9 100644 --- a/packages/service-library/src/servicelib/aiopg_utils.py +++ b/packages/service-library/src/servicelib/aiopg_utils.py @@ -22,24 +22,32 @@ from aiopg.sa import Engine, create_engine from psycopg2 import DatabaseError from psycopg2 import Error as DBAPIError -from tenacity import (RetryCallState, after_log, before_sleep_log, retry, - retry_if_exception_type, stop_after_attempt, wait_fixed) +from tenacity import ( + RetryCallState, + after_log, + before_sleep_log, + retry, + retry_if_exception_type, + stop_after_attempt, + wait_fixed, +) log = logging.getLogger(__name__) DSN = "postgresql://{user}:{password}@{host}:{port}/{database}" + @attr.s(auto_attribs=True) class DataSourceName: # Attributes for postgres db user: str - password: str=attr.ib(repr=False) + password: str = attr.ib(repr=False) database: str - host: str='localhost' - port: int=5432 + host: str = "localhost" + port: int = 5432 # Attributes about the caller - application_name: Optional[str]=None + application_name: Optional[str] = None def asdict(self) -> Dict: return attr.asdict(self) @@ -51,8 +59,7 @@ def to_uri(self, with_query=False) -> str: return uri - -def create_pg_engine(dsn: DataSourceName, minsize:int=1, maxsize:int=4): +def create_pg_engine(dsn: DataSourceName, minsize: int = 1, maxsize: int = 4): """ Adapts the arguments of aiopg.sa.create_engine Returns a coroutine that is awaitable, i.e. 
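A minimal usage sketch of the helper above, with placeholder credentials (only names defined in this file are assumed; per the docstring, the coroutine returned by aiopg's create_engine also works as an async context manager):

    dsn = DataSourceName(user="scu", password="secret", database="simcoredb")
    async with create_pg_engine(dsn, minsize=1, maxsize=4) as engine:
        async with engine.acquire() as conn:
            await conn.execute("SELECT 1")  # pool is created on entry, closed on exit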
@@ -62,10 +69,11 @@ def create_pg_engine(dsn: DataSourceName, minsize:int=1, maxsize:int=4): assert engine.closed """ - awaitable_engine_coro = create_engine(dsn.to_uri(), + awaitable_engine_coro = create_engine( + dsn.to_uri(), application_name=dsn.application_name, minsize=minsize, - maxsize=maxsize + maxsize=maxsize, ) return awaitable_engine_coro @@ -115,11 +123,10 @@ def is_postgres_responsive(dsn: DataSourceName) -> bool: return ok - def raise_http_unavailable_error(retry_state: RetryCallState): # TODO: mark incident on db to determine the quality of service. E.g. next time we do not stop. TIP: obj, query = retry_state.args; obj.app.register_incidents - exc :DatabaseError = retry_state.outcome.exception() + exc: DatabaseError = retry_state.outcome.exception() # StandardError # |__ Warning # |__ Error @@ -136,7 +143,6 @@ def raise_http_unavailable_error(retry_state: RetryCallState): # SEE https://aiopg.readthedocs.io/en/stable/core.html?highlight=Exception#exceptions # SEE http://initd.org/psycopg/docs/module.html#dbapi-exceptions - # TODO: add header with Retry-After https://tools.ietf.org/html/rfc7231#section-7.1.3 resp = web.HTTPServiceUnavailable() @@ -150,26 +156,29 @@ def raise_http_unavailable_error(retry_state: RetryCallState): class PostgresRetryPolicyUponInitialization: """ Retry policy upon service initialization """ + WAIT_SECS = 2 ATTEMPTS_COUNT = 20 - def __init__(self, logger: Optional[logging.Logger]=None): + def __init__(self, logger: Optional[logging.Logger] = None): logger = logger or log self.kwargs = dict( wait=wait_fixed(self.WAIT_SECS), stop=stop_after_attempt(self.ATTEMPTS_COUNT), before_sleep=before_sleep_log(logger, logging.INFO), - reraise=True + reraise=True, ) + class PostgresRetryPolicyUponOperation: """ Retry policy upon service operation """ + WAIT_SECS = 2 ATTEMPTS_COUNT = 3 - def __init__(self, logger: Optional[logging.Logger]=None): + def __init__(self, logger: Optional[logging.Logger] = None): logger = logger or log self.kwargs = dict( @@ -177,9 +186,10 @@ def __init__(self, logger: Optional[logging.Logger]=None): wait=wait_fixed(self.WAIT_SECS), stop=stop_after_attempt(self.ATTEMPTS_COUNT), after=after_log(logger, logging.WARNING), - retry_error_callback=raise_http_unavailable_error + retry_error_callback=raise_http_unavailable_error, ) + # alias postgres_service_retry_policy_kwargs = PostgresRetryPolicyUponOperation().kwargs @@ -199,7 +209,7 @@ async def wrapper(*args, **kargs): result = await _deco_func(*args, **kargs) finally: stats = _deco_func.retry.statistics - _total_retry_count += int(stats.get('attempt_number', 0)) + _total_retry_count += int(stats.get("attempt_number", 0)) return result def total_retry_count(): @@ -210,9 +220,8 @@ def total_retry_count(): return wrapper - __all__ = [ - 'DBAPIError', - 'PostgresRetryPolicyUponInitialization', - 'PostgresRetryPolicyUponOperation' + "DBAPIError", + "PostgresRetryPolicyUponInitialization", + "PostgresRetryPolicyUponOperation", ] diff --git a/packages/service-library/src/servicelib/application.py b/packages/service-library/src/servicelib/application.py index acd5c4fb957..57c639e0fee 100644 --- a/packages/service-library/src/servicelib/application.py +++ b/packages/service-library/src/servicelib/application.py @@ -5,6 +5,7 @@ from .application_keys import APP_CONFIG_KEY from .client_session import persistent_client_session + async def startup_info(app: web.Application): print(f"INFO: STARTING UP {app}...", flush=True) @@ -13,7 +14,7 @@ async def shutdown_info(app: web.Application): 
print(f"INFO: SHUTING DOWN {app} ...", flush=True) -def create_safe_application(config: Optional[Dict]=None) -> web.Application: +def create_safe_application(config: Optional[Dict] = None) -> web.Application: app = web.Application() # Enxures config entry diff --git a/packages/service-library/src/servicelib/application_keys.py b/packages/service-library/src/servicelib/application_keys.py index d19f14f6f37..d1cf96f6ec4 100644 --- a/packages/service-library/src/servicelib/application_keys.py +++ b/packages/service-library/src/servicelib/application_keys.py @@ -18,13 +18,13 @@ # # web.Application keys, i.e. app[APP_*_KEY] # -APP_CONFIG_KEY = f'{__name__ }.config' -APP_OPENAPI_SPECS_KEY = f'{__name__ }.openapi_specs' -APP_JSONSCHEMA_SPECS_KEY = f'{__name__ }.jsonschema_specs' +APP_CONFIG_KEY = f"{__name__ }.config" +APP_OPENAPI_SPECS_KEY = f"{__name__ }.openapi_specs" +APP_JSONSCHEMA_SPECS_KEY = f"{__name__ }.jsonschema_specs" -APP_DB_ENGINE_KEY = f'{__name__ }.db_engine' +APP_DB_ENGINE_KEY = f"{__name__ }.db_engine" -APP_CLIENT_SESSION_KEY = f'{__name__ }.session' +APP_CLIENT_SESSION_KEY = f"{__name__ }.session" # # web.Response keys, i.e. app[RSP_*_KEY] diff --git a/packages/service-library/src/servicelib/application_setup.py b/packages/service-library/src/servicelib/application_setup.py index b69c8915f5b..c0a8a1637d5 100644 --- a/packages/service-library/src/servicelib/application_setup.py +++ b/packages/service-library/src/servicelib/application_setup.py @@ -12,21 +12,29 @@ APP_SETUP_KEY = f"{__name__ }.setup" + class ModuleCategory(Enum): SYSTEM = 0 ADDON = 1 + class ApplicationSetupError(Exception): pass + class DependencyError(ApplicationSetupError): pass -def app_module_setup(module_name: str, category: ModuleCategory,*, - depends: Optional[List[str]]=None, - config_section: str=None, config_enabled: str=None, - logger: Optional[logging.Logger]=None - ) -> Callable: + +def app_module_setup( + module_name: str, + category: ModuleCategory, + *, + depends: Optional[List[str]] = None, + config_section: str = None, + config_enabled: str = None, + logger: Optional[logging.Logger] = None, +) -> Callable: """ Decorator that marks a function as 'a setup function' for a given module in an application - Marks a function as 'setup' of a given module in an application @@ -75,20 +83,22 @@ def decorate(setup_func): if "setup" not in setup_func.__name__: logger.warning("Rename '%s' to contain 'setup'", setup_func.__name__) - # metadata info + # metadata info def setup_metadata() -> Dict: return { - 'module_name': module_name, - 'dependencies': depends, - 'config_section': section, - 'config_enabled': config_enabled + "module_name": module_name, + "dependencies": depends, + "config_section": section, + "config_enabled": config_enabled, } # wrapper @functools.wraps(setup_func) def setup_wrapper(app: web.Application, *args, **kargs) -> bool: # pre-setup - logger.debug("Setting up '%s' [%s; %s] ... ", module_name, category.name, depends) + logger.debug( + "Setting up '%s' [%s; %s] ... 
", module_name, category.name, depends + ) if APP_SETUP_KEY not in app: app[APP_SETUP_KEY] = [] @@ -100,7 +110,9 @@ def setup_wrapper(app: web.Application, *args, **kargs) -> bool: def _get(cfg_, parts): for part in parts: - if section and part == "enabled": # if section exists, no need to explicitly enable it + if ( + section and part == "enabled" + ): # if section exists, no need to explicitly enable it cfg_ = cfg_.get(part, True) else: cfg_ = cfg_[part] @@ -109,14 +121,21 @@ def _get(cfg_, parts): try: is_enabled = _get(cfg, config_enabled.split(".")) except KeyError as ee: - raise ApplicationSetupError(f"Cannot find '{config_enabled}' in app config at [ {ee} ]") + raise ApplicationSetupError( + f"Cannot find '{config_enabled}' in app config at [ {ee} ]" + ) if not is_enabled: - logger.info("Skipping '%s' setup. Explicitly disabled in config", module_name) + logger.info( + "Skipping '%s' setup. Explicitly disabled in config", + module_name, + ) return False if depends: - uninitialized = [dep for dep in depends if dep not in app[APP_SETUP_KEY]] + uninitialized = [ + dep for dep in depends if dep not in app[APP_SETUP_KEY] + ] if uninitialized: msg = f"The following '{module_name}'' dependencies are still uninitialized: {uninitialized}" log.error(msg) @@ -141,14 +160,20 @@ def _get(cfg_, parts): return ok setup_wrapper.metadata = setup_metadata - setup_wrapper.MARK = 'setup' + setup_wrapper.MARK = "setup" return setup_wrapper + return decorate def is_setup_function(fun): - return inspect.isfunction(fun) and \ - hasattr(fun, 'MARK') and fun.MARK == 'setup' and \ - any(param.annotation == web.Application - for name, param in inspect.signature(fun).parameters.items()) + return ( + inspect.isfunction(fun) + and hasattr(fun, "MARK") + and fun.MARK == "setup" + and any( + param.annotation == web.Application + for name, param in inspect.signature(fun).parameters.items() + ) + ) diff --git a/packages/service-library/src/servicelib/client_session.py b/packages/service-library/src/servicelib/client_session.py index a6e6be95fda..8b282dd797c 100644 --- a/packages/service-library/src/servicelib/client_session.py +++ b/packages/service-library/src/servicelib/client_session.py @@ -11,6 +11,7 @@ log = logging.getLogger(__name__) + def get_client_session(app: web.Application) -> ClientSession: """ Lazy initialization of ClientSession @@ -38,19 +39,18 @@ async def persistent_client_session(app: web.Application): # closes held session if session is not app.get(APP_CLIENT_SESSION_KEY): - log.error("Unexpected client session upon cleanup! expected %s, got %s", + log.error( + "Unexpected client session upon cleanup! expected %s, got %s", session, - app.get(APP_CLIENT_SESSION_KEY)) + app.get(APP_CLIENT_SESSION_KEY), + ) await session.close() log.info("Session is actually closed? %s", session.closed) + # FIXME: if get_client_session upon startup fails and session is NOT closed. 
Implement some kind of gracefull shutdonw https://docs.aiohttp.org/en/latest/client_advanced.html#graceful-shutdown # TODO: add some tests -__all__ = [ - 'APP_CLIENT_SESSION_KEY', - 'get_client_session', - 'persistent_client_session' -] +__all__ = ["APP_CLIENT_SESSION_KEY", "get_client_session", "persistent_client_session"] diff --git a/packages/service-library/src/servicelib/config_schema_utils.py b/packages/service-library/src/servicelib/config_schema_utils.py index 378c674a7ff..33c516031e4 100644 --- a/packages/service-library/src/servicelib/config_schema_utils.py +++ b/packages/service-library/src/servicelib/config_schema_utils.py @@ -1,12 +1,11 @@ import trafaret as T -def addon_section(name: str, optional: bool=False) -> T.Key: +def addon_section(name: str, optional: bool = False) -> T.Key: if optional: return T.Key(name, default=dict(enabled=True), optional=optional) return T.Key(name) + def minimal_addon_schema() -> T.Dict: - return T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool() - }) + return T.Dict({T.Key("enabled", default=True, optional=True): T.Bool()}) diff --git a/packages/service-library/src/servicelib/decorators.py b/packages/service-library/src/servicelib/decorators.py index ae8e156a284..e51d1fb4a3b 100644 --- a/packages/service-library/src/servicelib/decorators.py +++ b/packages/service-library/src/servicelib/decorators.py @@ -11,11 +11,10 @@ log = logging.getLogger(__name__) - def safe_return(if_fails_return=False, catch=None, logger=None): # defaults if catch is None: - catch = (RuntimeError, ) + catch = (RuntimeError,) if logger is None: logger = log @@ -27,8 +26,10 @@ def safe_func(*args, **kargs): return res except catch as err: logger.info("%s failed: %s", func.__name__, str(err)) - except Exception: #pylint: disable=broad-except + except Exception: # pylint: disable=broad-except logger.info("%s failed unexpectedly", func.__name__, exc_info=True) - return deepcopy(if_fails_return) # avoid issues with default mutables + return deepcopy(if_fails_return) # avoid issues with default mutables + return safe_func + return decorate diff --git a/packages/service-library/src/servicelib/jsonschema_specs.py b/packages/service-library/src/servicelib/jsonschema_specs.py index b7979a74d5b..c3d44917c9c 100644 --- a/packages/service-library/src/servicelib/jsonschema_specs.py +++ b/packages/service-library/src/servicelib/jsonschema_specs.py @@ -14,13 +14,17 @@ def _load_from_path(filepath: Path) -> Dict: spec_dict = json.load(f) return spec_dict + async def _load_from_url(session: ClientSession, url: URL) -> Dict: async with session.get(url) as resp: text = await resp.text() spec_dict = json.loads(text) return spec_dict -async def create_jsonschema_specs(location: Path, session: ClientSession=None) -> Dict: + +async def create_jsonschema_specs( + location: Path, session: ClientSession = None +) -> Dict: """ Loads specs from a given location (url or path), validates them and returns a working instance @@ -38,7 +42,7 @@ async def create_jsonschema_specs(location: Path, session: ClientSession=None) - if URL(str(location)).host: spec_dict = await _load_from_url(session, URL(location)) else: - path = Path(location).expanduser().resolve() #pylint: disable=no-member + path = Path(location).expanduser().resolve() # pylint: disable=no-member spec_dict = _load_from_path(path) try: diff --git a/packages/service-library/src/servicelib/jsonschema_validation.py b/packages/service-library/src/servicelib/jsonschema_validation.py index c40285f1f79..dc1f6201a1d 100644 --- 
a/packages/service-library/src/servicelib/jsonschema_validation.py +++ b/packages/service-library/src/servicelib/jsonschema_validation.py @@ -5,6 +5,7 @@ log = logging.getLogger(__name__) + def validate_instance(instance: Dict, schema: Dict, *, log_errors=True): try: validate(instance, schema) diff --git a/packages/service-library/src/servicelib/monitoring.py b/packages/service-library/src/servicelib/monitoring.py index 5fe020354cb..375829bc3cd 100644 --- a/packages/service-library/src/servicelib/monitoring.py +++ b/packages/service-library/src/servicelib/monitoring.py @@ -25,9 +25,10 @@ def middleware_factory(app_name): async def middleware_handler(request: web.Request, handler): # See https://prometheus.io/docs/concepts/metric_types try: - request['start_time'] = time.time() - request.app['REQUEST_IN_PROGRESS'].labels( - app_name, request.path, request.method).inc() + request["start_time"] = time.time() + request.app["REQUEST_IN_PROGRESS"].labels( + app_name, request.path, request.method + ).inc() resp = await handler(request) @@ -35,35 +36,42 @@ async def middleware_handler(request: web.Request, handler): # Captures raised reponses (success/failures accounted with resp.status) resp = exc raise - except Exception as exc: #pylint: disable=broad-except + except Exception as exc: # pylint: disable=broad-except # Prevents issue #1025. resp = web.HTTPInternalServerError(reason=str(exc)) - resp_time = time.time() - request['start_time'] + resp_time = time.time() - request["start_time"] # NOTE: all access to API (i.e. and not other paths as /socket, /x, etc) shall return web.HTTPErrors since processed by error_middleware_factory - log.exception('Unexpected server error "%s" from access: %s "%s %s" done in %3.2f secs. Responding with status %s', + log.exception( + 'Unexpected server error "%s" from access: %s "%s %s" done in %3.2f secs. Responding with status %s', type(exc), - request.remote, request.method, request.path, + request.remote, + request.method, + request.path, resp_time, - resp.status + resp.status, ) finally: # metrics on the same request - resp_time = time.time() - request['start_time'] - request.app['REQUEST_LATENCY'].labels( - app_name, request.path).observe(resp_time) + resp_time = time.time() - request["start_time"] + request.app["REQUEST_LATENCY"].labels(app_name, request.path).observe( + resp_time + ) - request.app['REQUEST_IN_PROGRESS'].labels( - app_name, request.path, request.method).dec() + request.app["REQUEST_IN_PROGRESS"].labels( + app_name, request.path, request.method + ).dec() - request.app['REQUEST_COUNT'].labels( - app_name, request.method, request.path, resp.status).inc() + request.app["REQUEST_COUNT"].labels( + app_name, request.method, request.path, resp.status + ).inc() return resp middleware_handler.__middleware_name__ = __name__ return middleware_handler + async def metrics(_request): # TODO: NOT async! # prometheus_client access to a singleton registry! @@ -71,34 +79,39 @@ async def metrics(_request): resp.content_type = CONTENT_TYPE_LATEST return resp + async def check_outermost_middleware(app: web.Application): m = app.middlewares[0] - ok = m and hasattr(m, "__middleware_name__") and m.__middleware_name__==__name__ + ok = m and hasattr(m, "__middleware_name__") and m.__middleware_name__ == __name__ if not ok: # TODO: name all middleware and list middleware in log - log.critical("Monitoring middleware expected in the outermost layer." - "TIP: Check setup order") + log.critical( + "Monitoring middleware expected in the outermost layer." 
+ "TIP: Check setup order" + ) + def setup_monitoring(app: web.Application, app_name: str): # NOTE: prometheus_client registers metrics in **globals**. Therefore # tests might fail when fixtures get re-created # Total number of requests processed - app['REQUEST_COUNT'] = Counter( - 'http_requests_total', 'Total Request Count', - ['app_name', 'method', 'endpoint', 'http_status'] + app["REQUEST_COUNT"] = Counter( + "http_requests_total", + "Total Request Count", + ["app_name", "method", "endpoint", "http_status"], ) # Latency of a request in seconds - app['REQUEST_LATENCY'] = Histogram( - 'http_request_latency_seconds', 'Request latency', - ['app_name', 'endpoint'] + app["REQUEST_LATENCY"] = Histogram( + "http_request_latency_seconds", "Request latency", ["app_name", "endpoint"] ) # Number of requests in progress - app['REQUEST_IN_PROGRESS']=Gauge( - 'http_requests_in_progress_total', 'Requests in progress', - ['app_name', 'endpoint', 'method'] + app["REQUEST_IN_PROGRESS"] = Gauge( + "http_requests_in_progress_total", + "Requests in progress", + ["app_name", "endpoint", "method"], ) # ensures is first layer but cannot guarantee the order setup is applied diff --git a/packages/service-library/src/servicelib/observer.py b/packages/service-library/src/servicelib/observer.py index 43567162f2b..a7e73188a9b 100644 --- a/packages/service-library/src/servicelib/observer.py +++ b/packages/service-library/src/servicelib/observer.py @@ -3,22 +3,25 @@ """ -import asyncio import logging from collections import defaultdict from functools import wraps +from .utils import logged_gather + log = logging.getLogger(__name__) event_registry = defaultdict(list) + async def emit(event: str, *args, **kwargs): if not event_registry[event]: return coroutines = [observer(*args, **kwargs) for observer in event_registry[event]] # all coroutine called in // - await asyncio.gather(*coroutines, return_exceptions=True) + await logged_gather(*coroutines) + def observe(event: str): def decorator(func): @@ -29,5 +32,7 @@ def decorator(func): @wraps(func) def wrapped(*args, **kwargs): return func(*args, **kwargs) + return wrapped + return decorator diff --git a/packages/service-library/src/servicelib/openapi.py b/packages/service-library/src/servicelib/openapi.py index 300a4b153eb..c7f52e1b97a 100644 --- a/packages/service-library/src/servicelib/openapi.py +++ b/packages/service-library/src/servicelib/openapi.py @@ -15,13 +15,17 @@ # Supported version of openapi (last number indicates only editorial changes) # TODO: ensure openapi_core.__version__ is up-to-date with OAI_VERSION -OAI_VERSION = '3.0.2' -OAI_VERSION_URL = 'https://github.com/OAI/OpenAPI-Specification/blob/master/versions/%s.md'%OAI_VERSION +OAI_VERSION = "3.0.2" +OAI_VERSION_URL = ( + "https://github.com/OAI/OpenAPI-Specification/blob/master/versions/%s.md" + % OAI_VERSION +) # alias OpenApiSpec = Spec -def get_base_path(specs: OpenApiSpec) ->str: + +def get_base_path(specs: OpenApiSpec) -> str: """ Expected API basepath By convention, the API basepath indicates the major @@ -33,7 +37,7 @@ def get_base_path(specs: OpenApiSpec) ->str: :rtype: str """ # TODO: guarantee this convention is true - return '/v' + specs.info.version.split('.')[0] + return "/v" + specs.info.version.split(".")[0] # TODO: _load_from_* is also found in jsonshema_specs @@ -42,6 +46,7 @@ def _load_from_path(filepath: Path) -> Tuple[Dict, str]: spec_dict = yaml.safe_load(f) return spec_dict, filepath.as_uri() + async def _load_from_url(session: ClientSession, url: URL) -> Tuple[Dict, str]: async 
with session.get(url) as resp: text = await resp.text() @@ -49,7 +54,7 @@ async def _load_from_url(session: ClientSession, url: URL) -> Tuple[Dict, str]: return spec_dict, str(url) -async def create_openapi_specs(location, session: ClientSession=None) -> OpenApiSpec: +async def create_openapi_specs(location, session: ClientSession = None) -> OpenApiSpec: """ Loads specs from a given location (url or path), validates them and returns a working instance @@ -69,17 +74,14 @@ async def create_openapi_specs(location, session: ClientSession=None) -> OpenApi raise ValueError("Client session required in arguments") spec_dict, spec_url = await _load_from_url(session, URL(location)) else: - path = Path(location).expanduser().resolve() #pylint: disable=no-member + path = Path(location).expanduser().resolve() # pylint: disable=no-member spec_dict, spec_url = _load_from_path(path) return openapi_core.create_spec(spec_dict, spec_url) - def create_specs(openapi_path: Path) -> OpenApiSpec: - warnings.warn("Use instead create_openapi_specs", - category=DeprecationWarning) - + warnings.warn("Use instead create_openapi_specs", category=DeprecationWarning) # TODO: spec_from_file and spec_from_url with openapi_path.open() as f: @@ -89,11 +91,10 @@ def create_specs(openapi_path: Path) -> OpenApiSpec: return spec - __all__ = ( - 'get_base_path', - 'create_openapi_specs', - 'OpenApiSpec', - 'OpenAPIError', - 'OpenAPIMappingError' + "get_base_path", + "create_openapi_specs", + "OpenApiSpec", + "OpenAPIError", + "OpenAPIMappingError", ) diff --git a/packages/service-library/src/servicelib/openapi_servers.py b/packages/service-library/src/servicelib/openapi_servers.py index 9153e51912f..7b965d7cb54 100644 --- a/packages/service-library/src/servicelib/openapi_servers.py +++ b/packages/service-library/src/servicelib/openapi_servers.py @@ -1,6 +1,3 @@ - - - def get_server(servers, url): # Development server: http://{host}:{port}/{basePath} for server in servers: diff --git a/packages/service-library/src/servicelib/openapi_validation.py b/packages/service-library/src/servicelib/openapi_validation.py index fe640fd0c6e..9266fedfd93 100644 --- a/packages/service-library/src/servicelib/openapi_validation.py +++ b/packages/service-library/src/servicelib/openapi_validation.py @@ -11,8 +11,11 @@ from openapi_core.validation.request.validators import RequestValidator from openapi_core.validation.response.validators import ResponseValidator -from .openapi_wrappers import (PARAMETERS_KEYS, AiohttpOpenAPIRequest, - AiohttpOpenAPIResponse) +from .openapi_wrappers import ( + PARAMETERS_KEYS, + AiohttpOpenAPIRequest, + AiohttpOpenAPIResponse, +) logger = logging.getLogger(__name__) @@ -32,28 +35,30 @@ async def validate_request(request: web.Request, spec: OpenApiSpec): return result.parameters, result.body, result.errors + async def validate_parameters(spec: OpenApiSpec, request: web.Request): req = await AiohttpOpenAPIRequest.create(request) return shortcuts.validate_parameters(spec, req) + async def validate_body(spec: OpenApiSpec, request: web.Request): req = await AiohttpOpenAPIRequest.create(request) return shortcuts.validate_body(spec, req) + async def validate_data(spec: OpenApiSpec, request, response: web.Response): if isinstance(request, web.Request): req = await AiohttpOpenAPIRequest.create(request) else: # TODO: alternative MockRequest - #params = ['host_url', 'method', 'path'] - #opapi_request = MockRequest(*args) + # params = ['host_url', 'method', 'path'] + # opapi_request = MockRequest(*args) - params = 
['full_url_pattern', 'method'] - assert all(hasattr(request, attr) for attr in params) # nosec + params = ["full_url_pattern", "method"] + assert all(hasattr(request, attr) for attr in params) # nosec # TODO: if a dict with params, convert dict to dot operations! and reverse - req = request res = await AiohttpOpenAPIResponse.create(response) @@ -65,7 +70,10 @@ async def validate_data(spec: OpenApiSpec, request, response: web.Response): return result.data -async def validate_response(spec: OpenApiSpec, request: web.Request, response: web.Response): + +async def validate_response( + spec: OpenApiSpec, request: web.Request, response: web.Response +): """ Validates server response against openapi specs @@ -74,12 +82,11 @@ async def validate_response(spec: OpenApiSpec, request: web.Request, response: w validator = ResponseValidator(spec) req = await AiohttpOpenAPIRequest.create(request) - res = AiohttpOpenAPIResponse(response, response.text) # FIXME:ONLY IN SERVER side. Async in client! + res = AiohttpOpenAPIResponse( + response, response.text + ) # FIXME:ONLY IN SERVER side. Async in client! result = validator.validate(req, res) result.raise_for_errors() -__all__ = ( - 'validate_request', - 'validate_data' -) +__all__ = ("validate_request", "validate_data") diff --git a/packages/service-library/src/servicelib/openapi_wrappers.py b/packages/service-library/src/servicelib/openapi_wrappers.py index bd116afc4f4..ad090e0f531 100644 --- a/packages/service-library/src/servicelib/openapi_wrappers.py +++ b/packages/service-library/src/servicelib/openapi_wrappers.py @@ -10,14 +10,14 @@ log = logging.getLogger(__name__) -CAPTURES = re.compile(r'\(\?P<([_a-zA-Z][_a-zA-Z0-9]+)>(.[^)]+)\)') -PARAMETERS_KEYS = ('path', 'query', 'header', 'cookie') +CAPTURES = re.compile(r"\(\?P<([_a-zA-Z][_a-zA-Z0-9]+)>(.[^)]+)\)") +PARAMETERS_KEYS = ("path", "query", "header", "cookie") PATH_KEY, QUERY_KEY, HEADER_KEY, COOKIE_KEY = PARAMETERS_KEYS + class AiohttpOpenAPIRequest(BaseOpenAPIRequest): wrappedcls = web.Request - def __init__(self, request: web.Request, data: str): self._request = request self._body = data @@ -48,18 +48,18 @@ def path_pattern(self): info = match_info.get_info() # if PlainResource then - path_pattern = info.get('path') + path_pattern = info.get("path") # if DynamicResource then whe need to undo the conversion to formatter and pattern if not path_pattern: - formatter = info.get('formatter') - re_pattern = info.get('pattern').pattern + formatter = info.get("formatter") + re_pattern = info.get("pattern").pattern kargs = {} # TODO: create a test with '/my/tokens/{service}/' # TODO: create a test with '/my/tokens/{service:google|facebook}/' # TODO: create a test with '/my/tokens/{identifier:\d+}/' for key, value in CAPTURES.findall(re_pattern): - if value == '[^{}/]+': # = no re in pattern + if value == "[^{}/]+": # = no re in pattern kargs[key] = "{%s}" % (key) else: kargs[key] = "{%s:%s}" % (key, value) diff --git a/packages/service-library/src/servicelib/request_keys.py b/packages/service-library/src/servicelib/request_keys.py index 1185c7fa124..89a3b6a2777 100644 --- a/packages/service-library/src/servicelib/request_keys.py +++ b/packages/service-library/src/servicelib/request_keys.py @@ -3,6 +3,4 @@ """ # RQT=request -RQT_USERID_KEY = __name__ + '.userid' - - +RQT_USERID_KEY = __name__ + ".userid" diff --git a/packages/service-library/src/servicelib/requests_utils.py b/packages/service-library/src/servicelib/requests_utils.py index e8e8e78b508..c1ebbbb4cc1 100644 --- 
a/packages/service-library/src/servicelib/requests_utils.py +++ b/packages/service-library/src/servicelib/requests_utils.py @@ -5,10 +5,12 @@ def get_request(*args, **kwargs) -> web.BaseRequest: """ Helper for handler function decorators to retrieve requests """ - request = kwargs.get('request', args[-1] if args else None) + request = kwargs.get("request", args[-1] if args else None) if not isinstance(request, web.BaseRequest): - msg = ("Incorrect decorator usage. " - "Expecting `def handler(request)` " - "or `def handler(self, request)`.") + msg = ( + "Incorrect decorator usage. " + "Expecting `def handler(request)` " + "or `def handler(self, request)`." + ) raise RuntimeError(msg) return request diff --git a/packages/service-library/src/servicelib/resources.py b/packages/service-library/src/servicelib/resources.py index c9c3991a2ea..b382ea2860d 100644 --- a/packages/service-library/src/servicelib/resources.py +++ b/packages/service-library/src/servicelib/resources.py @@ -17,6 +17,7 @@ class ResourcesFacade: Resources are read-only files/folders """ + package_name: str distribution_name: str config_folder: str @@ -40,7 +41,9 @@ def get_path(self, resource_name: str) -> Path: WARNING: existence of file is not guaranteed. Use resources.exists WARNING: resource files are supposed to be used as read-only! """ - resource_path = pathlib.Path( pkg_resources.resource_filename(self.package_name, resource_name) ) + resource_path = pathlib.Path( + pkg_resources.resource_filename(self.package_name, resource_name) + ) return resource_path def get_distribution(self): @@ -48,13 +51,13 @@ def get_distribution(self): return pkg_resources.get_distribution(self.distribution_name) - # TODO: create abc @attr.s(auto_attribs=True) class FileResource: """ TODO: lazy evaluation of attribs """ + name: str diff --git a/packages/service-library/src/servicelib/rest_codecs.py b/packages/service-library/src/servicelib/rest_codecs.py index 34f33c187e2..4e6effba714 100644 --- a/packages/service-library/src/servicelib/rest_codecs.py +++ b/packages/service-library/src/servicelib/rest_codecs.py @@ -14,7 +14,8 @@ class DataEncoder(json.JSONEncoder): TODO: extend to more types like apiset """ - def default(self, o): #pylint: disable=E0202 + + def default(self, o): # pylint: disable=E0202 if attr.has(o.__class__): return attr.asdict(o) return json.JSONEncoder.default(self, o) diff --git a/packages/service-library/src/servicelib/rest_middlewares.py b/packages/service-library/src/servicelib/rest_middlewares.py index 09e69aeda22..a929dfa7046 100644 --- a/packages/service-library/src/servicelib/rest_middlewares.py +++ b/packages/service-library/src/servicelib/rest_middlewares.py @@ -9,9 +9,14 @@ from openapi_core.schema.exceptions import OpenAPIError from .rest_models import ErrorItemType, ErrorType, LogMessageType -from .rest_responses import (JSON_CONTENT_TYPE, create_data_response, - create_error_response, is_enveloped_from_map, - is_enveloped_from_text, wrap_as_envelope) +from .rest_responses import ( + JSON_CONTENT_TYPE, + create_data_response, + create_error_response, + is_enveloped_from_map, + is_enveloped_from_text, + wrap_as_envelope, +) from .rest_utils import EnvelopeFactory from .rest_validators import OpenApiValidator @@ -29,15 +34,16 @@ def is_api_request(request: web.Request, api_version: str) -> bool: def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception): # FIXME: send info + trace to client ONLY in debug mode!!! 
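    # NOTE: create_error_response returns a web.HTTPError instance, so the
    # `raise resp` at the end of this helper both aborts the handler and
    # delivers the enveloped 500 to the client.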
resp = create_error_response( - [err,], - "Unexpected Server error", - web.HTTPInternalServerError - ) + [err,], "Unexpected Server error", web.HTTPInternalServerError + ) - logger.exception('Unexpected server error "%s" from access: %s "%s %s". Responding with status %s', + logger.exception( + 'Unexpected server error "%s" from access: %s "%s %s". Responding with status %s', type(err), - request.remote, request.method, request.path, - resp.status + request.remote, + request.method, + request.path, + resp.status, ) raise resp @@ -65,9 +71,9 @@ async def _middleware(request: web.Request, handler): if not err.text or not is_enveloped_from_text(err.text): error = ErrorType( - errors=[ErrorItemType.from_error(err), ], + errors=[ErrorItemType.from_error(err),], status=err.status, - logs=[LogMessageType(message=err.reason, level="ERROR"), ] + logs=[LogMessageType(message=err.reason, level="ERROR"),], ) err.text = EnvelopeFactory(error=error).as_text() @@ -89,7 +95,7 @@ async def _middleware(request: web.Request, handler): logger.debug("Redirected to %s", ex) raise - except Exception as err: # pylint: disable=broad-except + except Exception as err: # pylint: disable=broad-except _process_and_raise_unexpected_error(request, err) return _middleware @@ -106,8 +112,7 @@ async def _middleware(request: web.Request, handler): return await handler(request) # TODO: move this outside! - RQ_VALIDATED_DATA_KEYS = ( - "validated-path", "validated-query", "validated-body") + RQ_VALIDATED_DATA_KEYS = ("validated-path", "validated-query", "validated-body") try: validator = OpenApiValidator.create(request.app, api_version) @@ -156,15 +161,18 @@ async def _middleware(request: web.Request, handler): # Enforced by user. Should check it is json? response = resp return response + return _middleware -def append_rest_middlewares(app: web.Application, api_version: str = DEFAULT_API_VERSION): +def append_rest_middlewares( + app: web.Application, api_version: str = DEFAULT_API_VERSION +): """ Helper that appends rest-middlewares in the correct order """ app.middlewares.append(error_middleware_factory(api_version)) # FIXME: openapi-core fails to validate response when specs are in separate files! 
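    # NOTE: ordering matters here: aiohttp treats the first middleware in
    # app.middlewares as the outermost layer (cf. check_outermost_middleware
    # above), so the error middleware appended first also catches exceptions
    # raised by the envelope middleware appended below.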
# FIXME: disabled so webserver and storage do not get this issue - #app.middlewares.append(validate_middleware_factory(api_version)) + # app.middlewares.append(validate_middleware_factory(api_version)) app.middlewares.append(envelope_middleware_factory(api_version)) diff --git a/packages/service-library/src/servicelib/rest_models.py b/packages/service-library/src/servicelib/rest_models.py index 269629e6ae8..a83abf15329 100644 --- a/packages/service-library/src/servicelib/rest_models.py +++ b/packages/service-library/src/servicelib/rest_models.py @@ -1,4 +1,3 @@ - """ rest - common schema models and classes UNDER DEVELOPMENT @@ -17,8 +16,8 @@ @attr.s(auto_attribs=True) class LogMessageType: message: str - level: str = 'INFO' - logger: str = 'user' + level: str = "INFO" + logger: str = "user" @attr.s(auto_attribs=True) @@ -30,11 +29,9 @@ class ErrorItemType: @classmethod def from_error(cls, err: BaseException): - item = cls(code=err.__class__.__name__, - message=str(err), - resource=None, - field=None - ) + item = cls( + code=err.__class__.__name__, message=str(err), resource=None, field=None + ) return item diff --git a/packages/service-library/src/servicelib/rest_oas.py b/packages/service-library/src/servicelib/rest_oas.py index 25299e20b1f..eb51bfbf53e 100644 --- a/packages/service-library/src/servicelib/rest_oas.py +++ b/packages/service-library/src/servicelib/rest_oas.py @@ -16,16 +16,12 @@ def set_specs(app: web.Application, specs: Spec) -> Spec: app[APP_OPENAPI_SPECS_KEY] = specs return app[APP_OPENAPI_SPECS_KEY] + def get_specs(app: web.Application) -> Spec: # TODO consider the case of multiple versions of spec -> Dict[Spec] ?? return app[APP_OPENAPI_SPECS_KEY] - OpenApiSpec = Spec -__all__ = ( - 'set_specs', 'get_specs', - 'OpenApiSpec', - 'create_specs' -) +__all__ = ("set_specs", "get_specs", "OpenApiSpec", "create_specs") diff --git a/packages/service-library/src/servicelib/rest_responses.py b/packages/service-library/src/servicelib/rest_responses.py index 62a767dafd9..032b457ef33 100644 --- a/packages/service-library/src/servicelib/rest_responses.py +++ b/packages/service-library/src/servicelib/rest_responses.py @@ -11,12 +11,14 @@ from .rest_codecs import jsonify, json from .rest_models import ErrorItemType, ErrorType -ENVELOPE_KEYS = ('data', 'error') -JSON_CONTENT_TYPE = 'application/json' +ENVELOPE_KEYS = ("data", "error") +JSON_CONTENT_TYPE = "application/json" + def is_enveloped_from_map(payload: Mapping) -> bool: return all(k in ENVELOPE_KEYS for k in payload.keys()) + def is_enveloped_from_text(text: str) -> bool: try: payload = json.loads(text) @@ -24,6 +26,7 @@ def is_enveloped_from_text(text: str) -> bool: return False return is_enveloped_from_map(payload) + def is_enveloped(payload) -> bool: if isinstance(payload, Mapping): return is_enveloped_from_map(payload) @@ -38,9 +41,9 @@ def wrap_as_envelope(data=None, error=None, as_null=True): """ payload = {} if data or as_null: - payload['data'] = data + payload["data"] = data if error or as_null: - payload['error'] = error + payload["error"] = error return payload @@ -53,6 +56,7 @@ def unwrap_envelope(payload: Dict) -> Tuple: # RESPONSES FACTORIES ------------------------------- + def create_data_response(data) -> web.Response: response = None try: @@ -63,18 +67,15 @@ def create_data_response(data) -> web.Response: response = web.json_response(payload, dumps=jsonify) except (TypeError, ValueError) as err: - response = create_error_response( - [err,], - str(err), - web.HTTPInternalServerError - ) + response = 
create_error_response([err,], str(err), web.HTTPInternalServerError) return response def create_error_response( - errors: List[Exception], - reason: Optional[str]=None, - error_cls: Optional[web.HTTPError]=None ) -> web.HTTPError: + errors: List[Exception], + reason: Optional[str] = None, + error_cls: Optional[web.HTTPError] = None, +) -> web.HTTPError: # TODO: guarantee no throw! if error_cls is None: error_cls = web.HTTPInternalServerError @@ -82,15 +83,13 @@ def create_error_response( # TODO: assumes openapi error model!!! error = ErrorType( errors=[ErrorItemType.from_error(err) for err in errors], - status=error_cls.status_code + status=error_cls.status_code, ) payload = wrap_as_envelope(error=attr.asdict(error)) response = error_cls( - reason=reason, - text=jsonify(payload), - content_type=JSON_CONTENT_TYPE + reason=reason, text=jsonify(payload), content_type=JSON_CONTENT_TYPE ) return response @@ -103,8 +102,5 @@ def create_log_response(msg: str, level: str) -> web.Response: """ # TODO: link more with real logger msg = LogMessageType(msg, level) - response = web.json_response(data={ - 'data': attr.asdict(msg), - 'error': None - }) + response = web.json_response(data={"data": attr.asdict(msg), "error": None}) return response diff --git a/packages/service-library/src/servicelib/rest_routing.py b/packages/service-library/src/servicelib/rest_routing.py index 35ac96a47f9..675bbe3c93a 100644 --- a/packages/service-library/src/servicelib/rest_routing.py +++ b/packages/service-library/src/servicelib/rest_routing.py @@ -17,8 +17,10 @@ def has_handler_signature(fun) -> bool: # TODO: last parameter is web.Request or called request? - return any(param.annotation == web.Request - for name, param in inspect.signature(fun).parameters.items()) + return any( + param.annotation == web.Request + for name, param in inspect.signature(fun).parameters.items() + ) def get_handlers_from_namespace(handlers_nsp) -> Dict: @@ -31,7 +33,9 @@ def get_handlers_from_namespace(handlers_nsp) -> Dict: elif hasattr(handlers_nsp, "__class__"): predicate = lambda obj: inspect.ismethod(obj) and has_handler_signature(obj) else: - raise ValueError("Expected module or class as namespace, got %s" % type(handlers_nsp)) + raise ValueError( + "Expected module or class as namespace, got %s" % type(handlers_nsp) + ) name_to_handler_map = dict(inspect.getmembers(handlers_nsp, predicate)) return name_to_handler_map @@ -46,14 +50,15 @@ def iter_path_operations(specs: OpenApiSpec) -> Generator: for url, path in specs.paths.items(): for method, operation in path.operations.items(): - yield method.upper(), base_path+url, operation.operation_id, operation.tags + yield method.upper(), base_path + url, operation.operation_id, operation.tags def map_handlers_with_operations( - handlers_map: Mapping[str, Callable], - operations_it: Generator, - * , - strict: bool=True) -> List[web.RouteDef]: + handlers_map: Mapping[str, Callable], + operations_it: Generator, + *, + strict: bool = True +) -> List[web.RouteDef]: """ Matches operation ids with handler names and returns a list of routes :param handlers_map: .See get_handlers_from_namespace @@ -72,24 +77,23 @@ def map_handlers_with_operations( for method, path, operation_id, _tags in operations_it: handler = handlers.pop(operation_id, None) if handler: - routes.append( web.route(method.upper(), path, handler, name=operation_id) ) + routes.append(web.route(method.upper(), path, handler, name=operation_id)) elif strict: raise ValueError("Cannot find any handler named {} ".format(operation_id)) if 
handlers and strict: - raise RuntimeError("{} handlers were not mapped to routes: {}".format( - len(handlers), - handlers.keys()) + raise RuntimeError( + "{} handlers were not mapped to routes: {}".format( + len(handlers), handlers.keys() ) + ) return routes def create_routes_from_namespace( - specs: OpenApiSpec, - handlers_nsp, - *, - strict: bool=True) -> List[web.RouteDef]: + specs: OpenApiSpec, handlers_nsp, *, strict: bool = True +) -> List[web.RouteDef]: """ Gets *all* available handlers and maps one-to-one to *all* specs routes :param specs: openapi spec object @@ -104,6 +108,8 @@ def create_routes_from_namespace( if not handlers and strict: raise ValueError("No handlers found in %s" % handlers_nsp) - routes = map_handlers_with_operations(handlers, iter_path_operations(specs), strict=strict) + routes = map_handlers_with_operations( + handlers, iter_path_operations(specs), strict=strict + ) return routes diff --git a/packages/service-library/src/servicelib/rest_utils.py b/packages/service-library/src/servicelib/rest_utils.py index 41ffb47cf5d..f0e13249da1 100644 --- a/packages/service-library/src/servicelib/rest_utils.py +++ b/packages/service-library/src/servicelib/rest_utils.py @@ -11,8 +11,13 @@ from aiohttp import web from openapi_core.extensions.models.factories import Model as BodyModel -from .openapi_validation import (COOKIE_KEY, HEADER_KEY, PATH_KEY, QUERY_KEY, - validate_request) +from .openapi_validation import ( + COOKIE_KEY, + HEADER_KEY, + PATH_KEY, + QUERY_KEY, + validate_request, +) from .rest_models import ErrorItemType, ErrorType from .rest_oas import get_specs @@ -20,8 +25,8 @@ def body_to_dict(body: BodyModel) -> Dict: # openapi_core.extensions.models.factories.Model -> dict dikt = {} - for k,v in body.__dict__.items(): - if hasattr(v, '__dict__'): + for k, v in body.__dict__.items(): + if hasattr(v, "__dict__"): v = body_to_dict(v) dikt[k] = v return dikt @@ -33,8 +38,9 @@ class EnvelopeFactory: as suggested in https://medium.com/studioarmix/learn-restful-api-design-ideals-c5ec915a430f """ + def __init__(self, data=None, error=None): - enveloped = {'data': data, 'error': error} + enveloped = {"data": data, "error": error} for key, value in enveloped.items(): if value is not None and not isinstance(value, dict): enveloped[key] = attr.asdict(value) @@ -62,20 +68,15 @@ async def extract_and_validate(request: web.Request): if errors: error = ErrorType( errors=[ErrorItemType.from_error(err) for err in errors], - status=web.HTTPBadRequest.status_code + status=web.HTTPBadRequest.status_code, ) raise web.HTTPBadRequest( reason="Failed request validation against API specs", text=EnvelopeFactory(error=error).as_text(), - content_type='application/json', - ) + content_type="application/json", + ) return params[PATH_KEY], params[QUERY_KEY], body -__all__ = ( - 'COOKIE_KEY', - 'HEADER_KEY', - 'PATH_KEY', - 'QUERY_KEY' -) +__all__ = ("COOKIE_KEY", "HEADER_KEY", "PATH_KEY", "QUERY_KEY") diff --git a/packages/service-library/src/servicelib/rest_validators.py b/packages/service-library/src/servicelib/rest_validators.py index 950897bb3a4..7151e6ba17b 100644 --- a/packages/service-library/src/servicelib/rest_validators.py +++ b/packages/service-library/src/servicelib/rest_validators.py @@ -1,10 +1,13 @@ - from aiohttp import web from openapi_core.validation.request.validators import RequestValidator from openapi_core.validation.response.validators import ResponseValidator -from .openapi_wrappers import (PATH_KEY, QUERY_KEY, AiohttpOpenAPIRequest, - AiohttpOpenAPIResponse) +from 
.openapi_wrappers import ( + PATH_KEY, + QUERY_KEY, + AiohttpOpenAPIRequest, + AiohttpOpenAPIResponse, +) from .rest_oas import OpenApiSpec, get_specs from .rest_responses import create_error_response @@ -13,6 +16,7 @@ class OpenApiValidator: """ Used to validate data in the request->response cycle against openapi specs """ + @classmethod def create(cls, app: web.Application, _version=""): specs = get_specs(app) @@ -24,7 +28,7 @@ def __init__(self, spec: OpenApiSpec): self._resvtor = ResponseValidator(spec, custom_formatters=None) # Current - self.current_request = None # wrapper request + self.current_request = None # wrapper request async def check_request(self, request: web.Request): self.current_request = None @@ -36,22 +40,28 @@ async def check_request(self, request: web.Request): self.current_request = rq if result.errors: - err = create_error_response(result.errors, - "Failed request validation against API specs", - web.HTTPBadRequest) + err = create_error_response( + result.errors, + "Failed request validation against API specs", + web.HTTPBadRequest, + ) raise err - path, query = [ result.parameters[k] for k in (PATH_KEY, QUERY_KEY) ] + path, query = [result.parameters[k] for k in (PATH_KEY, QUERY_KEY)] return path, query, result.body def check_response(self, response: web.Response): req = self.current_request - res = AiohttpOpenAPIResponse(response, response.text) # FIXME:ONLY IN SERVER side. Async in client! + res = AiohttpOpenAPIResponse( + response, response.text + ) # FIXME:ONLY IN SERVER side. Async in client! result = self._resvtor.validate(req, res) if result.errors: - err = create_error_response(result.errors, - "Failed response validation against API specs", - web.HTTPServiceUnavailable) + err = create_error_response( + result.errors, + "Failed response validation against API specs", + web.HTTPServiceUnavailable, + ) raise err diff --git a/packages/service-library/src/servicelib/tracing.py b/packages/service-library/src/servicelib/tracing.py index 94f650336df..caf531c4bef 100644 --- a/packages/service-library/src/servicelib/tracing.py +++ b/packages/service-library/src/servicelib/tracing.py @@ -15,16 +15,23 @@ log = logging.getLogger(__name__) -def setup_tracing(app: web.Application, app_name: str, host: str, port: str, config: Dict) -> bool: + +def setup_tracing( + app: web.Application, app_name: str, host: str, port: str, config: Dict +) -> bool: zipkin_address = f"{config['zipkin_endpoint']}/api/v2/spans" endpoint = az.create_endpoint(app_name, ipv4=host, port=port) loop = asyncio.get_event_loop() - tracer = loop.run_until_complete(az.create(zipkin_address, endpoint, sample_rate=1.0)) + tracer = loop.run_until_complete( + az.create(zipkin_address, endpoint, sample_rate=1.0) + ) az.setup(app, tracer) return True -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int), - T.Key('zipkin_endpoint', default="http://jaeger:9411"): T.String() + +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int), + T.Key("zipkin_endpoint", default="http://jaeger:9411"): T.String(), } ) diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index 5e013f139bf..07763aad9f7 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -4,7 +4,12 @@ I order to avoid cyclic dependences, please DO NOT IMPORT ANYTHING from . 
""" +import asyncio +import logging from pathlib import Path +from typing import Any, Coroutine, List, Optional, Union + +logger = logging.getLogger(__name__) def is_osparc_repo_dir(path: Path) -> bool: @@ -14,7 +19,7 @@ def is_osparc_repo_dir(path: Path) -> bool: return all(d in got for d in expected) -def search_osparc_repo_dir(start, max_iterations=8): +def search_osparc_repo_dir(start: Union[str, Path], max_iterations=8) -> Optional[Path]: """ Returns path to root repo dir or None if it does not exists NOTE: assumes starts is a path within repo @@ -22,8 +27,37 @@ def search_osparc_repo_dir(start, max_iterations=8): max_iterations = max(max_iterations, 1) root_dir = Path(start) iteration_number = 0 - while not is_osparc_repo_dir(root_dir) and iteration_number None: + future = asyncio.ensure_future(obj) + + def log_exception_callback(fut: asyncio.Future): + try: + fut.result() + except Exception: # pylint: disable=broad-except + logger.exception("Error occured while running task!") + + future.add_done_callback(log_exception_callback) + + +# // tasks +async def logged_gather(*tasks, reraise: bool = True) -> List[Any]: + # all coroutine called in // and we take care of returning the exceptions + results = await asyncio.gather(*tasks, return_exceptions=True) + for value in results: + if isinstance(value, Exception): + if reraise: + raise value + logger.error( + "Exception occured while running %s: %s", + str(tasks[results.index(value)]), + str(value), + ) + return results diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py index c6d2f6d32d4..1c51329b588 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -24,11 +24,13 @@ def package_dir(): return pdir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_simcore_root_dir(here): root_dir = here.parent.parent.parent.resolve() assert root_dir.exists(), "Is this service within osparc-simcore repo?" 
- assert any(root_dir.glob("packages/service-library")), "%s not look like rootdir" % root_dir + assert any(root_dir.glob("packages/service-library")), ( + "%s not look like rootdir" % root_dir + ) return root_dir diff --git a/packages/service-library/tests/test_application_setup.py b/packages/service-library/tests/test_application_setup.py index e848152b43f..30dac37af4a 100644 --- a/packages/service-library/tests/test_application_setup.py +++ b/packages/service-library/tests/test_application_setup.py @@ -11,7 +11,12 @@ from aiohttp import web from servicelib.application_keys import APP_CONFIG_KEY -from servicelib.application_setup import app_module_setup, ModuleCategory, DependencyError, APP_SETUP_KEY +from servicelib.application_setup import ( + app_module_setup, + ModuleCategory, + DependencyError, + APP_SETUP_KEY, +) log = logging.getLogger(__name__) @@ -20,19 +25,22 @@ def setup_bar(app: web.Application, arg1, kargs=55): return True + @app_module_setup("package.foo", ModuleCategory.ADDON, logger=log) def setup_foo(app: web.Application, arg1, kargs=33): return True -@app_module_setup("package.zee", ModuleCategory.ADDON, - config_enabled="main.zee_enabled", - logger=log) + +@app_module_setup( + "package.zee", ModuleCategory.ADDON, config_enabled="main.zee_enabled", logger=log +) def setup_zee(app: web.Application, arg1, kargs=55): return True -@app_module_setup("package.needs_foo", ModuleCategory.SYSTEM, - depends=['package.foo',], logger=log) +@app_module_setup( + "package.needs_foo", ModuleCategory.SYSTEM, depends=["package.foo",], logger=log +) def setup_needs_foo(app: web.Application, arg1, kargs=55): return True @@ -40,17 +48,12 @@ def setup_needs_foo(app: web.Application, arg1, kargs=55): @pytest.fixture def app_config() -> Dict: return { - 'foo': { - "enabled": True - }, - 'bar': { - "enabled": False - }, - 'main':{ - 'zee_enabled': True - } + "foo": {"enabled": True}, + "bar": {"enabled": False}, + "main": {"zee_enabled": True}, } + @pytest.fixture def app(app_config): _app = web.Application() @@ -61,8 +64,8 @@ def app(app_config): def test_setup_config_enabled(app_config, app): assert setup_zee(app, 1) - assert setup_zee.metadata()['config_enabled'] == "main.zee_enabled" - app_config['main']['zee_enabled'] = False + assert setup_zee.metadata()["config_enabled"] == "main.zee_enabled" + app_config["main"]["zee_enabled"] = False assert not setup_zee(app, 2) @@ -74,14 +77,16 @@ def test_setup_dependencies(app_config, app): assert setup_foo(app, 1) assert setup_needs_foo(app, 2) - assert setup_needs_foo.metadata()['dependencies'] == [setup_foo.metadata()['module_name'], ] + assert setup_needs_foo.metadata()["dependencies"] == [ + setup_foo.metadata()["module_name"], + ] def test_marked_setup(app_config, app): assert setup_foo(app, 1) - assert setup_foo.metadata()['module_name'] == 'package.foo' - assert setup_foo.metadata()['module_name'] in app[APP_SETUP_KEY] + assert setup_foo.metadata()["module_name"] == "package.foo" + assert setup_foo.metadata()["module_name"] in app[APP_SETUP_KEY] - app_config['foo']['enabled'] = False + app_config["foo"]["enabled"] = False assert not setup_foo(app, 2) diff --git a/packages/service-library/tests/test_decorators.py b/packages/service-library/tests/test_decorators.py index 69142b6c30c..a6a6cae3a2b 100644 --- a/packages/service-library/tests/test_decorators.py +++ b/packages/service-library/tests/test_decorators.py @@ -11,12 +11,13 @@ def raise_my_exception(): assert not raise_my_exception() + def test_safe_return_mutables(): - 
some_mutable_return = ['some', 'defaults'] + some_mutable_return = ["some", "defaults"] @safe_return(if_fails_return=some_mutable_return) def return_mutable(): raise RuntimeError("Runtime is default") - assert return_mutable() == some_mutable_return # contains the same - assert not (return_mutable() is some_mutable_return) # but is not the same + assert return_mutable() == some_mutable_return # contains the same + assert not (return_mutable() is some_mutable_return) # but is not the same diff --git a/packages/service-library/tests/test_openapi_validation.py b/packages/service-library/tests/test_openapi_validation.py index 195ea584025..9d53d216eba 100644 --- a/packages/service-library/tests/test_openapi_validation.py +++ b/packages/service-library/tests/test_openapi_validation.py @@ -11,9 +11,11 @@ from servicelib import openapi from servicelib.application_keys import APP_OPENAPI_SPECS_KEY -from servicelib.rest_middlewares import (envelope_middleware_factory, - error_middleware_factory, - validate_middleware_factory) +from servicelib.rest_middlewares import ( + envelope_middleware_factory, + error_middleware_factory, + validate_middleware_factory, +) from servicelib.rest_responses import is_enveloped, unwrap_envelope from servicelib.rest_routing import create_routes_from_namespace from tutils import Handlers @@ -26,6 +28,7 @@ async def specs(loop, here): specs = await openapi.create_openapi_specs(openapi_path) return specs + @pytest.fixture def client(loop, aiohttp_client, specs): app = web.Application() @@ -48,19 +51,12 @@ def client(loop, aiohttp_client, specs): return loop.run_until_complete(aiohttp_client(app)) - -@pytest.mark.parametrize("path", [ - "/health", - "/dict", - "/envelope", - "/list", - "/attobj", - "/string", - "/number", -]) +@pytest.mark.parametrize( + "path", ["/health", "/dict", "/envelope", "/list", "/attobj", "/string", "/number",] +) async def test_validate_handlers(path, client, specs): base = openapi.get_base_path(specs) - response = await client.get(base+path) + response = await client.get(base + path) payload = await response.json() assert is_enveloped(payload) @@ -70,12 +66,12 @@ async def test_validate_handlers(path, client, specs): assert data - -#"/mixed" FIXME: openapi core bug reported in https://github.com/p1c2u/openapi-core/issues/153 +# "/mixed" FIXME: openapi core bug reported in https://github.com/p1c2u/openapi-core/issues/153 # Raises AssertionError: assert not {'errors': [{'code': 'InvalidMediaTypeValue', 'field': None, 'message': 'Mimetype invalid: Value not valid for schema', 'resource': None}], 'logs': [], 'status': 503} @pytest.mark.xfail( reason="openapi core bug reported in https://github.com/p1c2u/openapi-core/issues/153", strict=True, - raises=AssertionError) + raises=AssertionError, +) async def test_validate_handlers_mixed(client, specs): - await test_validate_handlers('/mixed', client, specs) + await test_validate_handlers("/mixed", client, specs) diff --git a/packages/service-library/tests/test_package.py b/packages/service-library/tests/test_package.py index 03ba51b4e47..d07cdf8258e 100644 --- a/packages/service-library/tests/test_package.py +++ b/packages/service-library/tests/test_package.py @@ -21,8 +21,8 @@ def pylintrc(osparc_simcore_root_dir): def test_run_pylint(pylintrc, package_dir): try: - AUTODETECT=0 - cmd = f'pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}'.split() + AUTODETECT = 0 + cmd = f"pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}".split() assert subprocess.check_call(cmd) == 0 except 
subprocess.CalledProcessError as err: pytest.fail("Linting error. Linter existed with code %d" % err.returncode) @@ -32,12 +32,12 @@ def test_no_pdbs_in_place(package_dir): # TODO: add also test_dir excluding this function!? # TODO: it can be commented! # TODO: add check on other undesired code strings?! - MATCH = re.compile(r'pdb.set_trace()') + MATCH = re.compile(r"pdb.set_trace()") EXCLUDE = ["__pycache__", ".git"] for root, dirs, files in os.walk(package_dir): for name in files: if name.endswith(".py"): - pypth = (Path(root) / name) + pypth = Path(root) / name code = pypth.read_text() found = MATCH.findall(code) # TODO: should return line number diff --git a/packages/service-library/tests/test_rest_middlewares.py b/packages/service-library/tests/test_rest_middlewares.py index 58dc42a4a26..180b295071a 100644 --- a/packages/service-library/tests/test_rest_middlewares.py +++ b/packages/service-library/tests/test_rest_middlewares.py @@ -5,8 +5,10 @@ from aiohttp import web from servicelib import openapi from servicelib.application_keys import APP_OPENAPI_SPECS_KEY -from servicelib.rest_middlewares import (envelope_middleware_factory, - error_middleware_factory) +from servicelib.rest_middlewares import ( + envelope_middleware_factory, + error_middleware_factory, +) from servicelib.rest_responses import is_enveloped, unwrap_envelope from servicelib.rest_routing import create_routes_from_namespace from tutils import Handlers @@ -37,22 +39,25 @@ def client(loop, aiohttp_client, specs): app.middlewares.append(error_middleware_factory(base)) app.middlewares.append(envelope_middleware_factory(base)) - return loop.run_until_complete(aiohttp_client(app)) -@pytest.mark.parametrize("path,expected_data", [ - ("/health", Handlers.get('health')), - ("/dict", Handlers.get('dict')), - ("/envelope", Handlers.get('envelope')['data']), - ("/list", Handlers.get('list')), - ("/attobj", Handlers.get('attobj')), - ("/string", Handlers.get('string')), - ("/number", Handlers.get('number')), - ("/mixed", Handlers.get('mixed')) -]) + +@pytest.mark.parametrize( + "path,expected_data", + [ + ("/health", Handlers.get("health")), + ("/dict", Handlers.get("dict")), + ("/envelope", Handlers.get("envelope")["data"]), + ("/list", Handlers.get("list")), + ("/attobj", Handlers.get("attobj")), + ("/string", Handlers.get("string")), + ("/number", Handlers.get("number")), + ("/mixed", Handlers.get("mixed")), + ], +) async def test_envelope_middleware(path, expected_data, client, specs): base = openapi.get_base_path(specs) - response = await client.get(base+path) + response = await client.get(base + path) payload = await response.json() assert is_enveloped(payload) diff --git a/packages/service-library/tests/test_rest_routing.py b/packages/service-library/tests/test_rest_routing.py index cd714e0fc0e..fb4bc5e5f86 100644 --- a/packages/service-library/tests/test_rest_routing.py +++ b/packages/service-library/tests/test_rest_routing.py @@ -4,10 +4,12 @@ import pytest from servicelib import openapi -from servicelib.rest_routing import (create_routes_from_namespace, - get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + create_routes_from_namespace, + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from tutils import Handlers @@ -23,13 +25,11 @@ def test_filtered_routing(specs): handlers = Handlers() found = get_handlers_from_namespace(handlers) - hdl_sel = { name:hdl - for name, hdl in found.items() - if "i" in name - } - 
opr_iter = ( (mth, url, opname, _tags) - for mth, url, opname, _tags in iter_path_operations(specs) - if "i" in opname + hdl_sel = {name: hdl for name, hdl in found.items() if "i" in name} + opr_iter = ( + (mth, url, opname, _tags) + for mth, url, opname, _tags in iter_path_operations(specs) + if "i" in opname ) routes = map_handlers_with_operations(hdl_sel, opr_iter, strict=True) @@ -45,7 +45,7 @@ def test_create_routes_from_namespace(specs): # not - strict try: routes = create_routes_from_namespace(specs, handlers, strict=False) - except Exception: # pylint: disable=W0703 + except Exception: # pylint: disable=W0703 pytest.fail("Non-strict failed", pytrace=True) # strict @@ -67,10 +67,10 @@ def test_prepends_basepath(specs): try: handlers = Handlers() routes = create_routes_from_namespace(specs, handlers, strict=False) - except Exception: # pylint: disable=W0703 + except Exception: # pylint: disable=W0703 pytest.fail("Non-strict failed", pytrace=True) basepath = openapi.get_base_path(specs) for route in routes: assert route.path.startswith(basepath) - assert route.handler.__name__[len("get_"):] in route.path + assert route.handler.__name__[len("get_") :] in route.path diff --git a/packages/service-library/tests/test_sandbox.py b/packages/service-library/tests/test_sandbox.py index b6cbd40f6e5..18d0f9ae519 100644 --- a/packages/service-library/tests/test_sandbox.py +++ b/packages/service-library/tests/test_sandbox.py @@ -12,12 +12,14 @@ def multi_doc_oas(here): assert openapi_path.exists() return openapi_path + @pytest.fixture def single_doc_oas(here): openapi_path = here / "data" / "oas3" / "petstore.yaml" assert openapi_path.exists() return openapi_path + async def test_multi_doc_openapi_specs(multi_doc_oas, single_doc_oas): try: # specs created out of multiple documents @@ -26,11 +28,12 @@ async def test_multi_doc_openapi_specs(multi_doc_oas, single_doc_oas): # a single-document spec single_doc_specs = await openapi.create_openapi_specs(single_doc_oas) - except Exception: # pylint: disable=W0703 + except Exception: # pylint: disable=W0703 pytest.fail("Failed specs validation") - assert single_doc_specs.paths.keys() == multi_doc_specs.paths.keys() - assert single_doc_specs.paths['/tags'].operations['get'].operation_id == \ - multi_doc_specs.paths['/tags'].operations['get'].operation_id + assert ( + single_doc_specs.paths["/tags"].operations["get"].operation_id + == multi_doc_specs.paths["/tags"].operations["get"].operation_id + ) diff --git a/packages/service-library/tests/tutils.py b/packages/service-library/tests/tutils.py index 9b4668fdc81..29a2165e2d4 100644 --- a/packages/service-library/tests/tutils.py +++ b/packages/service-library/tests/tutils.py @@ -8,41 +8,41 @@ from aiohttp import web from servicelib.rest_codecs import DataEncoder + @attr.s(auto_attribs=True) class Data: - x: int=3 - y: str="foo" + x: int = 3 + y: str = "foo" class Handlers: - async def get_health_wrong(self, request: web.Request): out = { - 'name':__name__.split('.')[0], - 'version': "1.0", - 'status': 'SERVICE_RUNNING', - 'invalid_entry': 125 + "name": __name__.split(".")[0], + "version": "1.0", + "status": "SERVICE_RUNNING", + "invalid_entry": 125, } return out async def get_health(self, request: web.Request): out = { - 'name':__name__.split('.')[0], - 'version': "1.0", - 'status': 'SERVICE_RUNNING', - 'api_version': "1.0" + "name": __name__.split(".")[0], + "version": "1.0", + "status": "SERVICE_RUNNING", + "api_version": "1.0", } return out async def get_dict(self, request: web.Request): - return {'x':3, 
'y':"3"} + return {"x": 3, "y": "3"} async def get_envelope(self, request: web.Request): - data = {'x':3, 'y':"3"} + data = {"x": 3, "y": "3"} return {"error": None, "data": data} async def get_list(self, request: web.Request): - return [ {'x':3, 'y':"3"} ]*3 + return [{"x": 3, "y": "3"}] * 3 async def get_attobj(self, request: web.Request): return Data(3, "3") @@ -54,15 +54,13 @@ async def get_number(self, request: web.Request): return 3 async def get_mixed(self, request: web.Request): - data = [{'x': 3, 'y': "3", 'z': [Data(3, "3")]*2}]*3 + data = [{"x": 3, "y": "3", "z": [Data(3, "3")] * 2}] * 3 return data - - @classmethod def get(cls, suffix, process=True): handlers = cls() - coro = getattr(handlers, "get_"+suffix) + coro = getattr(handlers, "get_" + suffix) loop = asyncio.get_event_loop() data = loop.run_until_complete(coro(None)) diff --git a/packages/service-library/tests/with_postgres/conftest.py b/packages/service-library/tests/with_postgres/conftest.py index dc3b3b29c59..86ef3646067 100644 --- a/packages/service-library/tests/with_postgres/conftest.py +++ b/packages/service-library/tests/with_postgres/conftest.py @@ -12,33 +12,31 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_compose_file() -> Path: # overrides fixture from https://github.com/AndreLouisCaron/pytest-docker - return current_dir / 'docker-compose.yml' + return current_dir / "docker-compose.yml" -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip, docker_compose_file) -> DataSourceName: # container environment with open(docker_compose_file) as fh: config = yaml.safe_load(fh) - environ = config['services']['postgres']['environment'] + environ = config["services"]["postgres"]["environment"] dsn = DataSourceName( - user=environ['POSTGRES_USER'], - password=environ['POSTGRES_PASSWORD'], + user=environ["POSTGRES_USER"], + password=environ["POSTGRES_PASSWORD"], host=docker_ip, - port=docker_services.port_for('postgres', 5432), - database=environ['POSTGRES_DB'], - application_name="test-app" + port=docker_services.port_for("postgres", 5432), + database=environ["POSTGRES_DB"], + application_name="test-app", ) # Wait until service is responsive. 
docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(dsn), - timeout=30.0, - pause=0.1, + check=lambda: is_postgres_responsive(dsn), timeout=30.0, pause=0.1, ) return dsn diff --git a/packages/service-library/tests/with_postgres/test_aiopg_utils.py b/packages/service-library/tests/with_postgres/test_aiopg_utils.py index 301b58e5b23..e74ff4019c4 100644 --- a/packages/service-library/tests/with_postgres/test_aiopg_utils.py +++ b/packages/service-library/tests/with_postgres/test_aiopg_utils.py @@ -14,34 +14,48 @@ import sqlalchemy as sa from aiohttp import web import asyncio -from servicelib.aiopg_utils import (DatabaseError, DataSourceName, - PostgresRetryPolicyUponOperation, - create_pg_engine, init_pg_tables, - is_pg_responsive, retry_pg_api) +from servicelib.aiopg_utils import ( + DatabaseError, + DataSourceName, + PostgresRetryPolicyUponOperation, + create_pg_engine, + init_pg_tables, + is_pg_responsive, + retry_pg_api, +) current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent metadata = sa.MetaData() -tbl = sa.Table('tbl', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('val', sa.String(255))) - - +tbl = sa.Table( + "tbl", + metadata, + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("val", sa.String(255)), +) @pytest.fixture -async def postgres_service_with_fake_data(request, loop, postgres_service: DataSourceName)-> DataSourceName: +async def postgres_service_with_fake_data( + request, loop, postgres_service: DataSourceName +) -> DataSourceName: async def _create_table(engine: aiopg.sa.Engine): async with engine.acquire() as conn: - await conn.execute(f'DROP TABLE IF EXISTS {tbl.name}') - await conn.execute(f'''CREATE TABLE {tbl.name} ( + await conn.execute(f"DROP TABLE IF EXISTS {tbl.name}") + await conn.execute( + f"""CREATE TABLE {tbl.name} ( id serial PRIMARY KEY, - val varchar(255))''') + val varchar(255))""" + ) dsn = deepcopy(postgres_service) - dsn.application_name = f"setup {request.module.__name__}.{request.function.__name__}" + dsn.application_name = ( + f"setup {request.module.__name__}.{request.function.__name__}" + ) - async with aiopg.sa.create_engine(dsn.to_uri(), application_name=dsn.application_name) as engine: + async with aiopg.sa.create_engine( + dsn.to_uri(), application_name=dsn.application_name + ) as engine: await _create_table(engine) dsn.application_name = f"{request.module.__name__}.{request.function.__name__}" @@ -52,8 +66,8 @@ def test_dsn_uri_with_query(postgres_service_with_fake_data): uri = postgres_service_with_fake_data.to_uri(with_query=True) try: sa_engine = sa.create_engine(uri, echo=True, echo_pool=True) - assert sa_engine.name == 'postgresql' - assert sa_engine.driver == 'psycopg2' + assert sa_engine.name == "postgresql" + assert sa_engine.driver == "psycopg2" # if url is wrong, these will fail metadata.create_all(sa_engine) @@ -93,6 +107,7 @@ async def test_create_pg_engine(postgres_service_with_fake_data): except ValueError: assert engine4.closed + @pytest.mark.skip(reason="for documentation only and needs a swarm") async def test_engine_when_idle_for_some_time(): # NOTE: this test needs a docker swarm and a running postgres service @@ -102,30 +117,31 @@ async def test_engine_when_idle_for_some_time(): host="127.0.0.1", port=5432, database="db", - application_name="test-app" + application_name="test-app", ) engine = await create_pg_engine(dsn, minsize=1, maxsize=1) init_pg_tables(dsn, metadata) # import pdb; pdb.set_trace() - assert not engine.closed # 
does not mean anything!!! + assert not engine.closed # does not mean anything!!! # pylint: disable=no-value-for-parameter async with engine.acquire() as conn: # writes - await conn.execute(tbl.insert().values(val=f'first')) + await conn.execute(tbl.insert().values(val=f"first")) # by default docker swarm kills connections that are idle for more than 15 minutes await asyncio.sleep(901) # import pdb; pdb.set_trace() async with engine.acquire() as conn: - await conn.execute(tbl.insert().values(val=f'third')) + await conn.execute(tbl.insert().values(val=f"third")) # import pdb; pdb.set_trace() - async def test_engine_when_pg_not_reachable(): - dsn = DataSourceName(database='db', user='foo', password='foo', host='localhost', port=123) + dsn = DataSourceName( + database="db", user="foo", password="foo", host="localhost", port=123 + ) with pytest.raises(psycopg2.OperationalError): await create_pg_engine(dsn) @@ -143,7 +159,9 @@ async def test_retry_pg_api_policy(postgres_service_with_fake_data, caplog): dsn = postgres_service_with_fake_data.to_uri() app_name = postgres_service_with_fake_data.application_name - async with aiopg.sa.create_engine(dsn, application_name=app_name, echo=True) as engine: + async with aiopg.sa.create_engine( + dsn, application_name=app_name, echo=True + ) as engine: # goes await dec_go(engine, gid=0) @@ -156,12 +174,17 @@ async def test_retry_pg_api_policy(postgres_service_with_fake_data, caplog): assert "Postgres service non-responsive, responding 503" in caplog.text print(dec_go.retry.statistics) - assert dec_go.total_retry_count() == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT+1 + assert ( + dec_go.total_retry_count() + == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT + 1 + ) # goes and keeps count of all retrials await dec_go(engine, gid=2) - assert dec_go.total_retry_count() == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT+2 - + assert ( + dec_go.total_retry_count() + == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT + 2 + ) # TODO: review tests below @@ -170,10 +193,10 @@ async def test_engine_when_pg_refuses(postgres_service_with_fake_data): dsn = postgres_service_with_fake_data dsn.password = "Wrong pass" - #async with create_pg_engine(dsn) as engine: + # async with create_pg_engine(dsn) as engine: engine = await create_pg_engine(dsn) - assert not engine.closed # does not mean anything!!! + assert not engine.closed # does not mean anything!!! # acquiring connection must fail with pytest.raises(RuntimeError) as execinfo: @@ -192,11 +215,9 @@ async def test_connections(postgres_service_with_fake_data): ## number of seconds after which connection is recycled, helps to deal with stale connections in pool, default value is -1, means recycling logic is disabled. 
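# e.g. with pool_recycle=2 (as below), a connection that sat idle in the pool
# for more than two seconds is dropped and a fresh one is opened on the next acquire()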
POOL_RECYCLE_SECS = 2 - async def conn_callback(conn): print(f"Opening {conn.raw}") - async with aiopg.sa.create_engine( dsn, minsize=20, @@ -204,10 +225,13 @@ async def conn_callback(conn): # timeout=1, pool_recycle=POOL_RECYCLE_SECS, echo=True, - enable_json=True, enable_hstore=True, enable_uuid=True, + enable_json=True, + enable_hstore=True, + enable_uuid=True, on_connect=conn_callback, # extra kwargs in https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS - application_name=app_name) as engine: + application_name=app_name, + ) as engine: # used and free connections # size_before = engine.size @@ -221,6 +245,7 @@ async def conn_callback(conn): # HELPERS ------------ + @retry_pg_api async def dec_go(*args, **kargs): return await go(*args, **kargs) @@ -231,14 +256,13 @@ async def go(engine: aiopg.sa.Engine, gid="", raise_cls=None): async with engine.acquire() as conn: # writes async with conn.begin(): - await conn.execute(tbl.insert().values(val=f'first-{gid}')) - await conn.execute(tbl.insert().values(val=f'second-{gid}')) + await conn.execute(tbl.insert().values(val=f"first-{gid}")) + await conn.execute(tbl.insert().values(val=f"second-{gid}")) if raise_cls is not None: raise raise_cls - # reads async for row in conn.execute(tbl.select()): print(row.id, row.val) - assert any(prefix in row.val for prefix in ('first', 'second')) + assert any(prefix in row.val for prefix in ("first", "second")) diff --git a/packages/simcore-sdk/Makefile b/packages/simcore-sdk/Makefile index bd91accd86d..2e0efa3ad9a 100644 --- a/packages/simcore-sdk/Makefile +++ b/packages/simcore-sdk/Makefile @@ -6,13 +6,14 @@ # - In windows, only WSL is supported # # by sanderegg, pcrespov -.DEFAULT_GOAL := help +# +include ../../scripts/common.Makefile .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests-unit tests-integration tests tests-unit: ## runs unit tests @@ -28,19 +29,3 @@ tests-integration: ## runs integration tests against local+production images tests: tests-unit tests-integration ## runs all tests # running tests - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '$(notdir $(CURDIR))':" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index a6e040d9d93..0b851af1ff9 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -12,21 +12,21 @@ attrs==19.1.0 certifi==2019.11.28 # via requests chardet==3.0.4 coverage==4.5.1 -coveralls==1.10.0 +coveralls==1.11.1 decorator==4.4.0 -docker==4.1.0 +docker==4.2.0 docopt==0.6.2 # via coveralls idna-ssl==1.1.0 idna==2.8 -importlib-metadata==1.3.0 # via pluggy, pytest +importlib-metadata==1.5.0 # via pluggy, pytest isort==4.3.21 # via pylint lazy-object-proxy==1.4.3 # via astroid mccabe==0.6.1 # via pylint -mock==3.0.5 -more-itertools==8.0.2 # via pytest, zipp +mock==4.0.1 +more-itertools==8.2.0 # via pytest multidict==4.5.2 networkx==2.3 -packaging==20.0 # via pytest, pytest-sugar +packaging==20.3 # via pytest, pytest-sugar pika==1.0.1 pluggy==0.13.1 # via pytest psycopg2-binary==2.8.4 @@ -35,25 +35,25 @@ pylint==2.4.4 pyparsing==2.4.6 # via packaging pytest-aiohttp==0.3.0 pytest-cov==2.8.1 -pytest-docker==0.6.1 +pytest-docker==0.7.2 pytest-instafail==0.4.1.post0 pytest-mock==2.0.0 pytest-runner==5.2 pytest-sugar==0.9.2 -pytest==5.3.2 +pytest==5.3.5 pyyaml==5.3 -requests==2.22.0 +requests==2.23.0 six==1.12.0 sqlalchemy==1.3.3 tenacity==6.0.0 termcolor==1.1.0 # via pytest-sugar trafaret-config==2.0.2 trafaret==1.2.0 -typed-ast==1.4.0 # via astroid +typed-ast==1.4.1 # via astroid typing-extensions==3.7.2 -urllib3==1.25.7 # via requests +urllib3==1.25.8 # via requests wcwidth==0.1.8 # via pytest websocket-client==0.57.0 # via docker wrapt==1.11.2 # via astroid yarl==1.3.0 -zipp==0.6.0 # via importlib-metadata +zipp==3.1.0 # via importlib-metadata diff --git a/scripts/check_requirements.sh b/scripts/check_requirements.bash similarity index 64% rename from scripts/check_requirements.sh rename to scripts/check_requirements.bash index 92fd3194ef2..06e3396578d 100644 --- a/scripts/check_requirements.sh +++ b/scripts/check_requirements.bash @@ -2,4 +2,4 @@ # lists all python packages used throughout all the repository that are not tied to a specific version -find . \( -name "requirements.txt" -o -name "common.txt" -o -name "devel.txt" -o -name "prod.txt" \) | xargs -I % grep -v "\-r " % | sort |uniq | awk '$0 !~ /==/' +find . 
\( -name "requirements.txt" -o -name "common.txt" -o -name "devel.txt" -o -name "prod.txt" \) | xargs -I % grep -v "\-r " % | sort |uniq | awk '$0 !~ /==/' diff --git a/scripts/code-climate.sh b/scripts/code-climate.bash old mode 100755 new mode 100644 similarity index 96% rename from scripts/code-climate.sh rename to scripts/code-climate.bash index 1cf7b7aa442..570f575a43a --- a/scripts/code-climate.sh +++ b/scripts/code-climate.bash @@ -19,7 +19,8 @@ docker run \ codeclimate/codeclimate "$@" -if [ -z "$@" ];then +if test -z "$@" +then echo "----" echo "Listing other engines (in dockers)" docker images codeclimate/* diff --git a/scripts/common.Makefile b/scripts/common.Makefile new file mode 100644 index 00000000000..dad682d6978 --- /dev/null +++ b/scripts/common.Makefile @@ -0,0 +1,120 @@ +# +# These are common target and recipes +# This file is included at the top of every Makefile +# $(CURDIR) in this file refers to the directory where this file is included +# +# SEE https://mattandre.ws/2016/05/makefile-inheritance/ +# + +# defaults +.DEFAULT_GOAL := help + +# Use bash not sh +SHELL := /bin/bash + +# Some handy flag variables +ifeq ($(filter Windows_NT,$(OS)),) +IS_WSL := $(if $(findstring Microsoft,$(shell uname -a)),WSL,) +IS_OSX := $(filter Darwin,$(shell uname -a)) +IS_LINUX:= $(if $(or $(IS_WSL),$(IS_OSX)),,$(filter Linux,$(shell uname -a))) +endif +IS_WIN := $(strip $(if $(or $(IS_LINUX),$(IS_OSX),$(IS_WSL)),,$(OS))) + +# version control +VCS_URL := $(shell git config --get remote.origin.url) +VCS_REF := $(shell git rev-parse --short HEAD) +NOW_TIMESTAMP := $(shell date -u +"%Y-%m-%dT%H:%M:%SZ") + + +$(if $(IS_WIN),\ +$(error Windows is not supported in all recipes. Use WSL instead. Follow instructions in README.md),) + + +# +# COMMON TASKS +# + + +.PHONY: help +# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html +help: + @echo "usage: make [target] ..." + @echo "" + @echo "Targets for '$(notdir $(CURDIR))':" + @echo "" + @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) + @echo "" + + +devenv: ## build development environment (using main services/docker-compose-build.yml) + @$(MAKE) --directory ${REPO_BASE_DIR} --no-print-directory $@ + + +GIT_CLEAN_ARGS = -dxf -e .vscode +clean: ## cleans all unversioned files in project and temp files create by this makefile + # Cleaning unversioned + @git clean -n $(GIT_CLEAN_ARGS) + @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] + @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] + @git clean $(GIT_CLEAN_ARGS) + + +info: ## displays basic info + # system + @echo ' OS : $(IS_LINUX)$(IS_OSX)$(IS_WSL)$(IS_WIN)' + @echo ' CURDIR : ${CURDIR}' + @echo ' NOW_TIMESTAMP : ${NOW_TIMESTAMP}' + @echo ' VCS_URL : ${VCS_URL}' + @echo ' VCS_REF : ${VCS_REF}' + # installed + @pip list + # version + @cat setup.py | grep name= + @cat setup.py | grep version= + + +.PHONY: autoformat +autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] + # auto formatting with black + @python3 -m black --verbose \ + --exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration|client-sdk)/" \ + $(CURDIR) + + +.PHONY: version-patch version-minor version-major +version-patch: ## commits version with bug fixes not affecting the cookiecuter config + $(_bumpversion) +version-minor: ## commits version with backwards-compatible API addition or changes (i.e. can replay) + $(_bumpversion) +version-major: ## commits version with backwards-INcompatible addition or changes + $(_bumpversion) + + +buil%: ## builds docker image (using main services/docker-compose-build.yml) + # building docker image for ${APP_NAME} + @$(MAKE) --directory ${REPO_BASE_DIR} --no-print-directory build target=${APP_NAME} + +# FIXME: +#.PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc +#build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker image build in many flavours +# # building docker image for ${APP_NAME} ... +# @$(MAKE) --directory ${REPO_BASE_DIR} $@ target=${APP_NAME} + +.PHONY: shell +shell: ## runs shell in production container + @$(MAKE) --directory ${REPO_BASE_DIR} --no-print-directory shell target=${APP_NAME} + +# +# SUBTASKS +# + +.PHONY: _check_venv_active +_check_venv_active: + # checking whether virtual environment was activated + @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" + + +define _bumpversion + # upgrades as $(subst version-,,$@) version, commits and tags + @bump2version --verbose --list $(subst version-,,$@) +endef diff --git a/scripts/openapi/oas_resolver/Dockerfile b/scripts/openapi/oas_resolver/Dockerfile index 6f15455cccb..58420fbffed 100644 --- a/scripts/openapi/oas_resolver/Dockerfile +++ b/scripts/openapi/oas_resolver/Dockerfile @@ -12,7 +12,7 @@ WORKDIR /src # update pip RUN pip install --no-cache-dir --upgrade \ - pip~=19.1.1 \ + pip~=20.0.2 \ wheel \ setuptools diff --git a/scripts/shellcheck b/scripts/shellcheck.bash old mode 100755 new mode 100644 similarity index 100% rename from scripts/shellcheck rename to scripts/shellcheck.bash diff --git a/scripts/upgrade_test_requirements.sh b/scripts/upgrade_test_requirements.bash old mode 100755 new mode 100644 similarity index 67% rename from scripts/upgrade_test_requirements.sh rename to scripts/upgrade_test_requirements.bash index 4226be7d031..7104212f8be --- a/scripts/upgrade_test_requirements.sh +++ b/scripts/upgrade_test_requirements.bash @@ -5,6 +5,6 @@ for path_to_req_test in $(find ../ -type f -name '_test.txt') do - rm --verbose $path_to_req_test - make --directory $(dirname -- $path_to_req_test) + rm --verbose "$path_to_req_test" + make --directory "$(dirname -- "$path_to_req_test")" done diff --git a/scripts/url-encoder.sh b/scripts/url-encoder.bash old mode 100755 new mode 100644 similarity index 100% rename from scripts/url-encoder.sh rename to scripts/url-encoder.bash diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile 
index c62bff3757d..48868012a0b 100644 --- a/services/catalog/Dockerfile +++ b/services/catalog/Dockerfile @@ -32,6 +32,16 @@ FROM base as build ENV SC_BUILD_TARGET build +# Installing client libraries and any other package you need +# +# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html +# libstdc++: needed in ujson https://github.com/kohlschutter/junixsocket/issues/33 +# +RUN apk update && \ + apk add --no-cache \ + libpq \ + libstdc++ + RUN apk add --no-cache \ alpine-sdk \ python3-dev \ @@ -39,7 +49,7 @@ RUN apk add --no-cache \ postgresql-dev RUN pip3 --no-cache-dir install --upgrade \ - pip \ + pip~=20.0.2 \ wheel \ setuptools diff --git a/services/catalog/Makefile b/services/catalog/Makefile index c722f864807..d333b222119 100644 --- a/services/catalog/Makefile +++ b/services/catalog/Makefile @@ -1,10 +1,7 @@ # # Targets for DEVELOPMENT of Components Catalog Service # - -# Makefile config -.DEFAULT_GOAL := help -SHELL = /bin/bash +include ../../scripts/common.Makefile # Custom variables APP_NAME := $(notdir $(CURDIR)) @@ -15,28 +12,18 @@ REPO_BASE_DIR = $(abspath $(CURDIR)/../../) VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) --directory ${REPO_BASE_DIR} $@ - - .PHONY: requirements requirements: ## compiles pip requirements (.in -> .txt) @$(MAKE) --directory requirements all -.check-venv-active: - # checking whether virtual environment was activated - @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" - .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: requirements .check-venv-active ## install app in development/production or CI mode +install-dev install-prod install-ci: requirements _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode pip-sync requirements/$(subst install-,,$@).txt PHONY: tests-unit tests-integration tests - tests: tests-unit tests-integration tests-unit: ## runs unit tests @@ -77,64 +64,15 @@ build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker @$(MAKE) --directory ${REPO_BASE_DIR} $@ target=${APP_NAME} -.PHONY: autoformat -autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] - # auto formatting with black - @python3 -m black --verbose $(CURDIR) - - .PHONY: openapi-specs openapi-specs: install-dev ## TODO: implementing a way to serialize openapi python3 -c "from simcore_service_catalog.main import *; dump_openapi()" -define _bumpversion - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) -endef - -version-patch: ## commits version with bug fixes not affecting the cookiecuter config - $(_bumpversion) -version-minor: ## commits version with backwards-compatible API addition or changes (i.e. can replay) - $(_bumpversion) -version-major: ## commits version with backwards-INcompatible addition or changes - $(_bumpversion) - - .PHONY: replay - replay: .cookiecutterrc ## re-applies cookiecutter # Replaying ../cookiecutter-simcore-py-fastapi/ ... 
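	# ($< expands to the first prerequisite, here the .cookiecutterrc fed to --config-file below)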
@cookiecutter --no-input --overwrite-if-exists \ --config-file=$< \ --output-dir="$(abspath $(CURDIR)/..)" \ "../cookiecutter-simcore-py-fastapi/" - - -.PHONY: info -info: ## displays information - # installed - @pip list - # version - @cat VERSION - - -.PHONY: clean clean-all -git_clean_args = -dxf -e .vscode - -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -n $(git_clean_args) - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @git clean $(git_clean_args) - - -#----------------------------------- -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '$(notdir $(CURDIR))':" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 8eedd5654e6..f4b78e441b7 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -19,14 +19,14 @@ bump2version==1.0.0 certifi==2019.11.28 chardet==3.0.4 click==7.0 -codecov==2.0.15 +codecov==2.0.16 coverage==5.0.3 # via codecov, coveralls, pytest-cov -coveralls==1.10.0 +coveralls==1.11.1 dataclasses==0.7 dnspython==1.16.0 docopt==0.6.2 # via coveralls email-validator==1.0.5 -faker==4.0.0 +faker==4.0.1 fastapi[all]==0.48.0 graphene==2.1.8 graphql-core==2.3.1 @@ -44,9 +44,9 @@ markupsafe==1.1.1 mccabe==0.6.1 # via pylint more-itertools==8.2.0 # via pytest multidict==4.7.4 -packaging==20.1 # via pytest +packaging==20.3 # via pytest pathspec==0.7.0 # via black -pip-tools==4.4.1 +pip-tools==4.5.1 pluggy==0.13.1 # via pytest promise==2.3 psycopg2-binary==2.8.4 @@ -57,14 +57,14 @@ pylint==2.4.4 pyparsing==2.4.6 # via packaging pytest-aiohttp==0.3.0 pytest-cov==2.8.1 -pytest-docker==0.7.1 +pytest-docker==0.7.2 pytest-mock==2.0.0 pytest-runner==5.2 pytest==5.3.5 python-dateutil==2.8.1 # via faker python-multipart==0.0.5 pyyaml==5.3 -regex==2020.2.18 # via black +regex==2020.2.20 # via black requests==2.22.0 rope==0.16.0 rx==1.6.1 @@ -84,4 +84,4 @@ wcwidth==0.1.8 # via pytest websockets==8.1 wrapt==1.11.2 # via astroid yarl==1.4.2 -zipp==2.1.0 # via importlib-metadata +zipp==3.1.0 # via importlib-metadata diff --git a/services/catalog/src/simcore_service_catalog/db.py b/services/catalog/src/simcore_service_catalog/db.py index e318de15279..675ec192395 100644 --- a/services/catalog/src/simcore_service_catalog/db.py +++ b/services/catalog/src/simcore_service_catalog/db.py @@ -35,6 +35,7 @@ async def teardown_engine() -> None: async def create_tables(conn: SAConnection): + # FIXME: this is dangerous since it enforces an empty table await conn.execute(f"DROP TABLE IF EXISTS {DAG.__tablename__}") await conn.execute(CreateTable(dags)) diff --git a/services/director/Dockerfile b/services/director/Dockerfile index f6a13b59b3b..412da6e1408 100644 --- a/services/director/Dockerfile +++ b/services/director/Dockerfile @@ -47,16 +47,21 @@ ENV SC_BUILD_TARGET build # Installing client libraries and any other package you need # -# - client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html +# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html +# libstdc++: needed in ujson 
https://github.com/kohlschutter/junixsocket/issues/33 # RUN apk update && \ apk add --no-cache \ - libpq + libpq \ + libstdc++ + + + # Installing build dependencies (will be deleted in production) RUN apk add --virtual .build-deps \ git \ - gcc \ + g++ \ libc-dev \ python-dev \ musl-dev \ @@ -65,7 +70,7 @@ RUN apk add --virtual .build-deps \ RUN $SC_PIP install --upgrade \ - pip~=19.1.1 \ + pip~=20.0.2 \ wheel \ setuptools @@ -150,9 +155,6 @@ FROM build as development ENV SC_BUILD_TARGET development -# WORKDIR /build -# NOTE: can copy from /build if necessary - WORKDIR /devel VOLUME /devel/packages VOLUME /devel/services/director/ diff --git a/services/director/Makefile b/services/director/Makefile index c4b164283eb..4c2544348db 100644 --- a/services/director/Makefile +++ b/services/director/Makefile @@ -1,4 +1,8 @@ -.DEFAULT_GOAL := help +# +# Targets for DEVELOPMENT for Director service +# +include ../../scripts/common.Makefile + REPO_BASE_DIR = $(abspath $(CURDIR)/../../) VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) @@ -6,21 +10,15 @@ VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) APP_NAME := $(notdir $(CURDIR)) - -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} $@ - - .PHONY: openapi-specs openapi-specs: ## updates and validates openapi specifications $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @$(VENV_DIR)/bin/pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests @@ -31,36 +29,4 @@ tests: ## runs unit tests .PHONY: build build: openapi-specs ## builds docker image (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} target=${APP_NAME} $@ - - -.PHONY: version-patch version-minor -version-patch version-minor: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '${APP_NAME}':" - @echo "" - @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" + @make --no-print-directory build-super diff --git a/services/director/requirements/_base.in b/services/director/requirements/_base.in index efcc79cb726..bbc1072a7b5 100644 --- a/services/director/requirements/_base.in +++ b/services/director/requirements/_base.in @@ -1,6 +1,7 @@ # # Specifies third-party dependencies for 'director' # +urllib3>=1.25.8 # Vulnerability pyyaml>=5.3 # Vulnerable aiohttp diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index a8c843ac05f..7b75d5a67a4 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -4,22 +4,22 @@ # # pip-compile --output-file=_base.txt _base.in # -aiodocker==0.14.0 -aiohttp==3.3.2 -git+https://github.com/ITISFoundation/aiohttp_apiset.git@fixes_4_osparc#egg=aiohttp_apiset +aiodocker==0.14.0 # via -r _base.in +aiohttp==3.3.2 # via -r _base.in, aiodocker, aiohttp-apiset +git+https://github.com/ITISFoundation/aiohttp_apiset.git@fixes_4_osparc#egg=aiohttp_apiset # via -r _base.in async-generator==1.10 # via asyncio-extras async-timeout==3.0.1 # via aiohttp -asyncio-extras==1.3.2 +asyncio-extras==1.3.2 # via -r _base.in attrs==19.1.0 # via aiohttp certifi==2019.3.9 # via requests chardet==3.0.4 # via aiohttp, requests idna-ssl==1.1.0 # via aiohttp idna==2.8 # via idna-ssl, requests, yarl -jsonschema==2.6.0 +jsonschema==2.6.0 # via aiohttp-apiset multidict==4.5.2 # via aiohttp, yarl -pyyaml==5.3 -requests==2.22.0 +pyyaml==5.3 # via -r _base.in, aiohttp-apiset +requests==2.22.0 # via -r _base.in six==1.12.0 # via tenacity -tenacity==6.0.0 -urllib3==1.25.2 # via requests +tenacity==6.0.0 # via -r _base.in +urllib3==1.25.8 # via -r _base.in, requests yarl==1.3.0 # via aiodocker, aiohttp diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index a6892d520d2..7e9f2b6d5c4 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -5,7 +5,6 @@ # frozen specs -r _base.txt - # testing coverage==4.5.1 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 pytest diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 5d0c3a17692..9b56465a6e8 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -4,53 +4,53 @@ # # pip-compile --output-file=_test.txt _test.in # -aiodocker==0.14.0 -aiohttp==3.3.2 -git+https://github.com/ITISFoundation/aiohttp_apiset.git@fixes_4_osparc#egg=aiohttp_apiset +aiodocker==0.14.0 # via -r _base.txt +aiohttp==3.3.2 # via -r _base.txt, aiodocker, aiohttp-apiset, pytest-aiohttp +git+https://github.com/ITISFoundation/aiohttp_apiset.git@fixes_4_osparc#egg=aiohttp_apiset # via -r _base.txt astroid==2.3.3 # via pylint -async-generator==1.10 -async-timeout==3.0.1 -asyncio-extras==1.3.2 -attrs==19.1.0 -certifi==2019.3.9 -chardet==3.0.4 -codecov==2.0.15 -coverage==4.5.1 -coveralls==1.10.0 -docker==4.1.0 +async-generator==1.10 # via -r _base.txt, asyncio-extras +async-timeout==3.0.1 # via -r _base.txt, aiohttp +asyncio-extras==1.3.2 # via -r _base.txt +attrs==19.1.0 # via 
-r _base.txt, aiohttp, pytest +certifi==2019.3.9 # via -r _base.txt, requests +chardet==3.0.4 # via -r _base.txt, aiohttp, requests +codecov==2.0.16 # via -r _test.in +coverage==4.5.1 # via -r _test.in, codecov, coveralls, pytest-cov +coveralls==1.11.1 # via -r _test.in +docker==4.2.0 # via -r _test.in docopt==0.6.2 # via coveralls -idna-ssl==1.1.0 -idna==2.8 -importlib-metadata==1.3.0 # via pluggy, pytest +idna-ssl==1.1.0 # via -r _base.txt, aiohttp +idna==2.8 # via -r _base.txt, idna-ssl, requests, yarl +importlib-metadata==1.5.0 # via pluggy, pytest isort==4.3.21 # via pylint -jsonschema==2.6.0 +jsonschema==2.6.0 # via -r _base.txt, aiohttp-apiset, openapi-spec-validator lazy-object-proxy==1.4.3 # via astroid mccabe==0.6.1 # via pylint -more-itertools==8.0.2 # via pytest, zipp -multidict==4.5.2 -openapi-spec-validator==0.2.8 -packaging==20.0 # via pytest, pytest-sugar +more-itertools==8.2.0 # via pytest +multidict==4.5.2 # via -r _base.txt, aiohttp, yarl +openapi-spec-validator==0.2.8 # via -r _test.in +packaging==20.3 # via pytest, pytest-sugar pluggy==0.13.1 # via pytest -ptvsd==4.3.2 +ptvsd==4.3.2 # via -r _test.in py==1.8.1 # via pytest -pylint==2.4.4 +pylint==2.4.4 # via -r _test.in pyparsing==2.4.6 # via packaging -pytest-aiohttp==0.3.0 -pytest-cov==2.8.1 -pytest-instafail==0.4.1.post0 -pytest-mock==2.0.0 -pytest-runner==5.2 -pytest-sugar==0.9.2 -pytest==5.3.2 -pyyaml==5.3 -requests==2.22.0 -six==1.12.0 -tenacity==6.0.0 +pytest-aiohttp==0.3.0 # via -r _test.in +pytest-cov==2.8.1 # via -r _test.in +pytest-instafail==0.4.1.post0 # via -r _test.in +pytest-mock==2.0.0 # via -r _test.in +pytest-runner==5.2 # via -r _test.in +pytest-sugar==0.9.2 # via -r _test.in +pytest==5.3.5 # via -r _test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar +pyyaml==5.3 # via -r _base.txt, aiohttp-apiset, openapi-spec-validator +requests==2.22.0 # via -r _base.txt, codecov, coveralls, docker +six==1.12.0 # via -r _base.txt, astroid, docker, openapi-spec-validator, packaging, tenacity, websocket-client +tenacity==6.0.0 # via -r _base.txt termcolor==1.1.0 # via pytest-sugar -typed-ast==1.4.0 # via astroid -urllib3==1.25.2 +typed-ast==1.4.1 # via astroid +urllib3==1.25.8 # via -r _base.txt, requests wcwidth==0.1.8 # via pytest websocket-client==0.57.0 # via docker wrapt==1.11.2 # via astroid -yarl==1.3.0 -zipp==0.6.0 # via importlib-metadata +yarl==1.3.0 # via -r _base.txt, aiodocker, aiohttp +zipp==3.1.0 # via importlib-metadata diff --git a/services/director/src/simcore_service_director/api/v0/openapi.yaml b/services/director/src/simcore_service_director/api/v0/openapi.yaml index fb3cb857d2f..55e20af3fc9 100644 --- a/services/director/src/simcore_service_director/api/v0/openapi.yaml +++ b/services/director/src/simcore_service_director/api/v0/openapi.yaml @@ -161,6 +161,11 @@ paths: description: distinctive name for the node based on the docker registry path pattern: '^(simcore)/(services)/(comp|dynamic)(/[^\s/]+)+$' example: simcore/services/comp/itis/sleeper + integration-version: + type: string + description: integration version number + pattern: '^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$' + example: 1.0.0 version: type: string description: semantic version number @@ -540,6 +545,11 @@ paths: description: distinctive name for the node based on the docker registry path pattern: '^(simcore)/(services)/(comp|dynamic)(/[^\s/]+)+$' example: 
simcore/services/comp/itis/sleeper + integration-version: + type: string + description: integration version number + pattern: '^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$' + example: 1.0.0 version: type: string description: semantic version number @@ -1967,6 +1977,11 @@ components: description: distinctive name for the node based on the docker registry path pattern: '^(simcore)/(services)/(comp|dynamic)(/[^\s/]+)+$' example: simcore/services/comp/itis/sleeper + integration-version: + type: string + description: integration version number + pattern: '^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$' + example: 1.0.0 version: type: string description: semantic version number diff --git a/services/director/src/simcore_service_director/api/v0/schemas/node-meta-v0.0.1.json b/services/director/src/simcore_service_director/api/v0/schemas/node-meta-v0.0.1.json index b4978cbd794..2909aae313a 100644 --- a/services/director/src/simcore_service_director/api/v0/schemas/node-meta-v0.0.1.json +++ b/services/director/src/simcore_service_director/api/v0/schemas/node-meta-v0.0.1.json @@ -26,6 +26,14 @@ "simcore/services/dynamic/3dviewer" ] }, + "integration-version": { + "type": "string", + "description": "integration version number", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "examples": [ + "1.0.0" + ] + }, "version": { "type": "string", "description": "semantic version number", diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 2e61606840a..746014c80fd 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -195,6 +195,8 @@ async def list_services(app: web.Application, service_type: ServiceType) -> List for repo_details in results: if repo_details and isinstance(repo_details, list): services.extend(repo_details) + elif isinstance(repo_details, Exception): + _logger.error("Exception occured while listing services %s", repo_details) return services async def list_interactive_service_dependencies(app: web.Application, service_key: str, service_tag: str) -> List[Dict]: diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index 1799c4e3462..4ee03852db7 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -13,6 +13,7 @@ _logger = logging.getLogger(__name__) + @pytest.fixture(scope="function") def push_services(loop, docker_registry, tmpdir): registry_url = docker_registry @@ -20,19 +21,50 @@ def push_services(loop, docker_registry, tmpdir): list_of_pushed_images_tags = [] dependent_images = [] - def build_push_images(number_of_computational_services, number_of_interactive_services, inter_dependent_services=False, bad_json_format=False, version="1.0."): + + def build_push_images( + number_of_computational_services, + number_of_interactive_services, + inter_dependent_services=False, + bad_json_format=False, + version="1.0.", + ): try: dependent_image = None if inter_dependent_services: - dependent_image = _build_push_image(tmp_dir, registry_url, 
"computational", "dependency", "10.52.999999", None, bad_json_format=bad_json_format) + dependent_image = _build_push_image( + tmp_dir, + registry_url, + "computational", + "dependency", + "10.52.999999", + None, + bad_json_format=bad_json_format, + ) dependent_images.append(dependent_image) for image_index in range(0, number_of_computational_services): - image = _build_push_image(tmp_dir, registry_url, "computational", "test", version + str(image_index), dependent_image, bad_json_format=bad_json_format) + image = _build_push_image( + tmp_dir, + registry_url, + "computational", + "test", + version + str(image_index), + dependent_image, + bad_json_format=bad_json_format, + ) list_of_pushed_images_tags.append(image) for image_index in range(0, number_of_interactive_services): - image = _build_push_image(tmp_dir, registry_url, "dynamic", "test", version + str(image_index), dependent_image, bad_json_format=bad_json_format) + image = _build_push_image( + tmp_dir, + registry_url, + "dynamic", + "test", + version + str(image_index), + dependent_image, + bad_json_format=bad_json_format, + ) list_of_pushed_images_tags.append(image) except docker.errors.APIError: _logger.exception("Unexpected docker API error") @@ -45,70 +77,110 @@ def build_push_images(number_of_computational_services, number_of_interactive_se _clean_registry(registry_url, list_of_pushed_images_tags) _clean_registry(registry_url, dependent_images) -def _build_push_image(docker_dir, registry_url, service_type, name, tag, dependent_image=None, *, bad_json_format=False): # pylint: disable=R0913 + +def _build_push_image( + docker_dir, + registry_url, + service_type, + name, + tag, + dependent_image=None, + *, + bad_json_format=False +): # pylint: disable=R0913 docker_client = docker.from_env() # crate image service_description = _create_service_description(service_type, name, tag) docker_labels = _create_docker_labels(service_description, bad_json_format) - additional_docker_labels = [{"name": "constraints", "type": "string", "value": ["node.role==manager"]}] + additional_docker_labels = [ + {"name": "constraints", "type": "string", "value": ["node.role==manager"]} + ] internal_port = None - entry_point = '' + entry_point = "" if service_type == "dynamic": internal_port = random.randint(1, 65535) - additional_docker_labels.append({"name": "ports", "type": "int", "value": internal_port}) + additional_docker_labels.append( + {"name": "ports", "type": "int", "value": internal_port} + ) entry_point = "/test/entry_point" - docker_labels["simcore.service.bootsettings"] = json.dumps([{"name": "entry_point", "type": "string", "value": entry_point}]) + docker_labels["simcore.service.bootsettings"] = json.dumps( + [{"name": "entry_point", "type": "string", "value": entry_point}] + ) docker_labels["simcore.service.settings"] = json.dumps(additional_docker_labels) if bad_json_format: - docker_labels["simcore.service.settings"] = "'fjks" + docker_labels["simcore.service.settings"] + docker_labels["simcore.service.settings"] = ( + "'fjks" + docker_labels["simcore.service.settings"] + ) if dependent_image is not None: dependent_description = dependent_image["service_description"] - dependency_docker_labels = [{"key":dependent_description["key"], "tag":dependent_description["version"]}] - docker_labels["simcore.service.dependencies"] = json.dumps(dependency_docker_labels) + dependency_docker_labels = [ + { + "key": dependent_description["key"], + "tag": dependent_description["version"], + } + ] + docker_labels["simcore.service.dependencies"] = 
json.dumps( + dependency_docker_labels + ) if bad_json_format: - docker_labels["simcore.service.dependencies"] = "'fjks" + docker_labels["simcore.service.dependencies"] + docker_labels["simcore.service.dependencies"] = ( + "'fjks" + docker_labels["simcore.service.dependencies"] + ) image = _create_base_image(docker_dir, docker_labels) # tag image - image_tag = registry_url + "/{key}:{version}".format(key=service_description["key"], version=tag) + image_tag = registry_url + "/{key}:{version}".format( + key=service_description["key"], version=tag + ) assert image.tag(image_tag) is True # push image to registry docker_client.images.push(image_tag) # remove image from host docker_client.images.remove(image_tag) return { - "service_description":service_description, - "docker_labels":docker_labels, - "image_path":image_tag, - "internal_port":internal_port, - "entry_point": entry_point - } + "service_description": service_description, + "docker_labels": docker_labels, + "image_path": image_tag, + "internal_port": internal_port, + "entry_point": entry_point, + } + def _clean_registry(registry_url, list_of_images): - request_headers = {'accept': "application/vnd.docker.distribution.manifest.v2+json"} + request_headers = {"accept": "application/vnd.docker.distribution.manifest.v2+json"} for image in list_of_images: service_description = image["service_description"] # get the image digest tag = service_description["version"] - url = "http://{host}/v2/{name}/manifests/{tag}".format(host=registry_url, name=service_description["key"], tag=tag) + url = "http://{host}/v2/{name}/manifests/{tag}".format( + host=registry_url, name=service_description["key"], tag=tag + ) response = requests.get(url, headers=request_headers) docker_content_digest = response.headers["Docker-Content-Digest"] # remove the image from the registry - url = "http://{host}/v2/{name}/manifests/{digest}".format(host=registry_url, name=service_description["key"], digest=docker_content_digest) + url = "http://{host}/v2/{name}/manifests/{digest}".format( + host=registry_url, + name=service_description["key"], + digest=docker_content_digest, + ) response = requests.delete(url, headers=request_headers) + def _create_base_image(base_dir, labels): # create a basic dockerfile docker_file = base_dir / "Dockerfile" with docker_file.open("w") as file_pointer: - file_pointer.write('FROM alpine\nCMD while true; do sleep 10; done\n') + file_pointer.write("FROM alpine\nCMD while true; do sleep 10; done\n") assert docker_file.exists() == True # build docker base image docker_client = docker.from_env() - base_docker_image = docker_client.images.build(path=str(base_dir), rm=True, labels=labels) + base_docker_image = docker_client.images.build( + path=str(base_dir), rm=True, labels=labels + ) return base_docker_image[0] + def _create_service_description(service_type, name, tag): file_name = "dummy_service_description-v1.json" dummy_description_path = Path(__file__).parent / file_name @@ -125,10 +197,13 @@ def _create_service_description(service_type, name, tag): return service_desc + def _create_docker_labels(service_description, bad_json_format): docker_labels = {} for key, value in service_description.items(): - docker_labels[".".join(["io", "simcore", key])] = json.dumps({key:value}) + docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) if bad_json_format: - docker_labels[".".join(["io", "simcore", key])] = "d32;'" + docker_labels[".".join(["io", "simcore", key])] + docker_labels[".".join(["io", "simcore", key])] = ( + "d32;'" + 
docker_labels[".".join(["io", "simcore", key])] + ) return docker_labels diff --git a/services/director/tests/test_registry_cache_task.py b/services/director/tests/test_registry_cache_task.py index a096972eb67..ced8f50e479 100644 --- a/services/director/tests/test_registry_cache_task.py +++ b/services/director/tests/test_registry_cache_task.py @@ -4,22 +4,28 @@ import pytest -from simcore_service_director import (config, main, registry_cache_task, - registry_proxy) +from simcore_service_director import config, main, registry_cache_task, registry_proxy @pytest.fixture -def client(loop, aiohttp_client, aiohttp_unused_port, configure_schemas_location, configure_registry_access): +def client( + loop, + aiohttp_client, + aiohttp_unused_port, + configure_schemas_location, + configure_registry_access, +): config.DIRECTOR_REGISTRY_CACHING = True config.DIRECTOR_REGISTRY_CACHING_TTL = 5 # config.DIRECTOR_REGISTRY_CACHING_TTL = 5 app = main.setup_app() - server_kwargs={'port': aiohttp_unused_port(), 'host': 'localhost'} + server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} registry_cache_task.setup(app) yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) + async def test_registry_caching_task(loop, client, push_services): app = client.app assert app @@ -30,17 +36,29 @@ async def test_registry_caching_task(loop, client, push_services): assert registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY in app # check we do not get any repository - list_of_services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) + list_of_services = await registry_proxy.list_services( + app, registry_proxy.ServiceType.ALL + ) assert not list_of_services assert app[registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY] != {} # create services in the registry - pushed_services = push_services(1,1) + pushed_services = push_services( + number_of_computational_services=1, number_of_interactive_services=1 + ) # the services shall be updated await sleep(config.DIRECTOR_REGISTRY_CACHING_TTL) - list_of_services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) + list_of_services = await registry_proxy.list_services( + app, registry_proxy.ServiceType.ALL + ) assert len(list_of_services) == 2 # add more - pushed_services = push_services(2,2, version="2.0.") - await sleep(config.DIRECTOR_REGISTRY_CACHING_TTL) - list_of_services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) - assert len(list_of_services) == len(pushed_services) \ No newline at end of file + pushed_services = push_services( + number_of_computational_services=2, + number_of_interactive_services=2, + version="2.0.", + ) + await sleep(config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1) # NOTE: this sometimes takes a bit more. Sleep increased by 10%. 
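# A hedged alternative to padding the TTL sleep above: poll until the cache
# has refreshed or a deadline expires, so the test neither flakes nor waits
# longer than needed. Sketch only: `wait_for_services` is a hypothetical
# helper, while `registry_proxy`, `ServiceType.ALL` and `sleep` are the ones
# already used by the surrounding test module.
import time

async def wait_for_services(app, expected_count: int, timeout: float) -> list:
    deadline = time.monotonic() + timeout
    while True:
        services = await registry_proxy.list_services(
            app, registry_proxy.ServiceType.ALL
        )
        # stop as soon as the cache caught up, or give up at the deadline
        if len(services) >= expected_count or time.monotonic() > deadline:
            return services
        await sleep(0.5)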
+ list_of_services = await registry_proxy.list_services( + app, registry_proxy.ServiceType.ALL + ) + assert len(list_of_services) == len(pushed_services) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 1167d25479e..374e93c1478 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -10,7 +10,7 @@ services: - POSTGRES_HOST=${POSTGRES_HOST} - POSTGRES_PORT=${POSTGRES_PORT} - TESTING=false - - LOGLEVEL=WARNING + - LOGLEVEL=${LOG_LEVEL:-WARNING} depends_on: - postgres networks: @@ -64,7 +64,7 @@ services: - STORAGE_PORT=8080 - SWARM_STACK_NAME=${SWARM_STACK_NAME:-simcore} - WEBSERVER_MONITORING_ENABLED=1 - - WEBSERVER_LOGLEVEL=WARNING + - WEBSERVER_LOGLEVEL=${LOG_LEVEL:-WARNING} env_file: - ../.env depends_on: @@ -118,7 +118,7 @@ services: - REGISTRY_USER=${REGISTRY_USER} - REGISTRY_PW=${REGISTRY_PW} - SWARM_STACK_NAME=${SWARM_STACK_NAME:-simcore} - - SIDECAR_LOGLEVEL=WARNING + - SIDECAR_LOGLEVEL=${LOG_LEVEL:-WARNING} depends_on: - rabbit - postgres @@ -135,7 +135,7 @@ services: - POSTGRES_DB=${POSTGRES_DB} - POSTGRES_HOST=${POSTGRES_HOST} - POSTGRES_PORT=${POSTGRES_PORT} - - STORAGE_LOGLEVEL=WARNING + - STORAGE_LOGLEVEL=${LOG_LEVEL:-WARNING} - STORAGE_MONITORING_ENABLED=1 - S3_ENDPOINT=${S3_ENDPOINT} - S3_ACCESS_KEY=${S3_ACCESS_KEY} diff --git a/services/sidecar/Dockerfile b/services/sidecar/Dockerfile index 0945d7afa71..71de3080257 100644 --- a/services/sidecar/Dockerfile +++ b/services/sidecar/Dockerfile @@ -51,7 +51,7 @@ RUN apk add --no-cache \ libc-dev RUN $SC_PIP install --upgrade \ - pip~=19.1.1 \ + pip~=20.0.2 \ wheel \ setuptools diff --git a/services/sidecar/requirements/_base.in b/services/sidecar/requirements/_base.in index c26893d6171..7f488cfa53b 100644 --- a/services/sidecar/requirements/_base.in +++ b/services/sidecar/requirements/_base.in @@ -2,7 +2,7 @@ # Specifies third-party dependencies for 'sidecar' # - +urllib3>=1.25.8 # Vulnerability sqlalchemy>=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164 psycopg2-binary # enforces binary version - http://initd.org/psycopg/docs/install.html#binary-install-from-pypi diff --git a/services/sidecar/requirements/_base.txt b/services/sidecar/requirements/_base.txt index 6a75a91b8be..f01e03a3474 100644 --- a/services/sidecar/requirements/_base.txt +++ b/services/sidecar/requirements/_base.txt @@ -2,28 +2,28 @@ # This file is autogenerated by pip-compile # To update, run: # -# make _base.txt +# pip-compile --output-file=_base.txt _base.in # amqp==2.4.2 # via kombu billiard==3.6.0.0 # via celery -celery==4.3.0 +celery==4.3.0 # via -r _base.in certifi==2019.3.9 # via minio, requests chardet==3.0.4 # via requests decorator==4.4.0 # via networkx docker-pycreds==0.4.0 # via docker -docker==3.7.2 +docker==3.7.2 # via -r _base.in idna==2.8 # via requests -kombu==4.5.0 -minio==4.0.16 -networkx==2.3 -pika==1.0.1 -psycopg2-binary==2.8.4 +kombu==4.5.0 # via -r _base.in, celery +minio==4.0.16 # via -r _base.in +networkx==2.3 # via -r _base.in +pika==1.0.1 # via -r _base.in +psycopg2-binary==2.8.4 # via -r _base.in python-dateutil==2.8.0 # via minio pytz==2019.1 # via celery, minio requests==2.22.0 # via docker six==1.12.0 # via docker, docker-pycreds, python-dateutil, tenacity, websocket-client -sqlalchemy==1.3.3 -tenacity==6.0.0 -urllib3==1.25.2 # via minio, requests +sqlalchemy==1.3.3 # via -r _base.in +tenacity==6.0.0 # via -r _base.in +urllib3==1.25.8 # via -r _base.in, minio, requests vine==1.3.0 # via amqp, celery websocket-client==0.56.0 # via docker diff --git 
a/services/sidecar/requirements/_test.txt b/services/sidecar/requirements/_test.txt index 1e0eabda67f..6d4b68a5700 100644 --- a/services/sidecar/requirements/_test.txt +++ b/services/sidecar/requirements/_test.txt @@ -4,60 +4,54 @@ # # pip-compile --output-file=_test.txt _test.in # -aiohttp==3.6.2 -aiopg==1.0.0 -amqp==2.4.2 +aiopg==1.0.0 # via -r _test.in +amqp==2.4.2 # via -r _base.txt, kombu astroid==2.3.3 # via pylint -async-timeout==3.0.1 -attrs==19.3.0 -billiard==3.6.0.0 -celery==4.3.0 -certifi==2019.3.9 -chardet==3.0.4 -coverage==4.5.1 -coveralls==1.10.0 -decorator==4.4.0 -docker-pycreds==0.4.0 -docker==3.7.2 +attrs==19.3.0 # via pytest, pytest-docker +billiard==3.6.0.0 # via -r _base.txt, celery +celery==4.3.0 # via -r _base.txt +certifi==2019.3.9 # via -r _base.txt, minio, requests +chardet==3.0.4 # via -r _base.txt, requests +coverage==4.5.1 # via -r _test.in, coveralls, pytest-cov +coveralls==1.11.1 # via -r _test.in +decorator==4.4.0 # via -r _base.txt, networkx +docker-pycreds==0.4.0 # via -r _base.txt, docker +docker==3.7.2 # via -r _base.txt docopt==0.6.2 # via coveralls -idna-ssl==1.1.0 -idna==2.8 -importlib-metadata==1.3.0 # via pluggy, pytest +idna==2.8 # via -r _base.txt, requests +importlib-metadata==1.5.0 # via pluggy, pytest isort==4.3.21 # via pylint -kombu==4.5.0 +kombu==4.5.0 # via -r _base.txt, celery lazy-object-proxy==1.4.3 # via astroid mccabe==0.6.1 # via pylint -minio==4.0.16 -more-itertools==8.0.2 # via pytest, zipp -multidict==4.5.2 -networkx==2.3 -packaging==20.0 # via pytest, pytest-sugar -pika==1.0.1 +minio==4.0.16 # via -r _base.txt +more-itertools==8.2.0 # via pytest +networkx==2.3 # via -r _base.txt +packaging==20.3 # via pytest, pytest-sugar +pika==1.0.1 # via -r _base.txt pluggy==0.13.1 # via pytest -psycopg2-binary==2.8.4 -ptvsd==4.3.2 +psycopg2-binary==2.8.4 # via -r _base.txt, aiopg +ptvsd==4.3.2 # via -r _test.in py==1.8.1 # via pytest -pylint==2.4.4 +pylint==2.4.4 # via -r _test.in pyparsing==2.4.6 # via packaging -pytest-cov==2.8.1 -pytest-docker==0.6.1 -pytest-instafail==0.4.1.post0 -pytest-mock==2.0.0 -pytest-sugar==0.9.2 -pytest==5.3.2 -python-dateutil==2.8.0 -pytz==2019.1 -requests==2.22.0 -six==1.12.0 -sqlalchemy==1.3.3 -tenacity==5.0.4 +pytest-cov==2.8.1 # via -r _test.in +pytest-docker==0.7.2 # via -r _test.in +pytest-instafail==0.4.1.post0 # via -r _test.in +pytest-mock==2.0.0 # via -r _test.in +pytest-sugar==0.9.2 # via -r _test.in +pytest==5.3.5 # via -r _test.in, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar +python-dateutil==2.8.0 # via -r _base.txt, minio +pytz==2019.1 # via -r _base.txt, celery, minio +requests==2.22.0 # via -r _base.txt, coveralls, docker +six==1.12.0 # via -r _base.txt, astroid, docker, docker-pycreds, packaging, python-dateutil, tenacity, websocket-client +sqlalchemy==1.3.3 # via -r _base.txt +tenacity==6.0.0 # via -r _base.txt termcolor==1.1.0 # via pytest-sugar -typed-ast==1.4.0 # via astroid -typing-extensions==3.7.4 -urllib3==1.25.2 -vine==1.3.0 +typed-ast==1.4.1 # via astroid +urllib3==1.25.8 # via -r _base.txt, minio, requests +vine==1.3.0 # via -r _base.txt, amqp, celery wcwidth==0.1.8 # via pytest -websocket-client==0.56.0 +websocket-client==0.56.0 # via -r _base.txt, docker wrapt==1.11.2 # via astroid -yarl==1.3.0 -zipp==0.6.0 # via importlib-metadata +zipp==3.1.0 # via importlib-metadata diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index fea2b89e0bd..512ff02b9e6 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -37,6 +37,16 @@ EXPOSE 8080 
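# The pip~=20.0.2 pins applied across these service Dockerfiles use pip's
# compatible-release operator: ~=20.0.2 is equivalent to >=20.0.2,<20.1,
# so patch releases are picked up while minor bumps are excluded. A quick
# way to check the semantics (sketch, assuming the `packaging` distribution
# is installed):
#
#   from packaging.specifiers import SpecifierSet
#   spec = SpecifierSet("~=20.0.2")
#   assert "20.0.5" in spec        # patch release: accepted
#   assert "20.1.0" not in spec    # minor release: rejected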
FROM base as build +# Installing client libraries and any other package you need +# +# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html +# libstdc++: needed in ujson https://github.com/kohlschutter/junixsocket/issues/33 +# +RUN apk update && \ + apk add --no-cache \ + libpq \ + libstdc++ + RUN apk add --no-cache \ postgresql-dev \ gcc \ @@ -46,7 +56,7 @@ RUN apk add --no-cache \ linux-headers RUN $SC_PIP install --upgrade \ - pip~=19.1.1 \ + pip~=20.0.2 \ wheel \ setuptools diff --git a/services/storage/Makefile b/services/storage/Makefile index 14cc5e5f391..b4dac456df9 100644 --- a/services/storage/Makefile +++ b/services/storage/Makefile @@ -1,25 +1,21 @@ -.DEFAULT_GOAL := help +# +# Targets for DEVELOPMENT for Storage service +# +include ../../scripts/common.Makefile REPO_BASE_DIR = $(abspath $(CURDIR)/../../) -VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) - APP_NAME := $(notdir $(CURDIR)) -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} $@ - - .PHONY: openapi-specs openapi-specs: ## updates and validates openapi specifications $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @$(VENV_DIR)/bin/pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests @@ -28,39 +24,6 @@ tests: ## runs unit tests @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests - .PHONY: build build: openapi-specs ## builds docker image (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} target=${APP_NAME} $@ - - -.PHONY: version-patch version-minor -version-patch version-minor: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '${APP_NAME}':" - @echo "" - @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_- ]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" + @$(MAKE) -C ${REPO_BASE_DIR} $@ target=${APP_NAME} diff --git a/services/storage/docker/healthcheck.py b/services/storage/docker/healthcheck.py index de57f4f6abb..5a5edba5230 100644 --- a/services/storage/docker/healthcheck.py +++ b/services/storage/docker/healthcheck.py @@ -27,9 +27,14 @@ ok = os.environ.get("SC_BOOT_MODE").lower() == "debug" # Queries host -ok = ok or urlopen("{host}{baseurl}".format( - host=sys.argv[1], - baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "")) # adds a base-path if defined in environ - ).getcode() == 200 - -sys.exit(SUCCESS if ok else UNHEALTHY) \ No newline at end of file +ok = ( + ok + or urlopen( + "{host}{baseurl}".format( + host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") + ) # adds a base-path if defined in environ + ).getcode() + == 200 +) + +sys.exit(SUCCESS if ok else UNHEALTHY) diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index c03fa1ea766..3a9e32c0435 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -6,8 +6,8 @@ -r ../../../packages/postgres-database/requirements/_base.in -r ../../../packages/service-library/requirements/_base.in - -urllib3~=1.24.2 # See https://nvd.nist.gov/vuln/detail/CVE-2019-11324 +psutil>=5.6.6 # Vulnerability https://github.com/advisories/GHSA-qfc5-mcwq-26q8 +urllib3>=1.25.8 # Vulnerability psycopg2-binary~=2.8.4 # See http://initd.org/psycopg/docs/install.html#binary-install-from-pypi sqlalchemy~=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164 boto3==1.9.252 # do not use lastest version, this would require botocore<1.13.0,>=1.12.179 but aiobotocore[boto3]==0.10.2 hardcodes boto3==1.9.91 that requires botocore<1.12.92,>=1.12.91 diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index 2fedd32e722..22dcf691780 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -4,18 +4,18 @@ # # pip-compile --output-file=_base.txt _base.in # -aioboto3==6.4.1 +aioboto3==6.4.1 # via -r _base.in aiobotocore[boto3]==0.10.4 # via aioboto3 -aiofiles==0.4.0 -aiohttp==3.6.2 -aiopg[sa]==1.0.0 -aiozipkin==0.6.0 +aiofiles==0.4.0 # via -r _base.in +aiohttp==3.6.2 # via -r ../../../packages/service-library/requirements/_base.in, -r _base.in, aiobotocore, aiozipkin +aiopg[sa]==1.0.0 # via -r ../../../packages/service-library/requirements/_base.in, -r _base.in +aiozipkin==0.6.0 # via -r ../../../packages/service-library/requirements/_base.in apipkg==1.5 # via execnet async-generator==1.10 # via aiobotocore async-timeout==3.0.1 # via aiohttp -attrs==19.1.0 -blackfynn==2.11.1 -boto3==1.9.252 +attrs==19.1.0 # via -r ../../../packages/service-library/requirements/_base.in, aiohttp, jsonschema, openapi-core +blackfynn==2.11.1 # via -r _base.in +boto3==1.9.252 # via -r _base.in, aiobotocore, blackfynn botocore==1.12.252 # via aiobotocore, boto3, s3transfer certifi==2019.3.9 # via requests chardet==3.0.4 # via aiohttp, requests @@ -23,47 +23,47 @@ configparser==3.7.4 # via blackfynn deprecated==1.2.5 # via blackfynn docopt==0.6.2 # via blackfynn docutils==0.15.2 # via 
botocore -execnet==1.6.0 +execnet==1.6.0 # via -r _base.in future==0.17.1 # via blackfynn idna-ssl==1.1.0 # via aiohttp idna==2.8 # via idna-ssl, requests, yarl importlib-metadata==1.3.0 # via jsonschema isodate==0.6.0 # via openapi-core jmespath==0.9.4 # via boto3, botocore -jsonschema==3.2.0 +jsonschema==3.2.0 # via -r ../../../packages/service-library/requirements/_base.in, openapi-spec-validator lazy-object-proxy==1.4.3 # via openapi-core -marshmallow==2.19.2 +marshmallow==2.19.2 # via -r _base.in more-itertools==8.0.2 # via zipp multidict==4.5.2 # via aiohttp, yarl -numpy==1.16.3 -openapi-core==0.12.0 +numpy==1.16.3 # via -r python-with-pandas_89f709.txt, blackfynn, pandas +openapi-core==0.12.0 # via -r ../../../packages/service-library/requirements/_base.in openapi-spec-validator==0.2.8 # via openapi-core -pandas==0.24.2 -prometheus-client==0.7.1 +pandas==0.24.2 # via -r python-with-pandas_89f709.txt, blackfynn +prometheus-client==0.7.1 # via -r ../../../packages/service-library/requirements/_base.in protobuf==3.2.0 # via blackfynn -psutil==5.6.2 # via blackfynn -psycopg2-binary==2.8.4 +psutil==5.7.0 # via -r _base.in, blackfynn +psycopg2-binary==2.8.4 # via -r ../../../packages/service-library/requirements/_base.in, -r _base.in, aiopg, sqlalchemy pyrsistent==0.15.6 # via jsonschema -python-dateutil==2.8.0 -pytz==2019.1 -pyyaml==5.3 +python-dateutil==2.8.0 # via -r python-with-pandas_89f709.txt, botocore, pandas +pytz==2019.1 # via -r python-with-pandas_89f709.txt, blackfynn, pandas +pyyaml==5.3 # via -r ../../../packages/service-library/requirements/_base.in, openapi-spec-validator, trafaret-config requests==2.22.0 # via blackfynn s3transfer==0.2.1 # via boto3 -semantic-version==2.6.0 +semantic-version==2.6.0 # via -r _base.in semver==2.8.1 # via blackfynn -six==1.12.0 -sqlalchemy[postgresql_psycopg2binary]==1.3.3 +six==1.12.0 # via -r python-with-pandas_89f709.txt, isodate, jsonschema, openapi-core, openapi-spec-validator, protobuf, pyrsistent, python-dateutil, tenacity, websocket-client +sqlalchemy[postgresql_psycopg2binary]==1.3.3 # via -r ../../../packages/postgres-database/requirements/_base.in, -r ../../../packages/service-library/requirements/_base.in, -r _base.in, aiopg strict-rfc3339==0.7 # via openapi-core -tenacity==6.0.0 -trafaret-config==2.0.2 -trafaret==1.2.0 +tenacity==6.0.0 # via -r ../../../packages/service-library/requirements/_base.in, -r _base.in +trafaret-config==2.0.2 # via -r _base.in +trafaret==1.2.0 # via -r ../../../packages/service-library/requirements/_base.in, -r _base.in, trafaret-config typing-extensions==3.7.2 # via aiohttp -ujson==1.35 -urllib3==1.24.3 +ujson==1.35 # via -r ../../../packages/service-library/requirements/_base.in +urllib3==1.25.8 # via -r _base.in, botocore, requests websocket-client==0.56.0 # via blackfynn -werkzeug==0.16.0 +werkzeug==0.16.0 # via -r ../../../packages/service-library/requirements/_base.in wrapt==1.11.2 # via aiobotocore, deprecated -yarl==1.3.0 +yarl==1.3.0 # via -r ../../../packages/postgres-database/requirements/_base.in, aiohttp zipp==0.6.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index 086dba915f5..f518a63a2a9 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -4,91 +4,95 @@ # # pip-compile --output-file=_test.txt _test.in # -aioboto3==6.4.1 -aiobotocore[boto3]==0.10.4 -aiofiles==0.4.0 -aiohttp==3.6.2 
-aiopg[sa]==1.0.0 -aiozipkin==0.6.0 -apipkg==1.5 +aioboto3==6.4.1 # via -r _base.txt +aiobotocore[boto3]==0.10.4 # via -r _base.txt, aioboto3 +aiofiles==0.4.0 # via -r _base.txt +aiohttp==3.6.2 # via -r _base.txt, aiobotocore, aiozipkin, pytest-aiohttp +aiopg[sa]==1.0.0 # via -r _base.txt +aiozipkin==0.6.0 # via -r _base.txt +apipkg==1.5 # via -r _base.txt, execnet +appdirs==1.4.3 # via virtualenv astroid==2.3.3 # via pylint -async-generator==1.10 -async-timeout==3.0.1 -attrs==19.1.0 -blackfynn==2.11.1 -boto3==1.9.252 -botocore==1.12.252 -certifi==2019.3.9 -chardet==3.0.4 -codecov==2.0.15 -configparser==3.7.4 -coverage==4.5.1 -coveralls==1.10.0 -deprecated==1.2.5 -docopt==0.6.2 -docutils==0.15.2 -execnet==1.6.0 -future==0.17.1 -idna-ssl==1.1.0 -idna==2.8 -importlib-metadata==1.3.0 -isodate==0.6.0 +async-generator==1.10 # via -r _base.txt, aiobotocore +async-timeout==3.0.1 # via -r _base.txt, aiohttp +attrs==19.1.0 # via -r _base.txt, aiohttp, jsonschema, openapi-core, pytest, pytest-docker +blackfynn==2.11.1 # via -r _base.txt +boto3==1.9.252 # via -r _base.txt, aiobotocore, blackfynn +botocore==1.12.252 # via -r _base.txt, aiobotocore, boto3, s3transfer +certifi==2019.3.9 # via -r _base.txt, requests +chardet==3.0.4 # via -r _base.txt, aiohttp, requests +codecov==2.0.16 # via -r _test.in +configparser==3.7.4 # via -r _base.txt, blackfynn +coverage==4.5.1 # via -r _test.in, codecov, coveralls, pytest-cov +coveralls==1.11.1 # via -r _test.in +deprecated==1.2.5 # via -r _base.txt, blackfynn +distlib==0.3.0 # via virtualenv +docopt==0.6.2 # via -r _base.txt, blackfynn, coveralls +docutils==0.15.2 # via -r _base.txt, botocore +execnet==1.6.0 # via -r _base.txt +filelock==3.0.12 # via virtualenv +future==0.17.1 # via -r _base.txt, blackfynn +idna-ssl==1.1.0 # via -r _base.txt, aiohttp +idna==2.8 # via -r _base.txt, idna-ssl, requests, yarl +importlib-metadata==1.3.0 # via -r _base.txt, importlib-resources, jsonschema, pluggy, pytest, virtualenv +importlib-resources==1.3.1 # via virtualenv +isodate==0.6.0 # via -r _base.txt, openapi-core isort==4.3.21 # via pylint -jmespath==0.9.4 -jsonschema==3.2.0 -lazy-object-proxy==1.4.3 -marshmallow==2.19.2 +jmespath==0.9.4 # via -r _base.txt, boto3, botocore +jsonschema==3.2.0 # via -r _base.txt, openapi-spec-validator +lazy-object-proxy==1.4.3 # via -r _base.txt, astroid, openapi-core +marshmallow==2.19.2 # via -r _base.txt mccabe==0.6.1 # via pylint -more-itertools==8.0.2 -multidict==4.5.2 -numpy==1.16.3 -openapi-core==0.12.0 -openapi-spec-validator==0.2.8 -packaging==20.0 # via pytest, pytest-sugar -pandas==0.24.2 +more-itertools==8.0.2 # via -r _base.txt, pytest, zipp +multidict==4.5.2 # via -r _base.txt, aiohttp, yarl +numpy==1.16.3 # via -r _base.txt, blackfynn, pandas +openapi-core==0.12.0 # via -r _base.txt +openapi-spec-validator==0.2.8 # via -r _base.txt, openapi-core +packaging==20.3 # via pytest, pytest-sugar +pandas==0.24.2 # via -r _base.txt, blackfynn pluggy==0.13.1 # via pytest -prometheus-client==0.7.1 -protobuf==3.2.0 -psutil==5.6.2 -psycopg2-binary==2.8.4 -ptvsd==4.3.2 +prometheus-client==0.7.1 # via -r _base.txt +protobuf==3.2.0 # via -r _base.txt, blackfynn +psutil==5.7.0 # via -r _base.txt, blackfynn +psycopg2-binary==2.8.4 # via -r _base.txt, aiopg, sqlalchemy +ptvsd==4.3.2 # via -r _test.in py==1.8.1 # via pytest -pylint==2.4.4 +pylint==2.4.4 # via -r _test.in pyparsing==2.4.6 # via packaging -pyrsistent==0.15.6 -pytest-aiohttp==0.3.0 -pytest-cov==2.8.1 -pytest-docker==0.6.1 -pytest-instafail==0.4.1.post0 -pytest-mock==2.0.0 
-pytest-runner==5.2 -pytest-sugar==0.9.2 -pytest==5.3.2 -python-dateutil==2.8.0 -pytz==2019.1 -pyyaml==5.3 -requests==2.22.0 -s3transfer==0.2.1 -semantic-version==2.6.0 -semver==2.8.1 -six==1.12.0 -sqlalchemy[postgresql_psycopg2binary]==1.3.3 -strict-rfc3339==0.7 -tenacity==6.0.0 +pyrsistent==0.15.6 # via -r _base.txt, jsonschema +pytest-aiohttp==0.3.0 # via -r _test.in +pytest-cov==2.8.1 # via -r _test.in +pytest-docker==0.7.2 # via -r _test.in +pytest-instafail==0.4.1.post0 # via -r _test.in +pytest-mock==2.0.0 # via -r _test.in +pytest-runner==5.2 # via -r _test.in +pytest-sugar==0.9.2 # via -r _test.in +pytest==5.3.5 # via -r _test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar +python-dateutil==2.8.0 # via -r _base.txt, botocore, pandas +pytz==2019.1 # via -r _base.txt, blackfynn, pandas +pyyaml==5.3 # via -r _base.txt, openapi-spec-validator, trafaret-config +requests==2.22.0 # via -r _base.txt, blackfynn, codecov, coveralls +s3transfer==0.2.1 # via -r _base.txt, boto3 +semantic-version==2.6.0 # via -r _base.txt +semver==2.8.1 # via -r _base.txt, blackfynn +six==1.12.0 # via -r _base.txt, astroid, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, protobuf, pyrsistent, python-dateutil, tenacity, virtualenv, websocket-client +sqlalchemy[postgresql_psycopg2binary]==1.3.3 # via -r _base.txt, aiopg +strict-rfc3339==0.7 # via -r _base.txt, openapi-core +tenacity==6.0.0 # via -r _base.txt termcolor==1.1.0 # via pytest-sugar -trafaret-config==2.0.2 -trafaret==1.2.0 -typed-ast==1.4.0 # via astroid -typing-extensions==3.7.2 -ujson==1.35 -urllib3==1.24.3 -virtualenv==16.7.9 +trafaret-config==2.0.2 # via -r _base.txt +trafaret==1.2.0 # via -r _base.txt, trafaret-config +typed-ast==1.4.1 # via astroid +typing-extensions==3.7.2 # via -r _base.txt, aiohttp +ujson==1.35 # via -r _base.txt +urllib3==1.25.8 # via -r _base.txt, botocore, requests +virtualenv==20.0.10 # via -r _test.in wcwidth==0.1.8 # via pytest -websocket-client==0.56.0 -werkzeug==0.16.0 -wrapt==1.11.2 -yarl==1.3.0 -zipp==0.6.0 +websocket-client==0.56.0 # via -r _base.txt, blackfynn +werkzeug==0.16.0 # via -r _base.txt +wrapt==1.11.2 # via -r _base.txt, aiobotocore, astroid, deprecated +yarl==1.3.0 # via -r _base.txt, aiohttp +zipp==0.6.0 # via -r _base.txt, importlib-metadata, importlib-resources # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/storage/setup.cfg b/services/storage/setup.cfg index d24645148a7..ec3dbde71a3 100644 --- a/services/storage/setup.cfg +++ b/services/storage/setup.cfg @@ -14,8 +14,6 @@ replace = version='{new_version}' [bumpversion:file:./src/simcore_service_storage/api/v0/openapi.yaml] -[bumpversion:file:src/simcore_service_storage/__version__.py] - [bumpversion:file:client-sdk/codegen_config.json] search = "packageVersion":"{current_version}" replace = "packageVersion":"{new_version}" diff --git a/services/storage/setup.py b/services/storage/setup.py index ffc7cfeafb6..8c12d6ba77c 100644 --- a/services/storage/setup.py +++ b/services/storage/setup.py @@ -7,46 +7,49 @@ here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent if sys.version_info < (3, 6): - raise RuntimeError("Requires 3.6, got %s. Did you forget to activate virtualenv?" % sys.version_info) + raise RuntimeError( + "Requires 3.6, got %s. Did you forget to activate virtualenv?" 
+ % sys.version_info + ) -def read_reqs( reqs_path: Path): - return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE) +def read_reqs(reqs_path: Path): + return re.findall(r"(^[^#-][\w]+[-~>=<.\w]+)", reqs_path.read_text(), re.MULTILINE) -install_requirements = read_reqs( here / "requirements" / "_base.txt" ) + [ +install_requirements = read_reqs(here / "requirements" / "_base.txt") + [ "s3wrapper==0.1.0", "simcore-postgres-database", "simcore-sdk==0.1.0", - "simcore-service-library" + "simcore-service-library", ] -test_requirements = read_reqs( here / "requirements" / "_test.txt" ) +test_requirements = read_reqs(here / "requirements" / "_test.txt") setup_config = dict( - name='simcore-service-storage', - version='0.2.1', - description='Service to manage data storage in simcore', - author='Manuel Guidon (mguidon)', - python_requires='>3.6, <3.7', - packages=find_packages(where='src'), - package_dir={'': 'src'}, + name="simcore-service-storage", + version="0.2.1", + description="Service to manage data storage in simcore", + author="Manuel Guidon (mguidon)", + python_requires=">3.6, <3.7", + packages=find_packages(where="src"), + package_dir={"": "src"}, include_package_data=True, - install_requires= install_requirements, + install_requires=install_requirements, tests_require=test_requirements, package_data={ - '': [ - 'api/v0/openapi.yaml', - 'api/v0/schemas/*.json', - 'data/*.json', - 'data/*.yml', - 'data/*.yaml', - ], + "": [ + "api/v0/openapi.yaml", + "api/v0/schemas/*.json", + "data/*.json", + "data/*.yml", + "data/*.yaml", + ], }, entry_points={ - 'console_scripts': [ - 'simcore-service-storage = simcore_service_storage.cli:main', + "console_scripts": [ + "simcore-service-storage = simcore_service_storage.cli:main", ], }, ) @@ -57,7 +60,8 @@ def main(): """ setup(**setup_config) - return 0 # syccessful termination + return 0 # syccessful termination + if __name__ == "__main__": raise SystemExit(main()) diff --git a/services/storage/src/simcore_service_storage/__init__.py b/services/storage/src/simcore_service_storage/__init__.py index cb16e69b262..acdd02fe8ff 100644 --- a/services/storage/src/simcore_service_storage/__init__.py +++ b/services/storage/src/simcore_service_storage/__init__.py @@ -3,4 +3,3 @@ """ from .__version__ import __version__ from .cli import main - diff --git a/services/storage/src/simcore_service_storage/__version__.py b/services/storage/src/simcore_service_storage/__version__.py index 1806ab44fd7..c839c64ff0c 100644 --- a/services/storage/src/simcore_service_storage/__version__.py +++ b/services/storage/src/simcore_service_storage/__version__.py @@ -1,39 +1,12 @@ -""" Current version of the simcore_service_storage application. - -This project uses the Semantic Versioning scheme in conjunction with PEP 0440: - - - - - -Major versions introduce significant changes to the API, and backwards -compatibility is not guaranteed. - -Minor versions are for new features and other backwards-compatible changes to the API. - -Patch versions are for bug fixes and internal code changes that do not affect the API. - -Pre-release and development versions are denoted appending a hyphen, i.e. 0.2.1-dev - -Build metadata (e.g. git commit id, build id, ...) can be appended with a plus, i.e. 
0.2.1-dev+asd21ff - -Package version is defined in the setup.py following the principle of single-sourcing (option 5): - +""" Current version of the simcore_service_storage application and its API """ import pkg_resources -import semantic_version -# TODO: introduce metadata info from vcs +from semantic_version import Version -try: - # access metadata - __version__ = pkg_resources.get_distribution('simcore_service_storage').version - assert __version__=="0.2.1", "Did you install this package?" -except AssertionError as ee: - import logging - logging.debug(ee) +__version__: str = pkg_resources.get_distribution("simcore_service_storage").version +version = Version(__version__) -def get_version_object(): - return semantic_version.Version(__version__) +api_version_prefix: str = f"v{version.major}" diff --git a/services/storage/src/simcore_service_storage/api/v0/schemas/node-meta-v0.0.1.json b/services/storage/src/simcore_service_storage/api/v0/schemas/node-meta-v0.0.1.json index b4978cbd794..2909aae313a 100644 --- a/services/storage/src/simcore_service_storage/api/v0/schemas/node-meta-v0.0.1.json +++ b/services/storage/src/simcore_service_storage/api/v0/schemas/node-meta-v0.0.1.json @@ -26,6 +26,14 @@ "simcore/services/dynamic/3dviewer" ] }, + "integration-version": { + "type": "string", + "description": "integration version number", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "examples": [ + "1.0.0" + ] + }, "version": { "type": "string", "description": "semantic version number", diff --git a/services/storage/src/simcore_service_storage/application.py b/services/storage/src/simcore_service_storage/application.py index a10a47168eb..2c631efb559 100644 --- a/services/storage/src/simcore_service_storage/application.py +++ b/services/storage/src/simcore_service_storage/application.py @@ -21,30 +21,36 @@ def create(config: Dict) -> web.Application: - log.debug("Initializing app with config:\n%s", - json.dumps(config, indent=2, sort_keys=True)) + log.debug( + "Initializing app with config:\n%s", + json.dumps(config, indent=2, sort_keys=True), + ) app = create_safe_application(config) tracing = config["tracing"]["enabled"] if tracing: - setup_tracing(app, "simcore_service_storage", - config["main"]["host"], config["main"]["port"], config["tracing"]) - setup_db(app) # -> postgres service - setup_s3(app) # -> minio service + setup_tracing( + app, + "simcore_service_storage", + config["main"]["host"], + config["main"]["port"], + config["tracing"], + ) + setup_db(app) # -> postgres service + setup_s3(app) # -> minio service setup_dsm(app) # core subsystem. 
Needs s3 and db setups done - setup_rest(app) # lastly, we expose API to the world + setup_rest(app) # lastly, we expose API to the world if config["main"].get("monitoring_enabled", False): setup_monitoring(app, "simcore_service_storage") return app + def run(config, app=None): log.debug("Serving application ") if not app: app = create(config) - web.run_app(app, - host=config["main"]["host"], - port=config["main"]["port"]) + web.run_app(app, host=config["main"]["host"], port=config["main"]["port"]) diff --git a/services/storage/src/simcore_service_storage/cli.py b/services/storage/src/simcore_service_storage/cli.py index 3d43916ff6c..17781d94deb 100644 --- a/services/storage/src/simcore_service_storage/cli.py +++ b/services/storage/src/simcore_service_storage/cli.py @@ -34,16 +34,15 @@ def create_environ(skip_system_environ=False): # project-related environment variables here = os.path.dirname(__file__) - environ['THIS_PACKAGE_DIR'] = here + environ["THIS_PACKAGE_DIR"] = here rootdir = search_osparc_repo_dir(start=here) if rootdir is not None: - environ['OSPARC_SIMCORE_REPO_ROOTDIR'] = str(rootdir) + environ["OSPARC_SIMCORE_REPO_ROOTDIR"] = str(rootdir) return environ - def setup(_parser): cli_config.add_cli_options(_parser) return _parser @@ -62,7 +61,9 @@ def parse(args, _parser): return config -parser = argparse.ArgumentParser(description='Service to manage data storage in simcore.') +parser = argparse.ArgumentParser( + description="Service to manage data storage in simcore." +) setup(parser) diff --git a/services/storage/src/simcore_service_storage/cli_config.py b/services/storage/src/simcore_service_storage/cli_config.py index 2dafcd3a7aa..0886bbe9c92 100644 --- a/services/storage/src/simcore_service_storage/cli_config.py +++ b/services/storage/src/simcore_service_storage/cli_config.py @@ -1,4 +1,3 @@ - import argparse import logging import os @@ -13,7 +12,6 @@ log = logging.getLogger(__name__) - def add_cli_options(argument_parser=None): """ Adds settings group to cli with options: @@ -28,13 +26,13 @@ def add_cli_options(argument_parser=None): argument_parser = argparse.ArgumentParser() commandline.standard_argparse_options( - argument_parser.add_argument_group('settings'), - default_config=DEFAULT_CONFIG) + argument_parser.add_argument_group("settings"), default_config=DEFAULT_CONFIG + ) return argument_parser -def config_from_options(options, vars=None): # pylint: disable=W0622 +def config_from_options(options, vars=None): # pylint: disable=W0622 if vars is None: vars = os.environ @@ -43,7 +41,7 @@ def config_from_options(options, vars=None): # pylint: disable=W0622 if resources.exists(resource_name): options.config = resources.get_path(resource_name) else: - resource_name = RSC_CONFIG_DIR_KEY + '/' + resource_name + resource_name = RSC_CONFIG_DIR_KEY + "/" + resource_name if resources.exists(resource_name): options.config = resources.get_path(resource_name) @@ -51,7 +49,8 @@ def config_from_options(options, vars=None): # pylint: disable=W0622 return commandline.config_from_options(options, trafaret=schema, vars=vars) -def read_and_validate(filepath, vars=None): # pylint: disable=W0622 + +def read_and_validate(filepath, vars=None): # pylint: disable=W0622 if vars is None: vars = os.environ # NOTE: vars=os.environ in signature freezes default to os.environ before it gets diff --git a/services/storage/src/simcore_service_storage/config_schema.py b/services/storage/src/simcore_service_storage/config_schema.py index e55687d5ad2..465a7058562 100644 --- 
a/services/storage/src/simcore_service_storage/config_schema.py +++ b/services/storage/src/simcore_service_storage/config_schema.py @@ -5,29 +5,38 @@ from . import rest_config -app_schema = T.Dict({ - T.Key("host", default="0.0.0.0"): T.IP, - "port": T.Int(), - "log_level": T.Enum("DEBUG", "WARNING", "INFO", "ERROR", "CRITICAL", "FATAL", "NOTSET"), - "testing": T.Bool(), - T.Key("max_workers", default=8, optional=True) : T.Int(), - T.Key("monitoring_enabled", default=False): T.Or(T.Bool(), T.Int), # Int added to use environs - T.Key("test_datcore", optional=True): T.Dict({ - "token_key": T.String(), - "token_secret": T.String() - }), - T.Key("disable_services", default=[], optional=True): T.List(T.String()) -}) +app_schema = T.Dict( + { + T.Key("host", default="0.0.0.0"): T.IP, + "port": T.Int(), + "log_level": T.Enum( + "DEBUG", "WARNING", "INFO", "ERROR", "CRITICAL", "FATAL", "NOTSET" + ), + "testing": T.Bool(), + T.Key("max_workers", default=8, optional=True): T.Int(), + T.Key("monitoring_enabled", default=False): T.Or( + T.Bool(), T.Int + ), # Int added to use environs + T.Key("test_datcore", optional=True): T.Dict( + {"token_key": T.String(), "token_secret": T.String()} + ), + T.Key("disable_services", default=[], optional=True): T.List(T.String()), + } +) -schema = T.Dict({ - "version": T.String(), - T.Key("main"): app_schema, - T.Key("postgres"): db.CONFIG_SCHEMA, - T.Key("s3"): s3.CONFIG_SCHEMA, - addon_section(rest_config.CONFIG_SECTION_NAME, optional=True): rest_config.schema, - T.Key("tracing"): tracing_schema -}) +schema = T.Dict( + { + "version": T.String(), + T.Key("main"): app_schema, + T.Key("postgres"): db.CONFIG_SCHEMA, + T.Key("s3"): s3.CONFIG_SCHEMA, + addon_section( + rest_config.CONFIG_SECTION_NAME, optional=True + ): rest_config.schema, + T.Key("tracing"): tracing_schema, + } +) # TODO: config submodule that knows about schema with web.Application intpu parameters diff --git a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py index f12696f44bf..3ecc8c5cc37 100644 --- a/services/storage/src/simcore_service_storage/datcore.py +++ b/services/storage/src/simcore_service_storage/datcore.py @@ -3,16 +3,17 @@ requires Blackfynn, check Makefile env2 """ -import logging # pylint: skip-file + +import logging import os import urllib +from contextlib import suppress from pathlib import Path from typing import List from blackfynn import Blackfynn from blackfynn.models import BaseCollection, Collection, DataPackage - from simcore_service_storage.models import (DatasetMetaData, FileMetaData, FileMetaDataEx) from simcore_service_storage.settings import DATCORE_ID, DATCORE_STR @@ -22,14 +23,17 @@ DatasetMetaDataVec = List[DatasetMetaData] -#FIXME: W0611:Unused IOAPI imported from blackfynn.api.transfers -#from blackfynn.api.transfers import IOAPI +# FIXME: W0611:Unused IOAPI imported from blackfynn.api.transfers +# from blackfynn.api.transfers import IOAPI -#FIXME: W0212:Access to a protected member _api of a client class +# FIXME: W0212:Access to a protected member _api of a client class # pylint: disable=W0212 -def _get_collection_id(folder: BaseCollection, _collections: List[str], collection_id: str)-> str: + +def _get_collection_id( + folder: BaseCollection, _collections: List[str], collection_id: str +) -> str: if not len(_collections): return collection_id @@ -50,8 +54,13 @@ def _get_collection_id(folder: BaseCollection, _collections: List[str], collecti class DatcoreClient(object): def __init__(self, 
api_token=None, api_secret=None, host=None, streaming_host=None): - self.client = Blackfynn(profile=None, api_token=api_token, api_secret=api_secret, - host=host, streaming_host=streaming_host) + self.client = Blackfynn( + profile=None, + api_token=api_token, + api_secret=api_secret, + host=host, + streaming_host=streaming_host, + ) def profile(self): """ @@ -87,7 +96,7 @@ def _destination_from_id(self, destination_id: str): return destination - def list_files_recursively(self, dataset_filter: str=""): + def list_files_recursively(self, dataset_filter: str = ""): files = [] for dataset in self.client.datasets(): @@ -96,39 +105,45 @@ def list_files_recursively(self, dataset_filter: str=""): return files - def list_files_raw_dataset(self, dataset_id: str)->List[FileMetaDataEx]: - files = [] # raw packages - _files = [] # fmds - data = {} # map to keep track of parents-child + def list_files_raw_dataset(self, dataset_id: str) -> List[FileMetaDataEx]: + files = [] # raw packages + _files = [] # fmds + data = {} # map to keep track of parents-child - cursor = '' + cursor = "" page_size = 1000 api = self.client._api.datasets dataset = self.client.get_dataset(dataset_id) if dataset is not None: while True: - resp = api._get(api._uri('/{id}/packages?cursor={cursor}&pageSize={pageSize}&includeSourceFiles={includeSourceFiles}', id=dataset_id, - cursor=cursor, pageSize=page_size, includeSourceFiles=False)) - for package in resp.get('packages', list()): - id = package['content']['id'] + resp = api._get( + api._uri( + "/{id}/packages?cursor={cursor}&pageSize={pageSize}&includeSourceFiles={includeSourceFiles}", + id=dataset_id, + cursor=cursor, + pageSize=page_size, + includeSourceFiles=False, + ) + ) + for package in resp.get("packages", list()): + id = package["content"]["id"] data[id] = package files.append(package) - cursor = resp.get('cursor') + cursor = resp.get("cursor") if cursor is None: break - for f in files: - if f['content']['packageType'] != 'Collection': - filename = f['content']['name'] + if f["content"]["packageType"] != "Collection": + filename = f["content"]["name"] file_path = "" - file_id = f['content']['nodeId'] + file_id = f["content"]["nodeId"] _f = f - while 'parentId' in _f['content'].keys(): - parentid = _f['content']['parentId'] + while "parentId" in _f["content"].keys(): + parentid = _f["content"]["parentId"] _f = data[parentid] - file_path = _f['content']['name'] +"/" + file_path + file_path = _f["content"]["name"] + "/" + file_path bucket_name = dataset.name file_name = filename @@ -136,23 +151,33 @@ def list_files_raw_dataset(self, dataset_id: str)->List[FileMetaDataEx]: object_name = str(Path(file_path) / file_name) file_uuid = str(Path(bucket_name) / object_name) - created_at = f['content']['createdAt'] - last_modified = f['content']['updatedAt'] + created_at = f["content"]["createdAt"] + last_modified = f["content"]["updatedAt"] parent_id = dataset_id - if 'parentId' in f['content']: - parentId = f['content']['parentId'] - parent_id = data[parentId]['content']['nodeId'] - - fmd = FileMetaData(bucket_name=bucket_name, file_name=file_name, object_name=object_name, - location=DATCORE_STR, location_id=DATCORE_ID, file_uuid=file_uuid, file_id=file_id, - raw_file_path=file_uuid, display_file_path=file_uuid, created_at=created_at, - last_modified=last_modified, file_size=file_size) + if "parentId" in f["content"]: + parentId = f["content"]["parentId"] + parent_id = data[parentId]["content"]["nodeId"] + + fmd = FileMetaData( + bucket_name=bucket_name, + file_name=file_name, + 
object_name=object_name, + location=DATCORE_STR, + location_id=DATCORE_ID, + file_uuid=file_uuid, + file_id=file_id, + raw_file_path=file_uuid, + display_file_path=file_uuid, + created_at=created_at, + last_modified=last_modified, + file_size=file_size, + ) fmdx = FileMetaDataEx(fmd=fmd, parent_id=parent_id) _files.append(fmdx) return _files - def list_files_raw(self, dataset_filter: str="")->List[FileMetaDataEx]: + def list_files_raw(self, dataset_filter: str = "") -> List[FileMetaDataEx]: _files = [] for dataset in self.client.datasets(): @@ -160,10 +185,12 @@ def list_files_raw(self, dataset_filter: str="")->List[FileMetaDataEx]: return _files - def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCollection, current_root: Path): + def list_dataset_files_recursively( + self, files: List[FileMetaData], base: BaseCollection, current_root: Path + ): for item in base: if isinstance(item, Collection): - _current_root = current_root / Path(item.name) + _current_root = current_root / Path(item.name) self.list_dataset_files_recursively(files, item, _current_root) else: parts = current_root.parts @@ -172,11 +199,11 @@ def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCo file_size = 0 # lets assume we have only one file if item.files: - file_name = Path(item.files[0].as_dict()['content']['s3key']).name - file_size = item.files[0].as_dict()['content']['size'] + file_name = Path(item.files[0].as_dict()["content"]["s3key"]).name + file_size = item.files[0].as_dict()["content"]["size"] # if this is in the root directory, the object_name is the filename only if len(parts) > 1: - object_name = str(Path(*list(parts)[1:])/ Path(file_name)) + object_name = str(Path(*list(parts)[1:]) / Path(file_name)) else: object_name = str(Path(file_name)) @@ -184,13 +211,22 @@ def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCo file_id = item.id created_at = item.created_at last_modified = item.updated_at - fmd = FileMetaData(bucket_name=bucket_name, file_name=file_name, object_name=object_name, - location=DATCORE_STR, location_id=DATCORE_ID, file_uuid=file_uuid, file_id=file_id, - raw_file_path=file_uuid, display_file_path=file_uuid, created_at=created_at, - last_modified=last_modified, file_size=file_size) + fmd = FileMetaData( + bucket_name=bucket_name, + file_name=file_name, + object_name=object_name, + location=DATCORE_STR, + location_id=DATCORE_ID, + file_uuid=file_uuid, + file_id=file_id, + raw_file_path=file_uuid, + display_file_path=file_uuid, + created_at=created_at, + last_modified=last_modified, + file_size=file_size, + ) files.append(fmd) - def create_dataset(self, ds_name, force_delete=False): """ Creates a new dataset for the current user and returns it. 
Returns existing one @@ -202,13 +238,11 @@ def create_dataset(self, ds_name, force_delete=False): """ ds = None - try: + with suppress(Exception): ds = self.client.get_dataset(ds_name) if force_delete: ds.delete() ds = None - except Exception: # pylint: disable=W0703 - pass if ds is None: ds = self.client.create_dataset(ds_name) @@ -225,10 +259,8 @@ def get_dataset(self, ds_name, create_if_not_exists=False): """ ds = None - try: + with suppress(Exception): ds = self.client.get_dataset(ds_name) - except Exception: # pylint: disable=W0703 - pass if ds is None and create_if_not_exists: ds = self.client.create_dataset(ds_name) @@ -259,7 +291,7 @@ def exists_dataset(self, ds_name): ds = self.get_dataset(ds_name) return ds is not None - def upload_file(self, destination: str, filepath: str, meta_data = None): + def upload_file(self, destination: str, filepath: str, meta_data=None): """ Uploads a file to a given dataset/collection given its filepath on the host. Optionally adds some meta data @@ -303,7 +335,7 @@ def _update_meta_data(self, package, meta_data): """ for key in meta_data.keys(): - package.set_property(key, meta_data[key], category='simcore') + package.set_property(key, meta_data[key], category="simcore") package.update() @@ -334,9 +366,11 @@ def download_link(self, destination, filename): # pylint: disable = E1101 for item in collection: if isinstance(item, DataPackage): - if Path(item.files[0].as_dict()['content']['s3key']).name == filename: + if Path(item.files[0].as_dict()["content"]["s3key"]).name == filename: file_desc = self.client._api.packages.get_sources(item.id)[0] - url = self.client._api.packages.get_presigned_url_for_file(item.id, file_desc.id) + url = self.client._api.packages.get_presigned_url_for_file( + item.id, file_desc.id + ) return url return "" @@ -349,10 +383,12 @@ def download_link_by_id(self, file_id): filename = "" package = self.client.get(file_id) if package is not None: - filename = Path(package.files[0].as_dict()['content']['s3key']).name + filename = Path(package.files[0].as_dict()["content"]["s3key"]).name file_desc = self.client._api.packages.get_sources(file_id)[0] - url = self.client._api.packages.get_presigned_url_for_file(file_id, file_desc.id) + url = self.client._api.packages.get_presigned_url_for_file( + file_id, file_desc.id + ) return url, filename @@ -388,7 +424,7 @@ def delete_file(self, destination, filename): collection.update() for item in collection: if isinstance(item, DataPackage): - if Path(item.files[0].as_dict()['content']['s3key']).name == filename: + if Path(item.files[0].as_dict()["content"]["s3key"]).name == filename: self.client.delete(item) return True @@ -436,7 +472,6 @@ def update_meta_data(self, dataset, filename, meta_data): if package is not None: self._update_meta_data(package, meta_data) - def get_meta_data(self, dataset, filename): """ Returns metadata for a file @@ -472,10 +507,10 @@ def delete_meta_data(self, dataset, filename, keys=None): if package is not None: if keys is None: for p in package.properties: - package.remove_property(p.key, category='simcore') + package.remove_property(p.key, category="simcore") else: for k in keys: - package.remove_property(k, category='simcore') + package.remove_property(k, category="simcore") def search(self, what, max_count): """ @@ -508,14 +543,15 @@ def upload_file_to_id(self, destination_id: str, filepath: str): files = [filepath] try: - result = self.client._api.io.upload_files(destination, files, display_progress=True) - if result and result[0] and 'package' in 
result[0][0]: - _id = result[0][0]['package']['content']['id'] + result = self.client._api.io.upload_files( + destination, files, display_progress=True + ) + if result and result[0] and "package" in result[0][0]: + _id = result[0][0]["package"]["content"]["id"] except Exception: logger.exception("Error uploading file to datcore") - return _id def create_collection(self, destination_id: str, collection_name: str): @@ -539,7 +575,7 @@ def create_collection(self, destination_id: str, collection_name: str): return _id - def list_datasets(self)->DatasetMetaDataVec: + def list_datasets(self) -> DatasetMetaDataVec: data = [] for dataset in self.client.datasets(): dmd = DatasetMetaData(dataset_id=dataset.id, display_name=dataset.name) diff --git a/services/storage/src/simcore_service_storage/datcore_wrapper.py b/services/storage/src/simcore_service_storage/datcore_wrapper.py index 05137975acf..1491a918ca6 100644 --- a/services/storage/src/simcore_service_storage/datcore_wrapper.py +++ b/services/storage/src/simcore_service_storage/datcore_wrapper.py @@ -17,41 +17,49 @@ CURRENT_DIR = Path(__file__).resolve().parent logger = logging.getLogger(__name__) -#FIXME: W0703: Catching too general exception Exception (broad-except) +# FIXME: W0703: Catching too general exception Exception (broad-except) # pylint: disable=W0703 -#TODO: Use async callbacks for retreival of progress and pass via rabbit to server +# TODO: Use async callbacks for retreival of progress and pass via rabbit to server def make_async(func): @wraps(func) async def async_wrapper(self, *args, **kwargs): - blocking_task = self.loop.run_in_executor(self.pool, func, self, *args, **kwargs) + blocking_task = self.loop.run_in_executor( + self.pool, func, self, *args, **kwargs + ) _completed, _pending = await asyncio.wait([blocking_task]) results = [t.result() for t in _completed] # TODO: does this always work? return results[0] + return async_wrapper + class DatcoreWrapper: """ Wrapper to call the python2 api from datcore This can go away now. Next cleanup round... 
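        A hypothetical construction, for illustration only; the token, the
        secret and the pool size are made up, not taken from this patch:

            pool = ThreadPoolExecutor(3)
            loop = asyncio.get_event_loop()
            dcw = DatcoreWrapper("fake-token", "fake-secret", loop, pool)
            files = await dcw.list_files_raw()  # blocking client call, run in the pool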
""" + # pylint: disable=R0913 # Too many arguments - def __init__(self, api_token: str, api_secret: str, loop: object, pool: ThreadPoolExecutor): + def __init__( + self, api_token: str, api_secret: str, loop: object, pool: ThreadPoolExecutor + ): self.api_token = api_token self.api_secret = api_secret self.loop = loop self.pool = pool - self.d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') + self.d_client = DatcoreClient( + api_token=api_token, api_secret=api_secret, host="https://api.blackfynn.io" + ) @make_async - def list_files_recursively(self)->FileMetaDataVec: #pylint: disable=W0613 + def list_files_recursively(self) -> FileMetaDataVec: # pylint: disable=W0613 files = [] try: files = self.d_client.list_files_recursively() @@ -61,7 +69,7 @@ def list_files_recursively(self)->FileMetaDataVec: #pylint: disable=W0613 return files @make_async - def list_files_raw(self)->FileMetaDataExVec: #pylint: disable=W0613 + def list_files_raw(self) -> FileMetaDataExVec: # pylint: disable=W0613 files = [] try: files = self.d_client.list_files_raw() @@ -71,7 +79,9 @@ def list_files_raw(self)->FileMetaDataExVec: #pylint: disable=W0613 return files @make_async - def list_files_raw_dataset(self, dataset_id: str)->FileMetaDataExVec: #pylint: disable=W0613 + def list_files_raw_dataset( + self, dataset_id: str + ) -> FileMetaDataExVec: # pylint: disable=W0613 files = [] try: files = self.d_client.list_files_raw_dataset(dataset_id) @@ -140,14 +150,16 @@ def delete_test_dataset(self, dataset): logger.exception("Error deleting test dataset") @make_async - def upload_file(self, destination: str, local_path: str, meta_data: FileMetaData = None): + def upload_file( + self, destination: str, local_path: str, meta_data: FileMetaData = None + ): json_meta = "" if meta_data: json_meta = json.dumps(attr.asdict(meta_data)) try: str_meta = json_meta result = False - if str_meta : + if str_meta: meta_data = json.loads(str_meta) result = self.d_client.upload_file(destination, local_path, meta_data) else: diff --git a/services/storage/src/simcore_service_storage/db.py b/services/storage/src/simcore_service_storage/db.py index b2f736219c7..59119b3d657 100644 --- a/services/storage/src/simcore_service_storage/db.py +++ b/services/storage/src/simcore_service_storage/db.py @@ -1,10 +1,14 @@ import logging from aiohttp import web -from servicelib.aiopg_utils import (DataSourceName, - PostgresRetryPolicyUponInitialization, - create_pg_engine, init_pg_tables, - is_pg_responsive, raise_if_not_responsive) +from servicelib.aiopg_utils import ( + DataSourceName, + PostgresRetryPolicyUponInitialization, + create_pg_engine, + init_pg_tables, + is_pg_responsive, + raise_if_not_responsive, +) from tenacity import Retrying from .models import metadata @@ -12,26 +16,25 @@ log = logging.getLogger(__name__) -THIS_SERVICE_NAME = 'postgres' +THIS_SERVICE_NAME = "postgres" async def pg_engine(app: web.Application): pg_cfg = app[APP_CONFIG_KEY][THIS_SERVICE_NAME] dsn = DataSourceName( - application_name=f'{__name__}_{id(app)}', - database=pg_cfg['database'], - user=pg_cfg['user'], - password=pg_cfg['password'], - host=pg_cfg['host'], - port=pg_cfg['port'] - ) + application_name=f"{__name__}_{id(app)}", + database=pg_cfg["database"], + user=pg_cfg["user"], + password=pg_cfg["password"], + host=pg_cfg["host"], + port=pg_cfg["port"], + ) log.info("Creating pg engine for %s", dsn) for attempt in Retrying(**PostgresRetryPolicyUponInitialization(log).kwargs): with attempt: - engine = await 
create_pg_engine(dsn, - minsize=pg_cfg['minsize'], - maxsize=pg_cfg['maxsize'] + engine = await create_pg_engine( + dsn, minsize=pg_cfg["minsize"], maxsize=pg_cfg["maxsize"] ) await raise_if_not_responsive(engine) @@ -39,10 +42,10 @@ async def pg_engine(app: web.Application): log.info("Initializing tables for %s", dsn) init_pg_tables(dsn, schema=metadata) - assert engine # nosec + assert engine # nosec app[APP_DB_ENGINE_KEY] = engine - yield # ---------- + yield # ---------- if engine is not app.get(APP_DB_ENGINE_KEY): log.critical("app does not hold right db engine. Somebody has changed it??") @@ -50,11 +53,15 @@ async def pg_engine(app: web.Application): if engine: engine.close() await engine.wait_closed() - log.debug("engine '%s' after shutdown: closed=%s, size=%d", engine.dsn, engine.closed, engine.size) - + log.debug( + "engine '%s' after shutdown: closed=%s, size=%d", + engine.dsn, + engine.closed, + engine.size, + ) -async def is_service_responsive(app:web.Application): +async def is_service_responsive(app: web.Application): """ Returns true if the app can connect to db service """ @@ -63,7 +70,7 @@ async def is_service_responsive(app:web.Application): def setup_db(app: web.Application): - disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services",[]) + disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services", []) if THIS_SERVICE_NAME in disable_services: app[APP_DB_ENGINE_KEY] = None diff --git a/services/storage/src/simcore_service_storage/db_tokens.py b/services/storage/src/simcore_service_storage/db_tokens.py index 271a9705bf0..e54dab43d45 100644 --- a/services/storage/src/simcore_service_storage/db_tokens.py +++ b/services/storage/src/simcore_service_storage/db_tokens.py @@ -1,4 +1,3 @@ - import logging from typing import Tuple @@ -18,7 +17,7 @@ @retry(**PostgresRetryPolicyUponOperation(log).kwargs) async def _get_tokens_from_db(engine, userid): async with engine.acquire() as conn: - stmt = sa.select([tokens, ]).where(tokens.c.user_id == userid) + stmt = sa.select([tokens,]).where(tokens.c.user_id == userid) result = await conn.execute(stmt) row = await result.first() data = dict(row) if row else {} @@ -32,17 +31,22 @@ async def get_api_token_and_secret(request: web.Request, userid) -> Tuple[str, s # defaults from config if any, othewise None defaults = request.app[APP_CONFIG_KEY]["main"].get("test_datcore", {}) - api_token, api_secret = defaults.get('api_token'), defaults.get('api_secret') + api_token, api_secret = defaults.get("api_token"), defaults.get("api_secret") if engine: try: data = await _get_tokens_from_db(engine, userid) except DbApiError: # NOTE this shall not log as error since is a possible outcome with an alternative - log.warning("Cannot retrieve tokens for user %s in pgdb %s", userid, engine, exc_info=True) + log.warning( + "Cannot retrieve tokens for user %s in pgdb %s", + userid, + engine, + exc_info=True, + ) else: - data = data.get('token_data', {}) - api_token = data.get('token_key', api_token) - api_secret = data.get('token_secret', api_secret) + data = data.get("token_data", {}) + api_token = data.get("token_key", api_token) + api_secret = data.get("token_secret", api_secret) return api_token, api_secret diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index 24d6638e68d..0c57defb7e2 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -24,13 +24,26 @@ from 
servicelib.client_session import get_client_session from .datcore_wrapper import DatcoreWrapper -from .models import (DatasetMetaData, FileMetaData, FileMetaDataEx, - _location_from_id, file_meta_data, projects, - user_to_projects) +from .models import ( + DatasetMetaData, + FileMetaData, + FileMetaDataEx, + _location_from_id, + file_meta_data, + projects, + user_to_projects, +) from .s3 import get_config_s3 -from .settings import (APP_CONFIG_KEY, APP_DB_ENGINE_KEY, APP_DSM_KEY, - APP_S3_KEY, DATCORE_ID, DATCORE_STR, SIMCORE_S3_ID, - SIMCORE_S3_STR) +from .settings import ( + APP_CONFIG_KEY, + APP_DB_ENGINE_KEY, + APP_DSM_KEY, + APP_S3_KEY, + DATCORE_ID, + DATCORE_STR, + SIMCORE_S3_ID, + SIMCORE_S3_STR, +) # pylint: disable=no-value-for-parameter @@ -48,6 +61,7 @@ FileMetaDataExVec = List[FileMetaDataEx] DatasetMetaDataVec = List[DatasetMetaData] + async def _setup_dsm(app: web.Application): cfg = app[APP_CONFIG_KEY] @@ -64,16 +78,20 @@ async def _setup_dsm(app: web.Application): bucket_name = s3_cfg["bucket_name"] testing = main_cfg["testing"] - dsm = DataStorageManager(s3_client, engine, loop, pool, bucket_name, not testing, app) + dsm = DataStorageManager( + s3_client, engine, loop, pool, bucket_name, not testing, app + ) app[APP_DSM_KEY] = dsm yield - #clean up + # clean up + def setup_dsm(app: web.Application): app.cleanup_ctx.append(_setup_dsm) + @attr.s(auto_attribs=True) class DatCoreApiToken: api_token: str = None @@ -82,6 +100,7 @@ class DatCoreApiToken: def to_tuple(self): return (self.api_token, self.api_secret) + @attr.s(auto_attribs=True) class DataStorageManager: """ Data storage manager @@ -112,42 +131,39 @@ class DataStorageManager: https://blog.minio.io/part-5-5-publish-minio-events-via-postgresql-50f6cc7a7346 https://docs.minio.io/docs/minio-bucket-notification-guide.html """ + s3_client: S3Client engine: Engine loop: object pool: ThreadPoolExecutor simcore_bucket_name: str has_project_db: bool - app: web.Application=None + app: web.Application = None - datcore_tokens: Dict[str, DatCoreApiToken]=attr.Factory(dict) + datcore_tokens: Dict[str, DatCoreApiToken] = attr.Factory(dict) # TODO: perhaps can be used a cache? add a lifetime? 
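The datcore_tokens map above keeps one DatCoreApiToken per user for the lifetime
of the process; the TODO asks whether it should rather behave like a cache with a
lifetime. A minimal sketch of that idea, assuming a fixed TTL; CachedToken,
TokenCache and ttl_seconds are illustrative names, not part of this patch:

    import time
    from typing import Dict, Optional, Tuple

    import attr


    @attr.s(auto_attribs=True)
    class CachedToken:
        api_token: Optional[str] = None
        api_secret: Optional[str] = None
        expires_at: float = 0.0  # hypothetical lifetime marker

        def to_tuple(self) -> Tuple[Optional[str], Optional[str]]:
            return (self.api_token, self.api_secret)


    class TokenCache:
        """Per-user token store that forgets entries after ttl_seconds."""

        def __init__(self, ttl_seconds: float = 3600.0):
            self._ttl = ttl_seconds
            self._tokens: Dict[str, CachedToken] = {}

        def get(self, user_id: str) -> Tuple[Optional[str], Optional[str]]:
            entry = self._tokens.get(user_id)
            if entry is None or entry.expires_at < time.time():
                self._tokens.pop(user_id, None)  # drop stale entries lazily
                return CachedToken().to_tuple()  # same default as an unknown user
            return entry.to_tuple()

        def put(self, user_id: str, token: str, secret: str) -> None:
            self._tokens[user_id] = CachedToken(
                token, secret, time.time() + self._ttl
            )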
- - def _get_datcore_tokens(self, user_id: str)->Tuple[str, str]: - token = self.datcore_tokens.get(user_id, DatCoreApiToken()) # pylint: disable=E1101 + def _get_datcore_tokens(self, user_id: str) -> Tuple[str, str]: + # pylint: disable=no-member + token = self.datcore_tokens.get( + user_id, DatCoreApiToken() + ) return token.to_tuple() async def locations(self, user_id: str): locs = [] - simcore_s3 = { - "name" : SIMCORE_S3_STR, - "id" : SIMCORE_S3_ID - } + simcore_s3 = {"name": SIMCORE_S3_STR, "id": SIMCORE_S3_ID} locs.append(simcore_s3) ping_ok = await self.ping_datcore(user_id=user_id) if ping_ok: - datcore = { - "name" : DATCORE_STR, - "id" : DATCORE_ID - } + datcore = {"name": DATCORE_STR, "id": DATCORE_ID} locs.append(datcore) return locs @classmethod - def location_from_id(cls, location_id : str): + def location_from_id(cls, location_id: str): return _location_from_id(location_id) async def ping_datcore(self, user_id: str) -> bool: @@ -175,7 +191,9 @@ async def ping_datcore(self, user_id: str) -> bool: # pylint: disable=too-many-arguments # pylint: disable=too-many-branches # pylint: disable=too-many-statements - async def list_files(self, user_id: str, location: str, uuid_filter: str ="", regex: str="") -> FileMetaDataExVec: + async def list_files( + self, user_id: str, location: str, uuid_filter: str = "", regex: str = "" + ) -> FileMetaDataExVec: """ Returns a list of file paths Works for simcore.s3 and datcore @@ -187,7 +205,9 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re data = [] if location == SIMCORE_S3_STR: async with self.engine.acquire() as conn: - query = sa.select([file_meta_data]).where(file_meta_data.c.user_id == user_id) + query = sa.select([file_meta_data]).where( + file_meta_data.c.user_id == user_id + ) async for row in conn.execute(query): result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) @@ -201,16 +221,19 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re try: async with self.engine.acquire() as conn: joint_table = user_to_projects.join(projects) - query = sa.select([projects]).select_from(joint_table)\ + query = ( + sa.select([projects]) + .select_from(joint_table) .where(user_to_projects.c.user_id == user_id) + ) async for row in conn.execute(query): proj_data = dict(row.items()) uuid_name_dict[proj_data["uuid"]] = proj_data["name"] - wb = proj_data['workbench'] + wb = proj_data["workbench"] for node in wb.keys(): - uuid_name_dict[node] = wb[node]['label'] + uuid_name_dict[node] = wb[node]["label"] except DBAPIError as _err: logger.exception("Error querying database for project names") @@ -227,20 +250,34 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re if d.node_id in uuid_name_dict: d.node_name = uuid_name_dict[d.node_id] - d.raw_file_path = str(Path(d.project_id) / Path(d.node_id) / Path(d.file_name)) + d.raw_file_path = str( + Path(d.project_id) / Path(d.node_id) / Path(d.file_name) + ) d.display_file_path = d.raw_file_path d.file_id = d.file_uuid if d.node_name and d.project_name: - d.display_file_path = str(Path(d.project_name) / Path(d.node_name) / Path(d.file_name)) + d.display_file_path = str( + Path(d.project_name) + / Path(d.node_name) + / Path(d.file_name) + ) async with self.engine.acquire() as conn: - query = file_meta_data.update().\ - where(and_(file_meta_data.c.node_id==d.node_id, - file_meta_data.c.user_id==d.user_id)).\ - values(project_name=d.project_name, - node_name = d.node_name, + query = ( 
+ file_meta_data.update() + .where( + and_( + file_meta_data.c.node_id == d.node_id, + file_meta_data.c.user_id == d.user_id, + ) + ) + .values( + project_name=d.project_name, + node_name=d.node_name, raw_file_path=d.raw_file_path, file_id=d.file_id, - display_file_path=d.display_file_path) + display_file_path=d.display_file_path, + ) + ) await conn.execute(query) clean_data.append(dx) @@ -251,21 +288,40 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re # MaG: This is inefficient: Do this automatically when file is modified _loop = asyncio.get_event_loop() session = aiobotocore.get_session(loop=_loop) - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - aws_secret_access_key=self.s3_client.secret_key) as client: - responses = await asyncio.gather(*[client.list_objects_v2(Bucket=_d.bucket_name, Prefix=_d.object_name) for _d in [__d.fmd for __d in data]]) + async with session.create_client( + "s3", + endpoint_url=self.s3_client.endpoint_url, + aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key, + ) as client: + responses = await asyncio.gather( + *[ + client.list_objects_v2( + Bucket=_d.bucket_name, Prefix=_d.object_name + ) + for _d in [__d.fmd for __d in data] + ] + ) for dx, resp in zip(data, responses): - if 'Contents' in resp: + if "Contents" in resp: clean_data.append(dx) d = dx.fmd - d.file_size = resp['Contents'][0]['Size'] - d.last_modified = str(resp['Contents'][0]['LastModified']) + d.file_size = resp["Contents"][0]["Size"] + d.last_modified = str(resp["Contents"][0]["LastModified"]) async with self.engine.acquire() as conn: - query = file_meta_data.update().\ - where(and_(file_meta_data.c.node_id==d.node_id, - file_meta_data.c.user_id==d.user_id)).\ - values(file_size=d.file_size, - last_modified=d.last_modified) + query = ( + file_meta_data.update() + .where( + and_( + file_meta_data.c.node_id == d.node_id, + file_meta_data.c.user_id == d.user_id, + ) + ) + .values( + file_size=d.file_size, + last_modified=d.last_modified, + ) + ) await conn.execute(query) data = clean_data @@ -298,11 +354,15 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re return data - async def list_files_dataset(self, user_id: str, location: str, dataset_id: str)->FileMetaDataVec: + async def list_files_dataset( + self, user_id: str, location: str, dataset_id: str + ) -> FileMetaDataVec: # this is a cheap shot, needs fixing once storage/db is in sync data = [] if location == SIMCORE_S3_STR: - data = await self.list_files(user_id, location, uuid_filter=dataset_id+"/") + data = await self.list_files( + user_id, location, uuid_filter=dataset_id + "/" + ) elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) @@ -324,12 +384,17 @@ async def list_datasets(self, user_id: str, location: str) -> DatasetMetaDataVec try: async with self.engine.acquire() as conn: joint_table = user_to_projects.join(projects) - query = sa.select([projects]).select_from(joint_table)\ - .where(user_to_projects.c.user_id == user_id) + query = ( + sa.select([projects]) + .select_from(joint_table) + .where(user_to_projects.c.user_id == user_id) + ) async for row in conn.execute(query): proj_data = dict(row.items()) - dmd = DatasetMetaData(dataset_id=proj_data["uuid"], - display_name=proj_data["name"]) + dmd = DatasetMetaData( + dataset_id=proj_data["uuid"], 
+ display_name=proj_data["name"], + ) data.append(dmd) except DBAPIError as _err: logger.exception("Error querying database for project names") @@ -340,12 +405,18 @@ async def list_datasets(self, user_id: str, location: str) -> DatasetMetaDataVec return data - async def list_file(self, user_id: str, location: str, file_uuid: str) -> FileMetaDataEx: + async def list_file( + self, user_id: str, location: str, file_uuid: str + ) -> FileMetaDataEx: if location == SIMCORE_S3_STR: # TODO: get engine from outside async with self.engine.acquire() as conn: - query = sa.select([file_meta_data]).where(and_(file_meta_data.c.user_id == user_id, - file_meta_data.c.file_uuid == file_uuid)) + query = sa.select([file_meta_data]).where( + and_( + file_meta_data.c.user_id == user_id, + file_meta_data.c.file_uuid == file_uuid, + ) + ) async for row in conn.execute(query): result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) @@ -354,7 +425,7 @@ async def list_file(self, user_id: str, location: str, file_uuid: str) -> FileMe elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) _dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) - data = [] #await _dcw.list_file(file_uuid) + data = [] # await _dcw.list_file(file_uuid) return data async def delete_file(self, user_id: str, location: str, file_uuid: str): @@ -371,14 +442,20 @@ async def delete_file(self, user_id: str, location: str, file_uuid: str): if location == SIMCORE_S3_STR: to_delete = [] async with self.engine.acquire() as conn: - query = sa.select([file_meta_data]).where(file_meta_data.c.file_uuid == file_uuid) + query = sa.select([file_meta_data]).where( + file_meta_data.c.file_uuid == file_uuid + ) async for row in conn.execute(query): result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) # make sure this is the current user if d.user_id == user_id: - if self.s3_client.remove_objects(d.bucket_name, [d.object_name]): - stmt = file_meta_data.delete().where(file_meta_data.c.file_uuid == file_uuid) + if self.s3_client.remove_objects( + d.bucket_name, [d.object_name] + ): + stmt = file_meta_data.delete().where( + file_meta_data.c.file_uuid == file_uuid + ) to_delete.append(stmt) async with self.engine.acquire() as conn: @@ -388,11 +465,13 @@ async def delete_file(self, user_id: str, location: str, file_uuid: str): elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) - #destination, filename = _parse_datcore(file_uuid) + # destination, filename = _parse_datcore(file_uuid) file_id = file_uuid return await dcw.delete_file_by_id(file_id) - async def upload_file_to_datcore(self, user_id: str, local_file_path: str, destination_id: str): # pylint: disable=W0613 + async def upload_file_to_datcore( + self, user_id: str, local_file_path: str, destination_id: str + ): # pylint: disable=W0613 # uploads a locally available file to dat core given the storage path, optionally attached some meta data api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) @@ -400,16 +479,16 @@ async def upload_file_to_datcore(self, user_id: str, local_file_path: str, desti # actually we have to query the master db - async def upload_link(self, user_id: str, file_uuid: str): - @retry(**postgres_service_retry_policy_kwargs) async def _execute_query(): async with self.engine.acquire() as conn: fmd = 
FileMetaData() fmd.simcore_from_uuid(file_uuid, self.simcore_bucket_name) fmd.user_id = user_id - query = sa.select([file_meta_data]).where(file_meta_data.c.file_uuid == file_uuid) + query = sa.select([file_meta_data]).where( + file_meta_data.c.file_uuid == file_uuid + ) # if file already exists, we might want to update a time-stamp rows = await conn.execute(query) exists = await rows.scalar() @@ -431,7 +510,9 @@ async def copy_file_s3_s3(self, user_id: str, dest_uuid: str, source_uuid: str): from_object_name = source_uuid from_bucket_object_name = os.path.join(from_bucket, from_object_name) # FIXME: This is not async! - self.s3_client.copy_object(to_bucket_name, to_object_name, from_bucket_object_name) + self.s3_client.copy_object( + to_bucket_name, to_object_name, from_bucket_object_name + ) # update db async with self.engine.acquire() as conn: fmd = FileMetaData() @@ -440,7 +521,9 @@ async def copy_file_s3_s3(self, user_id: str, dest_uuid: str, source_uuid: str): ins = file_meta_data.insert().values(**vars(fmd)) await conn.execute(ins) - async def copy_file_s3_datcore(self, user_id: str, dest_uuid: str, source_uuid: str): + async def copy_file_s3_datcore( + self, user_id: str, dest_uuid: str, source_uuid: str + ): # source is s3, get link and copy to datcore bucket_name = self.simcore_bucket_name object_name = source_uuid @@ -451,40 +534,60 @@ async def copy_file_s3_datcore(self, user_id: str, dest_uuid: str, source_uuid: session = get_client_session(self.app) async with session.get(url) as resp: if resp.status == 200: - f = await aiofiles.open(local_file_path, mode='wb') + f = await aiofiles.open(local_file_path, mode="wb") await f.write(await resp.read()) await f.close() # and then upload - await self.upload_file_to_datcore(user_id=user_id, local_file_path=local_file_path, - destination_id=dest_uuid) + await self.upload_file_to_datcore( + user_id=user_id, + local_file_path=local_file_path, + destination_id=dest_uuid, + ) shutil.rmtree(tmp_dirpath) - async def copy_file_datcore_s3(self, user_id: str, dest_uuid: str, source_uuid: str, filename_missing: bool=False): + async def copy_file_datcore_s3( + self, + user_id: str, + dest_uuid: str, + source_uuid: str, + filename_missing: bool = False, + ): # 2 steps: Get download link for local copy, the upload link to s3 # TODO: This should be a redirect stream! 
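The TODO above asks for a redirect stream instead of the temp-file round trip
implemented below. A sketch of that idea with plain aiohttp, assuming both
presigned links are already known; stream_copy and the chunk size are
illustrative, and a presigned S3 PUT usually requires a known Content-Length,
which is one reason buffering to a local file is the safer route here:

    import aiohttp


    async def stream_copy(
        session: aiohttp.ClientSession, dc_link: str, s3_link: str
    ) -> None:
        # Feed the GET body chunk by chunk into the PUT request,
        # so the payload never touches the local disk.
        async with session.get(dc_link) as download:
            download.raise_for_status()

            async def chunks():
                async for chunk in download.content.iter_chunked(1024 * 1024):
                    yield chunk

            async with session.put(s3_link, data=chunks()) as upload:
                upload.raise_for_status()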
- dc_link, filename = await self.download_link_datcore(user_id=user_id, file_id=source_uuid) + dc_link, filename = await self.download_link_datcore( + user_id=user_id, file_id=source_uuid + ) if filename_missing: - dest_uuid = str(Path(dest_uuid)/ filename) + dest_uuid = str(Path(dest_uuid) / filename) s3_upload_link = await self.upload_link(user_id, dest_uuid) tmp_dirpath = tempfile.mkdtemp() - local_file_path = os.path.join(tmp_dirpath,filename) + local_file_path = os.path.join(tmp_dirpath, filename) session = get_client_session(self.app) async with session.get(dc_link) as resp: if resp.status == 200: - f = await aiofiles.open(local_file_path, mode='wb') + f = await aiofiles.open(local_file_path, mode="wb") await f.write(await resp.read()) await f.close() s3_upload_link = URL(s3_upload_link) - async with session.put(s3_upload_link, data=Path(local_file_path).open('rb')) as resp: + async with session.put( + s3_upload_link, data=Path(local_file_path).open("rb") + ) as resp: if resp.status > 299: _response_text = await resp.text() return dest_uuid - async def copy_file(self, user_id: str, dest_location: str, dest_uuid: str, source_location: str, source_uuid: str): + async def copy_file( + self, + user_id: str, + dest_location: str, + dest_uuid: str, + source_location: str, + source_uuid: str, + ): if source_location == SIMCORE_S3_STR: if dest_location == DATCORE_STR: await self.copy_file_s3_datcore(user_id, dest_uuid, source_uuid) @@ -496,21 +599,23 @@ async def copy_file(self, user_id: str, dest_location: str, dest_uuid: str, sour if dest_location == SIMCORE_S3_STR: await self.copy_file_datcore_s3(user_id, dest_uuid, source_uuid) - async def download_link_s3(self, file_uuid: str)->str: + async def download_link_s3(self, file_uuid: str) -> str: link = None bucket_name = self.simcore_bucket_name object_name = file_uuid link = self.s3_client.create_presigned_get_url(bucket_name, object_name) return link - async def download_link_datcore(self, user_id: str, file_id: str)->Dict[str,str]: + async def download_link_datcore(self, user_id: str, file_id: str) -> Dict[str, str]: link = "" api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) link, filename = await dcw.download_link_by_id(file_id) return link, filename - async def deep_copy_project_simcore_s3(self, user_id: str, source_project, destination_project, node_mapping): + async def deep_copy_project_simcore_s3( + self, user_id: str, source_project, destination_project, node_mapping + ): """ Parses a given source project and copies all related files to the destination project Since all files are organized as @@ -534,21 +639,27 @@ async def deep_copy_project_simcore_s3(self, user_id: str, source_project, desti # build up naming map based on labels uuid_name_dict = {} uuid_name_dict[dest_folder] = destination_project["name"] - for src_node_id, src_node in source_project['workbench'].items(): + for src_node_id, src_node in source_project["workbench"].items(): new_node_id = node_mapping.get(src_node_id) if new_node_id is not None: - uuid_name_dict[new_node_id] = src_node['label'] + uuid_name_dict[new_node_id] = src_node["label"] # Step 1: List all objects for this project replace them with the destination object name and do a copy at the same time collect some names _loop = asyncio.get_event_loop() session = aiobotocore.get_session(loop=_loop) - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - 
aws_secret_access_key=self.s3_client.secret_key) as client: - response = await client.list_objects_v2(Bucket=self.simcore_bucket_name, Prefix=source_folder) + async with session.create_client( + "s3", + endpoint_url=self.s3_client.endpoint_url, + aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key, + ) as client: + response = await client.list_objects_v2( + Bucket=self.simcore_bucket_name, Prefix=source_folder + ) if "Contents" in response: - for f in response['Contents']: - source_object_name = f['Key'] + for f in response["Contents"]: + source_object_name = f["Key"] source_object_parts = Path(source_object_name).parts if len(source_object_parts) == 3: @@ -556,92 +667,130 @@ async def deep_copy_project_simcore_s3(self, user_id: str, source_project, desti new_node_id = node_mapping.get(old_node_id) if new_node_id is not None: old_filename = source_object_parts[2] - dest_object_name = str(Path(dest_folder) / new_node_id / old_filename) - copy_source = {'Bucket' : self.simcore_bucket_name, 'Key': source_object_name} - response = await client.copy_object(CopySource=copy_source, Bucket=self.simcore_bucket_name, Key=dest_object_name) + dest_object_name = str( + Path(dest_folder) / new_node_id / old_filename + ) + copy_source = { + "Bucket": self.simcore_bucket_name, + "Key": source_object_name, + } + response = await client.copy_object( + CopySource=copy_source, + Bucket=self.simcore_bucket_name, + Key=dest_object_name, + ) else: # This may happen once we have shared/home folders logger.info("len(object.parts != 3") - # Step 2: List all references in outputs that point to datcore and copy over - for node_id, node in destination_project['workbench'].items(): + for node_id, node in destination_project["workbench"].items(): outputs = node.get("outputs") if outputs is not None: for _output_key, output in outputs.items(): - if "store" in output and output["store"]==DATCORE_ID: + if "store" in output and output["store"] == DATCORE_ID: src = output["path"] dest = str(Path(dest_folder) / node_id) logger.info("Need to copy %s to %s", src, dest) - dest = await self.copy_file_datcore_s3(user_id=user_id, dest_uuid=dest, source_uuid=src, filename_missing=True) + dest = await self.copy_file_datcore_s3( + user_id=user_id, + dest_uuid=dest, + source_uuid=src, + filename_missing=True, + ) # and change the dest project accordingly output["store"] = SIMCORE_S3_ID - output['path'] = dest - elif "store" in output and output["store"]==SIMCORE_S3_ID: - source = output['path'] - dest = dest = str(Path(dest_folder) / node_id / Path(source).name) + output["path"] = dest + elif "store" in output and output["store"] == SIMCORE_S3_ID: + source = output["path"] + dest = dest = str( + Path(dest_folder) / node_id / Path(source).name + ) output["store"] = SIMCORE_S3_ID - output['path'] = dest + output["path"] = dest # step 3: list files first to create fmds session = aiobotocore.get_session(loop=_loop) fmds = [] - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - aws_secret_access_key=self.s3_client.secret_key) as client: - response = await client.list_objects_v2(Bucket=self.simcore_bucket_name, Prefix=dest_folder+"/") - if 'Contents' in response: - for f in response['Contents']: + async with session.create_client( + "s3", + endpoint_url=self.s3_client.endpoint_url, + aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key, + ) as client: + response = await 
client.list_objects_v2( + Bucket=self.simcore_bucket_name, Prefix=dest_folder + "/" + ) + if "Contents" in response: + for f in response["Contents"]: fmd = FileMetaData() fmd.simcore_from_uuid(f["Key"], self.simcore_bucket_name) fmd.project_name = uuid_name_dict.get(dest_folder, "Untitled") fmd.node_name = uuid_name_dict.get(fmd.node_id, "Untitled") fmd.raw_file_path = fmd.file_uuid - fmd.display_file_path = str(Path(fmd.project_name) / fmd.node_name / fmd.file_name) + fmd.display_file_path = str( + Path(fmd.project_name) / fmd.node_name / fmd.file_name + ) fmd.user_id = user_id - fmd.file_size = f['Size'] - fmd.last_modified = str(f['LastModified']) + fmd.file_size = f["Size"] + fmd.last_modified = str(f["LastModified"]) fmds.append(fmd) - # step 4 sync db async with self.engine.acquire() as conn: for fmd in fmds: - query = sa.select([file_meta_data]).where(file_meta_data.c.file_uuid == fmd.file_uuid) + query = sa.select([file_meta_data]).where( + file_meta_data.c.file_uuid == fmd.file_uuid + ) # if file already exists, we might w rows = await conn.execute(query) exists = await rows.scalar() if exists: - delete_me = file_meta_data.delete().where(file_meta_data.c.file_uuid == fmd.file_uuid) + delete_me = file_meta_data.delete().where( + file_meta_data.c.file_uuid == fmd.file_uuid + ) await conn.execute(delete_me) ins = file_meta_data.insert().values(**vars(fmd)) await conn.execute(ins) - async def delete_project_simcore_s3(self, user_id: str, project_id: str, node_id: Optional[str]) -> web.Response: + async def delete_project_simcore_s3( + self, user_id: str, project_id: str, node_id: Optional[str] + ) -> web.Response: """ Deletes all files from a given node in a project in simcore.s3 and updated db accordingly. If node_id is not given, then all the project files db entries are deleted. 
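            A hypothetical call, for illustration only (the ids are made up):

                await dsm.delete_project_simcore_s3(
                    user_id="42",
                    project_id="some-project-uuid",
                    node_id=None,  # None removes the whole project's files
                )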
""" async with self.engine.acquire() as conn: delete_me = file_meta_data.delete().where( - and_(file_meta_data.c.user_id == user_id, - file_meta_data.c.project_id == project_id - )) + and_( + file_meta_data.c.user_id == user_id, + file_meta_data.c.project_id == project_id, + ) + ) if node_id: delete_me = delete_me.where(file_meta_data.c.node_id == node_id) await conn.execute(delete_me) _loop = asyncio.get_event_loop() session = aiobotocore.get_session(loop=_loop) - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - aws_secret_access_key=self.s3_client.secret_key) as client: - response = await client.list_objects_v2(Bucket=self.simcore_bucket_name, - Prefix=f"{project_id}/{node_id}/" if node_id else f"{project_id}/" - ) + async with session.create_client( + "s3", + endpoint_url=self.s3_client.endpoint_url, + aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key, + ) as client: + response = await client.list_objects_v2( + Bucket=self.simcore_bucket_name, + Prefix=f"{project_id}/{node_id}/" if node_id else f"{project_id}/", + ) if "Contents" in response: objects_to_delete = [] - for f in response['Contents']: - objects_to_delete.append( { 'Key': f['Key'] }) + for f in response["Contents"]: + objects_to_delete.append({"Key": f["Key"]}) if objects_to_delete: - response = await client.delete_objects(Bucket=self.simcore_bucket_name, Delete={'Objects' : objects_to_delete}) + response = await client.delete_objects( + Bucket=self.simcore_bucket_name, + Delete={"Objects": objects_to_delete}, + ) return response diff --git a/services/storage/src/simcore_service_storage/handlers.py b/services/storage/src/simcore_service_storage/handlers.py index ce024294bab..6e673481a39 100644 --- a/services/storage/src/simcore_service_storage/handlers.py +++ b/services/storage/src/simcore_service_storage/handlers.py @@ -14,77 +14,73 @@ log = logging.getLogger(__name__) -file_schema = FileMetaDataSchema() +file_schema = FileMetaDataSchema() files_schema = FileMetaDataSchema(many=True) - async def check_health(request: web.Request): - log.debug("CHECK HEALTH INCOMING PATH %s",request.path) + log.debug("CHECK HEALTH INCOMING PATH %s", request.path) await extract_and_validate(request) return { - 'name':__name__.split('.')[0], - 'version': __version__, - 'status': 'SERVICE_RUNNING' + "name": __name__.split(".")[0], + "version": __version__, + "status": "SERVICE_RUNNING", } async def check_action(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert body, "body %s" % body # nosec - if params['action'] == 'fail': + if params["action"] == "fail": raise ValueError("some randome failure") # echo's input FIXME: convert to dic # FIXME: output = fake_schema.dump(body) output = { - "path_value" : params.get('action'), - "query_value": query.get('data'), - "body_value" :{ - "key1": 1, #body.body_value.key1, - "key2": 0 #body.body_value.key2, - } + "path_value": params.get("action"), + "query_value": query.get("data"), + "body_value": { + "key1": 1, # body.body_value.key1, + "key2": 0, # body.body_value.key2, + }, } return output async def get_storage_locations(request: web.Request): - log.debug("CHECK LOCATION PATH %s %s",request.path, request.url) + log.debug("CHECK 
LOCATION PATH %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert not params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert not params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert query["user_id"] # nosec + assert query["user_id"] # nosec user_id = query["user_id"] dsm = await _prepare_storage_manager(params, query, request) locs = await dsm.locations(user_id) - return { - 'error': None, - 'data': locs - } + return {"error": None, "data": locs} async def get_datasets_metadata(request: web.Request): - log.debug("GET METADATA DATASETS %s %s",request.path, request.url) + log.debug("GET METADATA DATASETS %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -95,22 +91,20 @@ async def get_datasets_metadata(request: web.Request): # To implement data = await dsm.list_datasets(user_id, location) - return { - 'error': None, - 'data': data - } + return {"error": None, "data": data} + async def get_files_metadata(request: web.Request): - log.debug("GET FILES METADATA %s %s",request.path, request.url) + log.debug("GET FILES METADATA %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -121,32 +115,32 @@ async def get_files_metadata(request: web.Request): log.debug("list files %s %s %s", user_id, location, uuid_filter) - data = await dsm.list_files(user_id=user_id, location=location, uuid_filter=uuid_filter) + data = await dsm.list_files( + user_id=user_id, location=location, uuid_filter=uuid_filter + ) data_as_dict = [] for d in data: - log.info("DATA %s",attr.asdict(d.fmd)) - data_as_dict.append({**attr.asdict(d.fmd), 'parent_id': d.parent_id}) + log.info("DATA %s", attr.asdict(d.fmd)) + data_as_dict.append({**attr.asdict(d.fmd), "parent_id": d.parent_id}) - envelope = { - 'error': None, - 'data': data_as_dict - } + envelope = {"error": None, "data": data_as_dict} return envelope + async def get_files_metadata_dataset(request: web.Request): - log.debug("GET FILES METADATA DATASET %s %s",request.path, request.url) + log.debug("GET FILES METADATA DATASET %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, 
"body %s" % body # nosec - assert params["location_id"] # nosec - assert params["dataset_id"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["dataset_id"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -158,17 +152,16 @@ async def get_files_metadata_dataset(request: web.Request): log.debug("list files %s %s %s", user_id, location, dataset_id) - data = await dsm.list_files_dataset(user_id=user_id, location=location, dataset_id=dataset_id) + data = await dsm.list_files_dataset( + user_id=user_id, location=location, dataset_id=dataset_id + ) data_as_dict = [] for d in data: - log.info("DATA %s",attr.asdict(d.fmd)) - data_as_dict.append({**attr.asdict(d.fmd), 'parent_id': d.parent_id}) + log.info("DATA %s", attr.asdict(d.fmd)) + data_as_dict.append({**attr.asdict(d.fmd), "parent_id": d.parent_id}) - envelope = { - 'error': None, - 'data': data_as_dict - } + envelope = {"error": None, "data": data_as_dict} return envelope @@ -176,13 +169,13 @@ async def get_files_metadata_dataset(request: web.Request): async def get_file_metadata(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -194,9 +187,9 @@ async def get_file_metadata(request: web.Request): data = await dsm.list_file(user_id=user_id, location=location, file_uuid=file_uuid) envelope = { - 'error': None, - 'data': {**attr.asdict(data.fmd), 'parent_id': data.parent_id} - } + "error": None, + "data": {**attr.asdict(data.fmd), "parent_id": data.parent_id}, + } return envelope @@ -204,13 +197,13 @@ async def get_file_metadata(request: web.Request): async def update_file_meta_data(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] _user_id = query["user_id"] @@ -223,13 +216,13 @@ async def update_file_meta_data(request: web.Request): async def download_file(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = 
params["location_id"] user_id = query["user_id"] @@ -242,20 +235,15 @@ async def download_file(request: web.Request): else: link, _filename = await dsm.download_link_datcore(user_id, file_uuid) - return { - 'error': None, - 'data': { - "link": link - } - } + return {"error": None, "data": {"link": link}} async def upload_file(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -268,29 +256,29 @@ async def upload_file(request: web.Request): source_uuid = query["extra_source"] source_id = query["extra_location"] source_location = dsm.location_from_id(source_id) - link = await dsm.copy_file(user_id=user_id, dest_location=location, - dest_uuid=file_uuid, source_location=source_location, source_uuid=source_uuid) + link = await dsm.copy_file( + user_id=user_id, + dest_location=location, + dest_uuid=file_uuid, + source_location=source_location, + source_uuid=source_uuid, + ) else: link = await dsm.upload_link(user_id=user_id, file_uuid=file_uuid) - return { - 'error': None, - 'data': { - "link":link - } - } + return {"error": None, "data": {"link": link}} async def delete_file(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -298,57 +286,62 @@ async def delete_file(request: web.Request): dsm = await _prepare_storage_manager(params, query, request) location = dsm.location_from_id(location_id) - _discard = await dsm.delete_file(user_id=user_id, location=location, file_uuid=file_uuid) + _discard = await dsm.delete_file( + user_id=user_id, location=location, file_uuid=file_uuid + ) - return { - 'error': None, - 'data': None - } + return {"error": None, "data": None} async def create_folders_from_project(request: web.Request): - #FIXME: Update openapi-core. Fails with additionalProperties https://github.com/p1c2u/openapi-core/issues/124. Fails with project + # FIXME: Update openapi-core. Fails with additionalProperties https://github.com/p1c2u/openapi-core/issues/124. 
Fails with project # params, query, body = await extract_and_validate(request) user_id = request.query.get("user_id") body = await request.json() - source_project = body.get('source', {}) - destination_project = body.get('destination', {}) - nodes_map = body.get('nodes_map', {}) + source_project = body.get("source", {}) + destination_project = body.get("destination", {}) + nodes_map = body.get("nodes_map", {}) - assert set(nodes_map.keys()) == set(source_project['workbench'].keys()) # nosec - assert set(nodes_map.values()) == set(destination_project['workbench'].keys()) # nosec + assert set(nodes_map.keys()) == set(source_project["workbench"].keys()) # nosec + assert set(nodes_map.values()) == set( # nosec + destination_project["workbench"].keys() # nosec + ) # nosec # TODO: validate project with jsonschema instead?? - params = { "location_id" : SIMCORE_S3_ID } - query = { "user_id": user_id} + params = {"location_id": SIMCORE_S3_ID} + query = {"user_id": user_id} dsm = await _prepare_storage_manager(params, query, request) - await dsm.deep_copy_project_simcore_s3(user_id, source_project, destination_project, nodes_map) + await dsm.deep_copy_project_simcore_s3( + user_id, source_project, destination_project, nodes_map + ) + + raise web.HTTPCreated( + text=json.dumps(destination_project), content_type="application/json" + ) - raise web.HTTPCreated(text=json.dumps(destination_project), - content_type='application/json') async def delete_folders_of_project(request: web.Request): - folder_id = request.match_info['folder_id'] + folder_id = request.match_info["folder_id"] user_id = request.query.get("user_id") node_id = request.query.get("node_id", None) - params = { "location_id" : SIMCORE_S3_ID } - query = { "user_id": user_id} + params = {"location_id": SIMCORE_S3_ID} + query = {"user_id": user_id} dsm = await _prepare_storage_manager(params, query, request) await dsm.delete_project_simcore_s3(user_id, folder_id, node_id) - raise web.HTTPNoContent(content_type='application/json') - - - + raise web.HTTPNoContent(content_type="application/json") # HELPERS ----------------------------------------------------- INIT_STR = "init" -async def _prepare_storage_manager(params, query, request: web.Request) -> DataStorageManager: + +async def _prepare_storage_manager( + params, query, request: web.Request +) -> DataStorageManager: dsm = request.app[APP_DSM_KEY] user_id = query.get("user_id") diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 7552a81fdb8..d5caa05d8b4 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -7,17 +7,21 @@ import attr -from simcore_postgres_database.storage_models import (file_meta_data, metadata, - projects, tokens, - user_to_projects, users) -from simcore_service_storage.settings import (DATCORE_STR, SIMCORE_S3_ID, - SIMCORE_S3_STR) +from simcore_postgres_database.storage_models import ( + file_meta_data, + metadata, + projects, + tokens, + user_to_projects, + users, +) +from simcore_service_storage.settings import DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR -#FIXME: W0611:Unused UUID imported from sqlalchemy.dialects.postgresql -#from sqlalchemy.dialects.postgresql import UUID +# FIXME: W0611:Unused UUID imported from sqlalchemy.dialects.postgresql +# from sqlalchemy.dialects.postgresql import UUID -#FIXME: R0902: Too many instance attributes (11/7) (too-many-instance-attributes) -#pylint: disable=R0902 +# FIXME: R0902: 
Too many instance attributes (11/7) (too-many-instance-attributes) +# pylint: disable=R0902 def _parse_datcore(file_uuid: str) -> Tuple[str, str]: @@ -29,19 +33,15 @@ def _parse_datcore(file_uuid: str) -> Tuple[str, str]: return destination, file_name + def _locations(): # TODO: so far this is hardcoded - simcore_s3 = { - "name" : SIMCORE_S3_STR, - "id" : 0 - } - datcore = { - "name" : DATCORE_STR, - "id" : 1 - } + simcore_s3 = {"name": SIMCORE_S3_STR, "id": 0} + datcore = {"name": DATCORE_STR, "id": 1} return [simcore_s3, datcore] -def _location_from_id(location_id : str) ->str: + +def _location_from_id(location_id: str) -> str: # TODO create a map to sync _location_from_id and _location_from_str loc_str = "undefined" if location_id == "0": @@ -51,7 +51,8 @@ def _location_from_id(location_id : str) ->str: return loc_str -def _location_from_str(location : str) ->str: + +def _location_from_str(location: str) -> str: intstr = "undefined" if location == SIMCORE_S3_STR: intstr = "0" @@ -60,10 +61,12 @@ def _location_from_str(location : str) ->str: return intstr + @attr.s(auto_attribs=True) class DatasetMetaData: - dataset_id: str="" - display_name: str="" + dataset_id: str = "" + display_name: str = "" + class FileMetaData: """ This is a proposal, probably no everything is needed. @@ -110,7 +113,8 @@ class FileMetaData: state: on of OK, UPLOADING, DELETED """ - #pylint: disable=attribute-defined-outside-init + + # pylint: disable=attribute-defined-outside-init def simcore_from_uuid(self, file_uuid: str, bucket_name: str): parts = file_uuid.split("/") if len(parts) == 3: @@ -124,31 +128,35 @@ def simcore_from_uuid(self, file_uuid: str, bucket_name: str): self.file_uuid = file_uuid self.file_id = file_uuid self.raw_file_path = self.file_uuid - self.display_file_path = str(Path("not") / Path("yet") / Path("implemented")) + self.display_file_path = str( + Path("not") / Path("yet") / Path("implemented") + ) self.created_at = str(datetime.datetime.now()) self.last_modified = self.created_at self.file_size = -1 def __str__(self): d = attr.asdict(self) - _str ="" + _str = "" for _d in d: _str += " {0: <25}: {1}\n".format(_d, str(d[_d])) return _str attr.s( - these={c.name:attr.ib(default=None) for c in file_meta_data.c}, + these={c.name: attr.ib(default=None) for c in file_meta_data.c}, init=True, - kw_only=True)(FileMetaData) + kw_only=True, +)(FileMetaData) @attr.s(auto_attribs=True) -class FileMetaDataEx(): +class FileMetaDataEx: """Extend the base type by some additional attributes that shall not end up in the db """ + fmd: FileMetaData - parent_id: str="" + parent_id: str = "" def __str__(self): _str = str(self.fmd) @@ -164,5 +172,5 @@ def __str__(self): "FileMetaDataEx", "projects", "users", - "user_to_projects" + "user_to_projects", ] diff --git a/services/storage/src/simcore_service_storage/resources.py b/services/storage/src/simcore_service_storage/resources.py index 4d249f1ffec..a7a22e67d4e 100644 --- a/services/storage/src/simcore_service_storage/resources.py +++ b/services/storage/src/simcore_service_storage/resources.py @@ -12,7 +12,4 @@ ) -__all__ = ( - 'resources', - 'RSC_CONFIG_DIR_KEY' -) +__all__ = ("resources", "RSC_CONFIG_DIR_KEY") diff --git a/services/storage/src/simcore_service_storage/rest.py b/services/storage/src/simcore_service_storage/rest.py index 1efae24187e..05e3a989b05 100644 --- a/services/storage/src/simcore_service_storage/rest.py +++ b/services/storage/src/simcore_service_storage/rest.py @@ -32,7 +32,7 @@ def setup(app: web.Application): """ log.debug("Setting up %s 
...", __name__) - spec_path = resources.get_path('api/v0/openapi.yaml') + spec_path = resources.get_path("api/v0/openapi.yaml") with spec_path.open() as fh: spec_dict = yaml.safe_load(fh) api_specs = openapi_core.create_spec(spec_dict, spec_path.as_uri()) @@ -44,7 +44,7 @@ def setup(app: web.Application): routes = rest_routes.create(api_specs) app.router.add_routes(routes) - log.debug("routes:\n\t%s", "\n\t".join(map(str, routes)) ) + log.debug("routes:\n\t%s", "\n\t".join(map(str, routes))) # Enable error, validation and envelop middleware on API routes base_path = get_base_path(api_specs) @@ -54,6 +54,4 @@ def setup(app: web.Application): # alias setup_rest = setup -__all__ = ( - 'setup_rest' -) +__all__ = "setup_rest" diff --git a/services/storage/src/simcore_service_storage/rest_config.py b/services/storage/src/simcore_service_storage/rest_config.py index d835198d6c6..c05e30c90ea 100644 --- a/services/storage/src/simcore_service_storage/rest_config.py +++ b/services/storage/src/simcore_service_storage/rest_config.py @@ -8,12 +8,8 @@ from .settings import APP_OPENAPI_SPECS_KEY -CONFIG_SECTION_NAME: str = 'rest' +CONFIG_SECTION_NAME: str = "rest" schema: T.Dict = minimal_addon_schema() -__all__ = ( - 'APP_OPENAPI_SPECS_KEY', - 'CONFIG_SECTION_NAME', - 'schema' -) +__all__ = ("APP_OPENAPI_SPECS_KEY", "CONFIG_SECTION_NAME", "schema") diff --git a/services/storage/src/simcore_service_storage/rest_models.py b/services/storage/src/simcore_service_storage/rest_models.py index 177ef3153da..f3b2893fe6e 100644 --- a/services/storage/src/simcore_service_storage/rest_models.py +++ b/services/storage/src/simcore_service_storage/rest_models.py @@ -6,6 +6,7 @@ # NOTE: using these, optional and required fields are always transmitted! # NOTE: make some attrs nullable by default!? + class FileMetaDataSchema(Schema): filename = fields.Str() version = fields.Str() @@ -14,5 +15,4 @@ class FileMetaDataSchema(Schema): storage_location = fields.Str() - # TODO: fix __all__ diff --git a/services/storage/src/simcore_service_storage/rest_routes.py b/services/storage/src/simcore_service_storage/rest_routes.py index 2cb95fb11ad..69f80b59595 100644 --- a/services/storage/src/simcore_service_storage/rest_routes.py +++ b/services/storage/src/simcore_service_storage/rest_routes.py @@ -18,7 +18,7 @@ def create(specs: OpenApiSpec) -> List[web.RouteDef]: # TODO: consider the case in which server creates routes for both v0 and v1!!! # TODO: should this be taken from servers instead? 
- BASEPATH = '/v' + specs.info.version.split('.')[0] + BASEPATH = "/v" + specs.info.version.split(".")[0] log.debug("creating %s ", __name__) routes = [] @@ -27,57 +27,66 @@ def create(specs: OpenApiSpec) -> List[web.RouteDef]: # routes = auto_routing(specs, handlers) # diagnostics -- - path, handle = '/', handlers.check_health - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/check/{action}', handlers.check_action - operation_id = specs.paths[path].operations['post'].operation_id - routes.append( web.post(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations', handlers.get_storage_locations - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/files/metadata', handlers.get_files_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/datasets', handlers.get_datasets_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/files/{fileId}/metadata', handlers.get_file_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/datasets/{dataset_id}/metadata', handlers.get_files_metadata_dataset - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/", handlers.check_health + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = "/check/{action}", handlers.check_action + operation_id = specs.paths[path].operations["post"].operation_id + routes.append(web.post(BASEPATH + path, handle, name=operation_id)) + + path, handle = "/locations", handlers.get_storage_locations + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/locations/{location_id}/files/metadata", + handlers.get_files_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = "/locations/{location_id}/datasets", handlers.get_datasets_metadata + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/locations/{location_id}/files/{fileId}/metadata", + handlers.get_file_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/locations/{location_id}/datasets/{dataset_id}/metadata", + handlers.get_files_metadata_dataset, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) # TODO: Implements update # path, handle = '/{location_id}/files/{fileId}/metadata', handlers.update_file_metadata # operation_id = specs.paths[path].operations['patch'].operation_id # routes.append( web.patch(BASEPATH+path, 
handle, name=operation_id) ) - path, handle = '/locations/{location_id}/files/{fileId}', handlers.download_file - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/locations/{location_id}/files/{fileId}", handlers.download_file + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) - path, handle = '/locations/{location_id}/files/{fileId}', handlers.delete_file - operation_id = specs.paths[path].operations['delete'].operation_id - routes.append( web.delete(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/locations/{location_id}/files/{fileId}", handlers.delete_file + operation_id = specs.paths[path].operations["delete"].operation_id + routes.append(web.delete(BASEPATH + path, handle, name=operation_id)) - path, handle = '/locations/{location_id}/files/{fileId}', handlers.upload_file - operation_id = specs.paths[path].operations['put'].operation_id - routes.append( web.put(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/locations/{location_id}/files/{fileId}", handlers.upload_file + operation_id = specs.paths[path].operations["put"].operation_id + routes.append(web.put(BASEPATH + path, handle, name=operation_id)) - path, handle = '/simcore-s3/folders', handlers.create_folders_from_project - operation_id = specs.paths[path].operations['post'].operation_id - routes.append( web.post(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/simcore-s3/folders", handlers.create_folders_from_project + operation_id = specs.paths[path].operations["post"].operation_id + routes.append(web.post(BASEPATH + path, handle, name=operation_id)) - path, handle = '/simcore-s3/folders/{folder_id}', handlers.delete_folders_of_project - operation_id = specs.paths[path].operations['delete'].operation_id - routes.append( web.delete(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/simcore-s3/folders/{folder_id}", handlers.delete_folders_of_project + operation_id = specs.paths[path].operations["delete"].operation_id + routes.append(web.delete(BASEPATH + path, handle, name=operation_id)) return routes diff --git a/services/storage/src/simcore_service_storage/s3.py b/services/storage/src/simcore_service_storage/s3.py index 0042b7b5426..f54bc7d5a81 100644 --- a/services/storage/src/simcore_service_storage/s3.py +++ b/services/storage/src/simcore_service_storage/s3.py @@ -15,7 +15,8 @@ log = logging.getLogger(__name__) -_SERVICE_NAME = 's3' +_SERVICE_NAME = "s3" + async def _setup_s3_bucket(app): log.debug("setup %s.setup.cleanup_ctx", __name__) @@ -24,10 +25,12 @@ async def _setup_s3_bucket(app): s3_client = app[APP_S3_KEY] cfg = app[APP_CONFIG_KEY] - @retry(wait=wait_fixed(RETRY_WAIT_SECS), + @retry( + wait=wait_fixed(RETRY_WAIT_SECS), stop=stop_after_attempt(RETRY_COUNT), before_sleep=before_sleep_log(log, logging.WARNING), - reraise=True) + reraise=True, + ) async def do_create_bucket(): s3_cfg = cfg[_SERVICE_NAME] s3_bucket = s3_cfg["bucket_name"] @@ -36,7 +39,7 @@ async def do_create_bucket(): try: await do_create_bucket() - except Exception: #pylint: disable=broad-except + except Exception: # pylint: disable=broad-except log.exception("Impossible to create s3 bucket. 
Stoping") # ok, failures_count = False, 0 @@ -60,7 +63,7 @@ def setup(app: web.Application): """ minio/s3 service setup""" log.debug("Setting up %s ...", __name__) - disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services",[]) + disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services", []) if _SERVICE_NAME in disable_services: log.warning("Service '%s' explicitly disabled in config", _SERVICE_NAME) @@ -73,11 +76,14 @@ def setup(app: web.Application): s3_secret_key = s3_cfg["secret_key"] s3_secure = s3_cfg["secure"] - s3_client = S3Client(s3_endpoint, s3_access_key, s3_secret_key, secure=s3_secure == 1) + s3_client = S3Client( + s3_endpoint, s3_access_key, s3_secret_key, secure=s3_secure == 1 + ) app[APP_S3_KEY] = s3_client app.cleanup_ctx.append(_setup_s3_bucket) + def get_config(app: web.Application) -> Dict: cfg = app[APP_CONFIG_KEY][_SERVICE_NAME] return cfg diff --git a/services/storage/src/simcore_service_storage/settings.py b/services/storage/src/simcore_service_storage/settings.py index 97b9f984947..c6c2e23f92b 100644 --- a/services/storage/src/simcore_service_storage/settings.py +++ b/services/storage/src/simcore_service_storage/settings.py @@ -21,7 +21,7 @@ # IMPORTANT: lowest level module # I order to avoid cyclic dependences, please # DO NOT IMPORT ANYTHING from . (except for __version__) -from .__version__ import get_version_object +from .__version__ import version log = logging.getLogger(__name__) @@ -32,40 +32,42 @@ CONNECT_TIMEOUT_SECS = 30 ## VERSION----------------------------- -service_version = get_version_object() +service_version = version ## CONFIGURATION FILES------------------ -DEFAULT_CONFIG='docker-prod-config.yaml' +DEFAULT_CONFIG = "docker-prod-config.yaml" -APP_CONFIG_KEY = application_keys.APP_CONFIG_KEY # app-storage-key for config object -RSC_CONFIG_DIR_KEY = "data" # resource folder +APP_CONFIG_KEY = application_keys.APP_CONFIG_KEY # app-storage-key for config object +RSC_CONFIG_DIR_KEY = "data" # resource folder # DSM specific constants -SIMCORE_S3_ID = 0 -SIMCORE_S3_STR = "simcore.s3" +SIMCORE_S3_ID = 0 +SIMCORE_S3_STR = "simcore.s3" -DATCORE_ID = 1 -DATCORE_STR = "datcore" +DATCORE_ID = 1 +DATCORE_STR = "datcore" # RSC=resource -RSC_CONFIG_DIR_KEY = "data" +RSC_CONFIG_DIR_KEY = "data" RSC_CONFIG_SCHEMA_KEY = RSC_CONFIG_DIR_KEY + "/config-schema-v1.json" # REST API ---------------------------- -API_MAJOR_VERSION = service_version.major # NOTE: syncs with service key +API_MAJOR_VERSION = service_version.major # NOTE: syncs with service key API_VERSION_TAG = "v{:.0f}".format(API_MAJOR_VERSION) -APP_OPENAPI_SPECS_KEY = application_keys.APP_OPENAPI_SPECS_KEY # app-storage-key for openapi specs object +APP_OPENAPI_SPECS_KEY = ( + application_keys.APP_OPENAPI_SPECS_KEY +) # app-storage-key for openapi specs object # DATABASE ---------------------------- -APP_DB_ENGINE_KEY = __name__ + '.db_engine' +APP_DB_ENGINE_KEY = __name__ + ".db_engine" # DATA STORAGE MANAGER ---------------------------------- -APP_DSM_THREADPOOL = __name__ + '.dsm_threadpool' +APP_DSM_THREADPOOL = __name__ + ".dsm_threadpool" APP_DSM_KEY = __name__ + ".DSM" APP_S3_KEY = __name__ + ".S3_CLIENT" diff --git a/services/storage/src/simcore_service_storage/utils.py b/services/storage/src/simcore_service_storage/utils.py index eddd27711b4..4e7bc86a56c 100644 --- a/services/storage/src/simcore_service_storage/utils.py +++ b/services/storage/src/simcore_service_storage/utils.py @@ -11,12 +11,15 @@ RETRY_COUNT = 20 CONNECT_TIMEOUT_SECS = 30 + 
@tenacity.retry( wait=tenacity.wait_fixed(RETRY_WAIT_SECS), stop=tenacity.stop_after_attempt(RETRY_COUNT), - before_sleep=tenacity.before_sleep_log(logger, logging.INFO) - ) -async def assert_enpoint_is_ok(session: ClientSession, url: URL, expected_response:int =200): + before_sleep=tenacity.before_sleep_log(logger, logging.INFO), +) +async def assert_enpoint_is_ok( + session: ClientSession, url: URL, expected_response: int = 200 +): """ Tenace check to GET given url endpoint Typically used to check connectivity to a given service @@ -33,5 +36,6 @@ async def assert_enpoint_is_ok(session: ClientSession, url: URL, expected_respon if resp.status != expected_response: raise AssertionError(f"{resp.status} != {expected_response}") + def is_url(location): return bool(URL(str(location)).host) diff --git a/services/storage/tests/_test_rawdatcore.py b/services/storage/tests/_test_rawdatcore.py index 9160fc11b5e..a3ea625bf5e 100644 --- a/services/storage/tests/_test_rawdatcore.py +++ b/services/storage/tests/_test_rawdatcore.py @@ -22,19 +22,19 @@ fd, path = tempfile.mkstemp() try: - with os.fdopen(fd, 'w') as tmp: + with os.fdopen(fd, "w") as tmp: # do stuff with temp file - tmp.write('stuff') + tmp.write("stuff") f = client.upload_file(destination, path) - f = client.delete_file(destination,Path(path).name) + f = client.delete_file(destination, Path(path).name) finally: os.remove(path) - files = [] + files = [] if True: dataset = client.get_dataset("mag") - # dataset.print_tree() + # dataset.print_tree() client.list_dataset_files_recursively(files, dataset, Path(dataset.name)) else: files = client.list_files_recursively() @@ -42,12 +42,11 @@ fd, path = tempfile.mkstemp() try: - with os.fdopen(fd, 'w') as tmp: + with os.fdopen(fd, "w") as tmp: # do stuff with temp file - tmp.write('stuff') + tmp.write("stuff") - - print(fd,path) + print(fd, path) destination_path = Path("mag/level1/level2/bla.txt") parts = destination_path.parts assert len(parts) > 1 @@ -72,7 +71,7 @@ def _get_collection_id(folder, _collections, collection_id): return _get_collection_id(folder, _collections, collection_id) my_id = "" - my_id =_get_collection_id(destination, collections, my_id) + my_id = _get_collection_id(destination, collections, my_id) package = client.client.get(my_id) client.upload_file(package, path) print(my_id) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index d2082496641..84dc5d0f92e 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -24,54 +24,55 @@ from simcore_service_storage.dsm import DataStorageManager, DatCoreApiToken from simcore_service_storage.models import FileMetaData from simcore_service_storage.settings import SIMCORE_S3_STR -from utils import (ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, - USER_ID) +from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, USER_ID current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -sys.path.append(str(current_dir / 'helpers')) +sys.path.append(str(current_dir / "helpers")) - -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def here(): return current_dir -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def package_dir(here): dirpath = Path(simcore_service_storage.__file__).parent assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_simcore_root_dir(here): root_dir = here.parent.parent.parent - assert 
root_dir.exists() and any(root_dir.glob("services")), "Is this service within osparc-simcore repo?" + assert root_dir.exists() and any( + root_dir.glob("services") + ), "Is this service within osparc-simcore repo?" return root_dir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_api_specs_dir(osparc_simcore_root_dir): dirpath = osparc_simcore_root_dir / "api" / "specs" assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_compose_file(here): """ Overrides pytest-docker fixture """ old = os.environ.copy() # docker-compose reads these environs - os.environ['POSTGRES_DB'] = DATABASE - os.environ['POSTGRES_USER'] = USER - os.environ['POSTGRES_PASSWORD'] = PASS - os.environ['POSTGRES_ENDPOINT'] = "FOO" # TODO: update config schema!! - os.environ['MINIO_ACCESS_KEY'] = ACCESS_KEY - os.environ['MINIO_SECRET_KEY'] = SECRET_KEY + os.environ["POSTGRES_DB"] = DATABASE + os.environ["POSTGRES_USER"] = USER + os.environ["POSTGRES_PASSWORD"] = PASS + os.environ["POSTGRES_ENDPOINT"] = "FOO" # TODO: update config schema!! + os.environ["MINIO_ACCESS_KEY"] = ACCESS_KEY + os.environ["MINIO_SECRET_KEY"] = SECRET_KEY - dc_path = here / 'docker-compose.yml' + dc_path = here / "docker-compose.yml" assert dc_path.exists() yield str(dc_path) @@ -79,50 +80,48 @@ def docker_compose_file(here): os.environ = old -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip): - url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( + url = "postgresql://{user}:{password}@{host}:{port}/{database}".format( user=USER, password=PASS, database=DATABASE, host=docker_ip, - port=docker_services.port_for('postgres', 5432), + port=docker_services.port_for("postgres", 5432), ) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: utils.is_postgres_responsive(url), - timeout=30.0, - pause=0.1, + check=lambda: utils.is_postgres_responsive(url), timeout=30.0, pause=0.1, ) postgres_service = { - 'user': USER, - 'password': PASS, - 'database': DATABASE, - 'host': docker_ip, - 'port': docker_services.port_for('postgres', 5432), - 'minsize':1, - 'maxsize':4 + "user": USER, + "password": PASS, + "database": DATABASE, + "host": docker_ip, + "port": docker_services.port_for("postgres", 5432), + "minsize": 1, + "maxsize": 4, } return postgres_service -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service_url(postgres_service, docker_services, docker_ip): - postgres_service_url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( + postgres_service_url = "postgresql://{user}:{password}@{host}:{port}/{database}".format( user=USER, password=PASS, database=DATABASE, host=docker_ip, - port=docker_services.port_for('postgres', 5432), + port=docker_services.port_for("postgres", 5432), ) return postgres_service_url -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") async def postgres_engine(loop, postgres_service_url): postgres_engine = await create_engine(postgres_service_url) @@ -133,28 +132,25 @@ async def postgres_engine(loop, postgres_service_url): await postgres_engine.wait_closed() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def minio_service(docker_services, docker_ip): - # Build URL to service listening on random port. 
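All of these session-scoped conftest fixtures follow the same pytest-docker readiness idiom: derive the published port with port_for, then poll with wait_until_responsive until the containerized service answers before handing connection details to the tests. A condensed sketch, where some_service, port 8080, and this is_responsive are placeholders rather than the real helpers in tests/utils.py:

    import pytest
    import requests

    def is_responsive(url: str, expected: int = 200) -> bool:
        # simplified stand-in for the checks in tests/utils.py
        try:
            return requests.get(url, timeout=1).status_code == expected
        except requests.RequestException:
            return False

    @pytest.fixture(scope="session")
    def some_service(docker_services, docker_ip):
        # "some_service" and port 8080 are placeholders for an entry in
        # the docker-compose.yml that pytest-docker brings up per session
        url = "http://{}:{}".format(
            docker_ip, docker_services.port_for("some_service", 8080)
        )
        docker_services.wait_until_responsive(
            check=lambda: is_responsive(url), timeout=30.0, pause=0.1
        )
        return url

Polling with a short pause and a generous timeout, as the minio and postgres fixtures do, avoids flaky failures on slow CI hosts without delaying fast local runs.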
- url = 'http://%s:%d/' % ( - docker_ip, - docker_services.port_for('minio', 9000), - ) + # Build URL to service listening on random port. + url = "http://%s:%d/" % (docker_ip, docker_services.port_for("minio", 9000),) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: utils.is_responsive(url, 403), - timeout=30.0, - pause=0.1, + check=lambda: utils.is_responsive(url, 403), timeout=30.0, pause=0.1, ) return { - 'endpoint': '{ip}:{port}'.format(ip=docker_ip, port=docker_services.port_for('minio', 9000)), - 'access_key': ACCESS_KEY, - 'secret_key': SECRET_KEY, - 'bucket_name': BUCKET_NAME, - 'secure' : 0 + "endpoint": "{ip}:{port}".format( + ip=docker_ip, port=docker_services.port_for("minio", 9000) + ), + "access_key": ACCESS_KEY, + "secret_key": SECRET_KEY, + "bucket_name": BUCKET_NAME, + "secure": 0, } @@ -163,7 +159,10 @@ def s3_client(minio_service): from s3wrapper.s3_client import S3Client s3_client = S3Client( - endpoint=minio_service['endpoint'], access_key=minio_service["access_key"], secret_key=minio_service["secret_key"]) + endpoint=minio_service["endpoint"], + access_key=minio_service["access_key"], + secret_key=minio_service["secret_key"], + ) return s3_client @@ -174,14 +173,17 @@ def _create_files(count): for _i in range(count): name = str(uuid.uuid4()) filepath = os.path.normpath( - str(tmpdir_factory.mktemp('data').join(name + ".txt"))) - with open(filepath, 'w') as fout: + str(tmpdir_factory.mktemp("data").join(name + ".txt")) + ) + with open(filepath, "w") as fout: fout.write("Hello world\n") filepaths.append(filepath) return filepaths + return _create_files + @pytest.fixture(scope="function") def dsm_mockup_complete_db(postgres_service_url, s3_client) -> Tuple[str, str]: utils.create_full_tables(url=postgres_service_url) @@ -190,21 +192,21 @@ def dsm_mockup_complete_db(postgres_service_url, s3_client) -> Tuple[str, str]: file_1 = { "project_id": "161b8782-b13e-5840-9ae2-e2250c231001", "node_id": "ad9bda7f-1dc5-5480-ab22-5fef4fc53eac", - "filename": "outputController.dat" - } - f = utils.data_dir() /Path("outputController.dat") + "filename": "outputController.dat", + } + f = utils.data_dir() / Path("outputController.dat") object_name = "{project_id}/{node_id}/{filename}".format(**file_1) s3_client.upload_file(bucket_name, object_name, f) file_2 = { "project_id": "161b8782-b13e-5840-9ae2-e2250c231001", "node_id": "a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8", - "filename": "notebooks.zip" - } - f = utils.data_dir() /Path("notebooks.zip") + "filename": "notebooks.zip", + } + f = utils.data_dir() / Path("notebooks.zip") object_name = "{project_id}/{node_id}/{filename}".format(**file_2) s3_client.upload_file(bucket_name, object_name, f) - yield (file_1,file_2) + yield (file_1, file_2) utils.drop_all_tables(url=postgres_service_url) @@ -218,13 +220,20 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) # TODO: use pip install Faker - users = ['alice', 'bob', 'chuck', 'dennis'] - - projects = ['astronomy', 'biology', 'chemistry', - 'dermatology', 'economics', 'futurology', 'geology'] + users = ["alice", "bob", "chuck", "dennis"] + + projects = [ + "astronomy", + "biology", + "chemistry", + "dermatology", + "economics", + "futurology", + "geology", + ] location = SIMCORE_S3_STR - nodes = ['alpha', 'beta', 'gamma', 'delta'] + nodes = ["alpha", "beta", "gamma", "delta"] N = 100 files = mock_files_factory(count=N) @@ -241,43 +250,41 @@ def 
dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): node = nodes[idx] node_id = idx + 10000 file_name = str(counter) - object_name = Path(str(project_id), str( - node_id), str(counter)).as_posix() + object_name = Path(str(project_id), str(node_id), str(counter)).as_posix() file_uuid = Path(object_name).as_posix() raw_file_path = file_uuid - display_file_path = str(Path(project_name)/Path(node)/Path(file_name)) + display_file_path = str(Path(project_name) / Path(node) / Path(file_name)) created_at = str(datetime.datetime.now()) file_size = 1234 assert s3_client.upload_file(bucket_name, object_name, _file) - d = {'file_uuid': file_uuid, - 'location_id': "0", - 'location': location, - 'bucket_name': bucket_name, - 'object_name': object_name, - 'project_id': str(project_id), - 'project_name': project_name, - 'node_id': str(node_id), - 'node_name': node, - 'file_name': file_name, - 'user_id': str(user_id), - 'user_name': user_name, - "file_id": str(uuid.uuid4()), - "raw_file_path": file_uuid, - "display_file_path": display_file_path, - "created_at": created_at, - "last_modified": created_at, - "file_size": file_size, - } + d = { + "file_uuid": file_uuid, + "location_id": "0", + "location": location, + "bucket_name": bucket_name, + "object_name": object_name, + "project_id": str(project_id), + "project_name": project_name, + "node_id": str(node_id), + "node_name": node, + "file_name": file_name, + "user_id": str(user_id), + "user_name": user_name, + "file_id": str(uuid.uuid4()), + "raw_file_path": file_uuid, + "display_file_path": display_file_path, + "created_at": created_at, + "last_modified": created_at, + "file_size": file_size, + } counter = counter + 1 data[object_name] = FileMetaData(**d) # pylint: disable=no-member - utils.insert_metadata(postgres_service_url, - data[object_name]) - + utils.insert_metadata(postgres_service_url, data[object_name]) total_count = 0 for _obj in s3_client.list_objects_v2(bucket_name, recursive=True): @@ -320,7 +327,8 @@ async def datcore_testbucket(loop, mock_files_factory): def dsm_fixture(s3_client, postgres_engine, loop): pool = ThreadPoolExecutor(3) dsm_fixture = DataStorageManager( - s3_client, postgres_engine, loop, pool, BUCKET_NAME, False) + s3_client, postgres_engine, loop, pool, BUCKET_NAME, False + ) api_token = os.environ.get("BF_API_KEY", "none") api_secret = os.environ.get("BF_API_SECRET", "none") @@ -348,17 +356,27 @@ async def datcore_structured_testbucket(loop, mock_files_factory): # create first level folder collection_id1 = await dcw.create_collection(dataset_id, "level1") # upload second file - file_id2 = await dcw.upload_file_to_id(collection_id1, os.path.normpath(tmp_files[1])) + file_id2 = await dcw.upload_file_to_id( + collection_id1, os.path.normpath(tmp_files[1]) + ) # create 3rd level folder collection_id2 = await dcw.create_collection(collection_id1, "level2") - file_id3 = await dcw.upload_file_to_id(collection_id2, os.path.normpath(tmp_files[2])) - - yield { 'dataset_id' : dataset_id, - 'coll1_id' : collection_id1, 'coll2_id' : collection_id2, - 'file_id1' : file_id1, 'filename1' : tmp_files[0], - 'file_id2' : file_id2, 'filename2' : tmp_files[1], - 'file_id3' : file_id3, 'filename3' : tmp_files[2], - 'dcw' : dcw } + file_id3 = await dcw.upload_file_to_id( + collection_id2, os.path.normpath(tmp_files[2]) + ) + + yield { + "dataset_id": dataset_id, + "coll1_id": collection_id1, + "coll2_id": collection_id2, + "file_id1": file_id1, + "filename1": tmp_files[0], + "file_id2": file_id2, + "filename2": tmp_files[1], + 
"file_id3": file_id3, + "filename3": tmp_files[2], + "dcw": dcw, + } await dcw.delete_test_dataset(BUCKET_NAME) diff --git a/services/storage/tests/helpers/utils_assert.py b/services/storage/tests/helpers/utils_assert.py index 0e4f2e6ac23..b33aca63138 100644 --- a/services/storage/tests/helpers/utils_assert.py +++ b/services/storage/tests/helpers/utils_assert.py @@ -5,10 +5,13 @@ from servicelib.rest_responses import unwrap_envelope -async def assert_status(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): +async def assert_status( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) - assert response.status == expected_cls.status_code, \ - f"got {response.status}, expected {expected_cls.status_code}:\n data:{data},\n error:{error}" + assert ( + response.status == expected_cls.status_code + ), f"got {response.status}, expected {expected_cls.status_code}:\n data:{data},\n error:{error}" if issubclass(expected_cls, web.HTTPError): do_assert_error(data, error, expected_cls, expected_msg) @@ -25,20 +28,26 @@ async def assert_status(response: web.Response, expected_cls:web.HTTPException, return data, error -async def assert_error(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): + +async def assert_error( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) return do_assert_error(data, error, expected_cls, expected_msg) -def do_assert_error(data, error, expected_cls:web.HTTPException, expected_msg: str=None): + +def do_assert_error( + data, error, expected_cls: web.HTTPException, expected_msg: str = None +): assert not data, pformat(data) assert error, pformat(error) # TODO: improve error messages - assert len(error['errors']) == 1 + assert len(error["errors"]) == 1 - err = error['errors'][0] + err = error["errors"][0] if expected_msg: - assert expected_msg in err['message'] - assert expected_cls.__name__ == err['code'] + assert expected_msg in err["message"] + assert expected_cls.__name__ == err["code"] return data, error diff --git a/services/storage/tests/helpers/utils_project.py b/services/storage/tests/helpers/utils_project.py index b8d59f5ba76..11d70f508fe 100644 --- a/services/storage/tests/helpers/utils_project.py +++ b/services/storage/tests/helpers/utils_project.py @@ -8,15 +8,15 @@ def clone_project_data(project: Dict) -> Tuple[Dict, Dict]: # Update project id # NOTE: this can be re-assigned by dbapi if not unique - project_copy_uuid = uuidlib.uuid1() # random project id - project_copy['uuid'] = str(project_copy_uuid) + project_copy_uuid = uuidlib.uuid1() # random project id + project_copy["uuid"] = str(project_copy_uuid) # Workbench nodes shall be unique within the project context def _create_new_node_uuid(old_uuid): - return str( uuidlib.uuid5(project_copy_uuid, str(old_uuid)) ) + return str(uuidlib.uuid5(project_copy_uuid, str(old_uuid))) nodes_map = {} - for node_uuid in project.get('workbench', {}).keys(): + for node_uuid in project.get("workbench", {}).keys(): nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) def _replace_uuids(node): @@ -34,5 +34,5 @@ def _replace_uuids(node): node[key] = _replace_uuids(value) return node - project_copy['workbench'] = _replace_uuids(project_copy.get('workbench', {})) + project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) return project_copy, nodes_map diff --git 
a/services/storage/tests/test_configs.py b/services/storage/tests/test_configs.py index e332aadc953..184fcb60a24 100644 --- a/services/storage/tests/test_configs.py +++ b/services/storage/tests/test_configs.py @@ -14,8 +14,9 @@ from simcore_service_storage.cli import create_environ, parse, setup_parser from simcore_service_storage.resources import resources -THIS_SERVICE = 'storage' -CONFIG_DIR = 'data' +THIS_SERVICE = "storage" +CONFIG_DIR = "data" + @pytest.fixture("session") def env_devel_file(osparc_simcore_root_dir): @@ -30,9 +31,10 @@ def services_docker_compose_file(osparc_simcore_root_dir): assert dcpath.exists() return dcpath + @pytest.fixture("session") def devel_environ(env_devel_file): - PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$") + PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$") env_devel = {} with env_devel_file.open() as f: for line in f: @@ -43,8 +45,35 @@ def devel_environ(env_devel_file): return env_devel +variable_expansion_pattern = re.compile(r"\$\{*(\w+)+[:-]*(\w+)*\}") + +@pytest.mark.parametrize( + "sample,expected_match", + [ + (r"${varname:-default}", ("varname", "default")), + (r"${varname}", ("varname", None)), + (r"33", None), + (r"${VAR_name:-33}", ("VAR_name", "33")), + (r"${varname-default}", ("varname", "default")), # this is not standard! + (r"${varname:default}", ("varname", "default")), # this is not standard! + ], +) +def test_variable_expansions(sample, expected_match): + # TODO: extend variable expansions + # https://en.wikibooks.org/wiki/Bourne_Shell_Scripting/Variable_Expansion + match = variable_expansion_pattern.match(sample) + if expected_match: + assert match + varname, default = match.groups() + assert (varname, default) == expected_match + else: + assert not match + + @pytest.fixture("session") -def container_environ(services_docker_compose_file, devel_environ, osparc_simcore_root_dir): +def container_environ( + services_docker_compose_file, devel_environ, osparc_simcore_root_dir +): """ Creates a dict with the environment variables inside of a webserver container """ @@ -53,32 +82,32 @@ def container_environ(services_docker_compose_file, devel_environ, osparc_simcor dc = yaml.safe_load(f) container_environ = create_environ(skip_system_environ=True) - container_environ.update({ - 'OSPARC_SIMCORE_REPO_ROOTDIR':str(osparc_simcore_root_dir) - }) + container_environ.update( + {"OSPARC_SIMCORE_REPO_ROOTDIR": str(osparc_simcore_root_dir)} + ) environ_items = dc["services"][THIS_SERVICE].get("environment", list()) - MATCH = re.compile(r'\$\{(\w+)+') for item in environ_items: key, value = item.split("=") - m = MATCH.match(value) - if m: - envkey = m.groups()[0] - value = devel_environ[envkey] + match = variable_expansion_pattern.match(value) + if match: + varname, default_value = match.groups() + value = devel_environ.get(varname, default_value) container_environ[key] = value return container_environ -@pytest.mark.parametrize("configfile", [str(n) - for n in resources.listdir(CONFIG_DIR) if n.endswith(("yaml", "yml")) - ]) +@pytest.mark.parametrize( + "configfile", + [str(n) for n in resources.listdir(CONFIG_DIR) if n.endswith(("yaml", "yml"))], +) def test_config_files(configfile, container_environ, capsys): parser = setup_parser(argparse.ArgumentParser("test-parser")) - with mock.patch('os.environ', container_environ): + with mock.patch("os.environ", container_environ): cmd = ["-c", configfile] try: config = parse(cmd, parser) @@ -86,6 +115,7 @@ def test_config_files(configfile, container_environ, capsys): except SystemExit as err: 
pytest.fail(capsys.readouterr().err) - for key, value in config.items(): - assert value!='None', "Use instead Null in {} for {}".format(configfile, key) + assert value != "None", "Use instead Null in {} for {}".format( + configfile, key + ) diff --git a/services/storage/tests/test_datcore.py b/services/storage/tests/test_datcore.py index 37a06106e73..90bcac0c3ca 100644 --- a/services/storage/tests/test_datcore.py +++ b/services/storage/tests/test_datcore.py @@ -24,6 +24,7 @@ async def test_datcore_ping(loop): responsive = await dcw.ping() assert responsive + async def test_datcore_list_files_recursively(loop): if not utils.has_datcore_tokens(): return @@ -35,6 +36,7 @@ async def test_datcore_list_files_recursively(loop): f = await dcw.list_files_recursively() assert len(f) + async def test_datcore_list_files_raw(loop): if not utils.has_datcore_tokens(): return @@ -47,7 +49,6 @@ async def test_datcore_list_files_raw(loop): assert len(f) - async def test_datcore_nested_download_link(loop): if not utils.has_datcore_tokens(): return @@ -60,4 +61,4 @@ async def test_datcore_nested_download_link(loop): filename = "initial_WTstates.txt" f = await dcw.download_link(destination, filename) - assert(f) + assert f diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py index deade321d32..5994f81ffbf 100644 --- a/services/storage/tests/test_dsm.py +++ b/services/storage/tests/test_dsm.py @@ -20,13 +20,13 @@ import utils from simcore_service_storage.models import FileMetaData -from simcore_service_storage.settings import (DATCORE_STR, SIMCORE_S3_ID, - SIMCORE_S3_STR) +from simcore_service_storage.settings import DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR from utils import BUCKET_NAME, USER_ID, has_datcore_tokens def test_mockup(dsm_mockup_db): - assert len(dsm_mockup_db)==100 + assert len(dsm_mockup_db) == 100 + # Too many branches (13/12) (too-many-branches) # pylint: disable=R0912 @@ -55,7 +55,7 @@ async def test_dsm_s3(dsm_mockup_db, dsm_fixture): data_as_dict.append(attr.asdict(d)) if write_data: - with open("example.json", 'w') as _f: + with open("example.json", "w") as _f: json.dump(data_as_dict, _f) # Get files from bob from the project biology @@ -66,27 +66,38 @@ async def test_dsm_s3(dsm_mockup_db, dsm_fixture): break assert not bob_id == 0 - data = await dsm.list_files(user_id=bob_id, location=SIMCORE_S3_STR, regex="biology") - data1 = await dsm.list_files(user_id=bob_id, location=SIMCORE_S3_STR, regex="astronomy") + data = await dsm.list_files( + user_id=bob_id, location=SIMCORE_S3_STR, regex="biology" + ) + data1 = await dsm.list_files( + user_id=bob_id, location=SIMCORE_S3_STR, regex="astronomy" + ) data = data + data1 bobs_biostromy_files = [] for d in dsm_mockup_db.keys(): md = dsm_mockup_db[d] - if md.user_id == bob_id and (md.project_name == "biology" or md.project_name == "astronomy"): + if md.user_id == bob_id and ( + md.project_name == "biology" or md.project_name == "astronomy" + ): bobs_biostromy_files.append(md) assert len(data) == len(bobs_biostromy_files) - # among bobs bio files, filter by project/node, take first one - uuid_filter = os.path.join(bobs_biostromy_files[0].project_id, bobs_biostromy_files[0].node_id) - filtered_data = await dsm.list_files(user_id=bob_id, location=SIMCORE_S3_STR, uuid_filter=str(uuid_filter)) + uuid_filter = os.path.join( + bobs_biostromy_files[0].project_id, bobs_biostromy_files[0].node_id + ) + filtered_data = await dsm.list_files( + user_id=bob_id, location=SIMCORE_S3_STR, uuid_filter=str(uuid_filter) + ) assert 
filtered_data[0].fmd == bobs_biostromy_files[0] for dx in data: d = dx.fmd - await dsm.delete_file(user_id=d.user_id, location=SIMCORE_S3_STR, file_uuid=d.file_uuid) + await dsm.delete_file( + user_id=d.user_id, location=SIMCORE_S3_STR, file_uuid=d.file_uuid + ) # now we should have less items new_size = 0 @@ -103,7 +114,6 @@ def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): bucket_name = BUCKET_NAME s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) - # create file and upload filename = os.path.basename(tmp_file) project_id = "22" @@ -116,31 +126,35 @@ def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): created_at = str(datetime.datetime.now()) file_size = 1234 - d = { 'object_name' : os.path.join(str(project_id), str(node_id), str(file_name)), - 'bucket_name' : bucket_name, - 'file_name' : filename, - 'user_id' : USER_ID, - 'user_name' : "starbucks", - 'location' : SIMCORE_S3_STR, - 'location_id' : SIMCORE_S3_ID, - 'project_id' : project_id, - 'project_name' : project_name, - 'node_id' : node_id, - 'node_name' : node_name, - 'file_uuid' : file_uuid, - 'file_id' : file_uuid, - 'raw_file_path' : file_uuid, - 'display_file_path' : display_name, - 'created_at' : created_at, - 'last_modified' : created_at, - 'file_size' : file_size - } + d = { + "object_name": os.path.join(str(project_id), str(node_id), str(file_name)), + "bucket_name": bucket_name, + "file_name": filename, + "user_id": USER_ID, + "user_name": "starbucks", + "location": SIMCORE_S3_STR, + "location_id": SIMCORE_S3_ID, + "project_id": project_id, + "project_name": project_name, + "node_id": node_id, + "node_name": node_name, + "file_uuid": file_uuid, + "file_id": file_uuid, + "raw_file_path": file_uuid, + "display_file_path": display_name, + "created_at": created_at, + "last_modified": created_at, + "file_size": file_size, + } fmd = FileMetaData(**d) return fmd -async def test_links_s3(postgres_service_url, s3_client, mock_files_factory, dsm_fixture): + +async def test_links_s3( + postgres_service_url, s3_client, mock_files_factory, dsm_fixture +): utils.create_tables(url=postgres_service_url) tmp_file = mock_files_factory(1)[0] @@ -149,9 +163,9 @@ async def test_links_s3(postgres_service_url, s3_client, mock_files_factory, dsm dsm = dsm_fixture up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass @@ -163,7 +177,10 @@ async def test_links_s3(postgres_service_url, s3_client, mock_files_factory, dsm assert filecmp.cmp(tmp_file2, tmp_file) -async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, dsm_fixture): + +async def test_copy_s3_s3( + postgres_service_url, s3_client, mock_files_factory, dsm_fixture +): utils.create_tables(url=postgres_service_url) tmp_file = mock_files_factory(1)[0] @@ -175,9 +192,9 @@ async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, d # upload the file up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass @@ -187,26 +204,38 @@ async def test_copy_s3_s3(postgres_service_url, s3_client, 
mock_files_factory, d from_uuid = fmd.file_uuid new_project = "zoology" to_uuid = os.path.join(new_project, fmd.node_id, fmd.file_name) - await dsm.copy_file(user_id=fmd.user_id, dest_location=SIMCORE_S3_STR, dest_uuid=to_uuid, source_location=SIMCORE_S3_STR, source_uuid=from_uuid) + await dsm.copy_file( + user_id=fmd.user_id, + dest_location=SIMCORE_S3_STR, + dest_uuid=to_uuid, + source_location=SIMCORE_S3_STR, + source_uuid=from_uuid, + ) data = await dsm.list_files(user_id=fmd.user_id, location=SIMCORE_S3_STR) assert len(data) == 2 -#NOTE: Below tests directly access the datcore platform, use with care! + +# NOTE: Below tests directly access the datcore platform, use with care! def test_datcore_fixture(datcore_structured_testbucket): if not has_datcore_tokens(): return print(datcore_structured_testbucket) -async def test_dsm_datcore(postgres_service_url, dsm_fixture, datcore_structured_testbucket): + +async def test_dsm_datcore( + postgres_service_url, dsm_fixture, datcore_structured_testbucket +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) dsm = dsm_fixture user_id = "0" - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) # the fixture creates two files assert len(data) == 3 @@ -215,10 +244,19 @@ async def test_dsm_datcore(postgres_service_url, dsm_fixture, datcore_structured print("Deleting", fmd_to_delete.bucket_name, fmd_to_delete.object_name) await dsm.delete_file(user_id, DATCORE_STR, fmd_to_delete.file_id) - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(data) == 2 -async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_factory, dsm_fixture, datcore_structured_testbucket): + +async def test_dsm_s3_to_datcore( + postgres_service_url, + s3_client, + mock_files_factory, + dsm_fixture, + datcore_structured_testbucket, +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) @@ -229,9 +267,9 @@ async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac dsm = dsm_fixture up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass @@ -242,24 +280,41 @@ async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac urllib.request.urlretrieve(down_url, tmp_file2) assert filecmp.cmp(tmp_file2, tmp_file) # now we have the file locally, upload the file - await dsm.upload_file_to_datcore(user_id=user_id, local_file_path=tmp_file2, destination_id=datcore_structured_testbucket['dataset_id']) + await dsm.upload_file_to_datcore( + user_id=user_id, + local_file_path=tmp_file2, + destination_id=datcore_structured_testbucket["dataset_id"], + ) # and into a deeper strucutre - await dsm.upload_file_to_datcore(user_id=user_id, local_file_path=tmp_file2, destination_id=datcore_structured_testbucket['coll2_id']) - - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + await dsm.upload_file_to_datcore( + user_id=user_id, + local_file_path=tmp_file2, + 
destination_id=datcore_structured_testbucket["coll2_id"], + ) + + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) # there should now be 5 files assert len(data) == 5 -async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_files_factory, datcore_structured_testbucket): + +async def test_dsm_datcore_to_local( + postgres_service_url, dsm_fixture, mock_files_factory, datcore_structured_testbucket +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) dsm = dsm_fixture user_id = USER_ID - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(data) - url, filename = await dsm.download_link_datcore(user_id, datcore_structured_testbucket['file_id1']) + url, filename = await dsm.download_link_datcore( + user_id, datcore_structured_testbucket["file_id1"] + ) tmp_file = mock_files_factory(1)[0] tmp_file2 = tmp_file + ".fromdatcore" @@ -268,7 +323,14 @@ async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_file assert filecmp.cmp(tmp_file2, tmp_file) -async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_structured_testbucket): + +async def test_dsm_datcore_to_S3( + postgres_service_url, + s3_client, + dsm_fixture, + mock_files_factory, + datcore_structured_testbucket, +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) @@ -283,19 +345,28 @@ async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, m s3_data = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(s3_data) == 0 - dc_data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + dc_data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(dc_data) == 3 src_fmd = dc_data[0] - await dsm.copy_file(user_id=user_id, dest_location=SIMCORE_S3_STR, dest_uuid=dest_uuid, source_location=DATCORE_STR, - source_uuid=datcore_structured_testbucket["file_id1"]) + await dsm.copy_file( + user_id=user_id, + dest_location=SIMCORE_S3_STR, + dest_uuid=dest_uuid, + source_location=DATCORE_STR, + source_uuid=datcore_structured_testbucket["file_id1"], + ) s3_data = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(s3_data) == 1 # now download the original file tmp_file1 = tmp_file + ".fromdatcore" - down_url_dc, filename = await dsm.download_link_datcore(user_id, datcore_structured_testbucket["file_id1"]) + down_url_dc, filename = await dsm.download_link_datcore( + user_id, datcore_structured_testbucket["file_id1"] + ) urllib.request.urlretrieve(down_url_dc, tmp_file1) # and the one on s3 @@ -305,7 +376,14 @@ async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, m assert filecmp.cmp(tmp_file1, tmp_file2) -async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_structured_testbucket): + +async def test_copy_datcore( + postgres_service_url, + s3_client, + dsm_fixture, + mock_files_factory, + datcore_structured_testbucket, +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) @@ -313,7 +391,9 @@ async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_f # the fixture should provide 3 files dsm = dsm_fixture user_id = USER_ID - data = await 
dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(data) == 3 # create temporary file and upload to s3 @@ -321,23 +401,31 @@ async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_f fmd = _create_file_meta_for_s3(postgres_service_url, s3_client, tmp_file) up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass - #now copy to datcore + # now copy to datcore dat_core_uuid = os.path.join(BUCKET_NAME, fmd.file_name) - await dsm.copy_file(user_id=user_id, dest_location=DATCORE_STR, dest_uuid=datcore_structured_testbucket["coll2_id"], source_location=SIMCORE_S3_STR, - source_uuid=fmd.file_uuid) + await dsm.copy_file( + user_id=user_id, + dest_location=DATCORE_STR, + dest_uuid=datcore_structured_testbucket["coll2_id"], + source_location=SIMCORE_S3_STR, + source_uuid=fmd.file_uuid, + ) - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) # there should now be 4 files assert len(data) == 4 + def test_fmd_build(): file_uuid = str(Path("1234") / Path("abcd") / Path("xx.dat")) fmd = FileMetaData() @@ -352,6 +440,7 @@ def test_fmd_build(): assert fmd.location_id == SIMCORE_S3_ID assert fmd.bucket_name == "test-bucket" + async def test_dsm_complete_db(dsm_fixture, dsm_mockup_complete_db): dsm = dsm_fixture _id = "21" @@ -366,19 +455,27 @@ async def test_dsm_complete_db(dsm_fixture, dsm_mockup_complete_db): assert d.project_name assert d.raw_file_path + async def test_delete_data_folders(dsm_fixture, dsm_mockup_complete_db): file_1, file_2 = dsm_mockup_complete_db _id = "21" data = await dsm_fixture.list_files(user_id=_id, location=SIMCORE_S3_STR) - response = await dsm_fixture.delete_project_simcore_s3(user_id=_id, project_id=file_1["project_id"], node_id=file_1["node_id"]) + response = await dsm_fixture.delete_project_simcore_s3( + user_id=_id, project_id=file_1["project_id"], node_id=file_1["node_id"] + ) data = await dsm_fixture.list_files(user_id=_id, location=SIMCORE_S3_STR) assert len(data) == 1 assert data[0].fmd.file_name == file_2["filename"] - response = await dsm_fixture.delete_project_simcore_s3(user_id=_id, project_id=file_1["project_id"], node_id=None) + response = await dsm_fixture.delete_project_simcore_s3( + user_id=_id, project_id=file_1["project_id"], node_id=None + ) data = await dsm_fixture.list_files(user_id=_id, location=SIMCORE_S3_STR) assert not data -async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_service_url, datcore_structured_testbucket): + +async def test_deep_copy_project_simcore_s3( + dsm_fixture, s3_client, postgres_service_url, datcore_structured_testbucket +): if not has_datcore_tokens(): return dsm = dsm_fixture @@ -388,7 +485,7 @@ async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_ser file_name_in_datcore = Path(datcore_structured_testbucket["filename3"]).name user_id = USER_ID - source_project = { + source_project = { "uuid": "template-uuid-4d5e-b80e-401c8066782f", "name": "ISAN: 2D Plot", "description": "2D RawGraphs viewer with one input", @@ -397,53 
+494,47 @@ async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_ser "creationDate": "2019-05-24T10:36:57.813Z", "lastChangeDate": "2019-05-24T11:36:12.015Z", "workbench": { - "template-uuid-48eb-a9d2-aaad6b72400a": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker", - "inputs": {}, - "inputNodes": [], - "outputs": { - "outFile": { - "store": 1, - "path": "N:package:ab8c214d-a596-401f-a90c-9c50e3c048b0" - } + "template-uuid-48eb-a9d2-aaad6b72400a": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker", + "inputs": {}, + "inputNodes": [], + "outputs": { + "outFile": { + "store": 1, + "path": "N:package:ab8c214d-a596-401f-a90c-9c50e3c048b0", + } + }, + "progress": 100, + "thumbnail": "", + "position": {"x": 100, "y": 100}, }, - "progress": 100, - "thumbnail": "", - "position": { - "x": 100, - "y": 100 - } - }, - "template-uuid-4c63-a705-03a2c339646c": { - "key": "simcore/services/dynamic/raw-graphs", - "version": "2.8.0", - "label": "2D plot", - "inputs": { - "input_1": { - "nodeUuid": "template-uuid-48eb-a9d2-aaad6b72400a", - "output": "outFile" - } + "template-uuid-4c63-a705-03a2c339646c": { + "key": "simcore/services/dynamic/raw-graphs", + "version": "2.8.0", + "label": "2D plot", + "inputs": { + "input_1": { + "nodeUuid": "template-uuid-48eb-a9d2-aaad6b72400a", + "output": "outFile", + } + }, + "inputNodes": ["template-uuid-48eb-a9d2-aaad6b72400a"], + "outputs": {}, + "progress": 0, + "thumbnail": "", + "position": {"x": 400, "y": 100}, }, - "inputNodes": [ - "template-uuid-48eb-a9d2-aaad6b72400a" - ], - "outputs": {}, - "progress": 0, - "thumbnail": "", - "position": { - "x": 400, - "y": 100 - } - } - } + }, } bucket_name = BUCKET_NAME s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) - source_project["workbench"]["template-uuid-48eb-a9d2-aaad6b72400a"]["outputs"]["outFile"]["path"] = path_in_datcore + source_project["workbench"]["template-uuid-48eb-a9d2-aaad6b72400a"]["outputs"][ + "outFile" + ]["path"] = path_in_datcore destination_project = copy.deepcopy(source_project) source_project_id = source_project["uuid"] @@ -453,28 +544,38 @@ async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_ser node_mapping = {} for node_id, node in source_project["workbench"].items(): - object_name = str(Path(source_project_id) / Path(node_id) / Path(node_id + ".dat")) + object_name = str( + Path(source_project_id) / Path(node_id) / Path(node_id + ".dat") + ) f = utils.data_dir() / Path("notebooks.zip") s3_client.upload_file(bucket_name, object_name, f) key = node_id.replace("template", "deep-copy") destination_project["workbench"][key] = node node_mapping[node_id] = key - status = await dsm.deep_copy_project_simcore_s3(user_id, source_project, destination_project, node_mapping) + status = await dsm.deep_copy_project_simcore_s3( + user_id, source_project, destination_project, node_mapping + ) - new_path = destination_project["workbench"]["deep-copy-uuid-48eb-a9d2-aaad6b72400a"]["outputs"]["outFile"]["path"] + new_path = destination_project["workbench"][ + "deep-copy-uuid-48eb-a9d2-aaad6b72400a" + ]["outputs"]["outFile"]["path"] assert new_path != path_in_datcore assert Path(new_path).name == file_name_in_datcore files = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(files) == 3 # one of the files in s3 should be the dowloaded one from datcore - assert any(f.fmd.file_name == 
Path(datcore_structured_testbucket["filename3"]).name for f in files) + assert any( + f.fmd.file_name == Path(datcore_structured_testbucket["filename3"]).name + for f in files + ) response = await dsm.delete_project_simcore_s3(user_id, destination_project["uuid"]) files = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(files) == 0 + async def test_dsm_list_datasets_s3(dsm_fixture, dsm_mockup_complete_db): dsm_fixture.has_project_db = True @@ -483,6 +584,7 @@ async def test_dsm_list_datasets_s3(dsm_fixture, dsm_mockup_complete_db): assert len(datasets) == 1 assert any("Kember" in d.display_name for d in datasets) + async def test_dsm_list_datasets_datcore(dsm_fixture, datcore_structured_testbucket): if not has_datcore_tokens(): return @@ -492,6 +594,7 @@ async def test_dsm_list_datasets_datcore(dsm_fixture, datcore_structured_testbuc assert len(datasets) assert any(BUCKET_NAME in d.display_name for d in datasets) + async def test_dsm_list_dataset_files_s3(dsm_fixture, dsm_mockup_complete_db): dsm_fixture.has_project_db = True @@ -499,13 +602,18 @@ async def test_dsm_list_dataset_files_s3(dsm_fixture, dsm_mockup_complete_db): assert len(datasets) == 1 assert any("Kember" in d.display_name for d in datasets) for d in datasets: - files = await dsm_fixture.list_files_dataset(user_id="21", location=SIMCORE_S3_STR, dataset_id=d.dataset_id) + files = await dsm_fixture.list_files_dataset( + user_id="21", location=SIMCORE_S3_STR, dataset_id=d.dataset_id + ) if "Kember" in d.display_name: assert len(files) == 2 else: assert len(files) == 0 -async def test_dsm_list_dataset_files_datcore(dsm_fixture, datcore_structured_testbucket): + +async def test_dsm_list_dataset_files_datcore( + dsm_fixture, datcore_structured_testbucket +): if not has_datcore_tokens(): return @@ -515,12 +623,17 @@ async def test_dsm_list_dataset_files_datcore(dsm_fixture, datcore_structured_te assert any(BUCKET_NAME in d.display_name for d in datasets) for d in datasets: - files = await dsm_fixture.list_files_dataset(user_id=USER_ID, location=DATCORE_STR, dataset_id=d.dataset_id) + files = await dsm_fixture.list_files_dataset( + user_id=USER_ID, location=DATCORE_STR, dataset_id=d.dataset_id + ) if BUCKET_NAME in d.display_name: assert len(files) == 3 + @pytest.mark.skip(reason="develop only") -async def test_download_links(datcore_structured_testbucket, s3_client, mock_files_factory): +async def test_download_links( + datcore_structured_testbucket, s3_client, mock_files_factory +): s3_client.create_bucket(BUCKET_NAME, delete_contents_if_exists=True) _file = mock_files_factory(count=1)[0] @@ -528,15 +641,15 @@ async def test_download_links(datcore_structured_testbucket, s3_client, mock_fil link = s3_client.create_presigned_get_url(BUCKET_NAME, "test.txt") print(link) - dcw = datcore_structured_testbucket['dcw'] + dcw = datcore_structured_testbucket["dcw"] - endings = ['txt', 'json', 'zip', 'dat', 'mat'] + endings = ["txt", "json", "zip", "dat", "mat"] counter = 1 for e in endings: file_name = "test{}.{}".format(counter, e) file2 = str(Path(_file).parent / file_name) copyfile(_file, file_name) - dataset_id = datcore_structured_testbucket['dataset_id'] + dataset_id = datcore_structured_testbucket["dataset_id"] file_id = await dcw.upload_file_to_id(dataset_id, file_name) link, _file_name = await dcw.download_link_by_id(file_id) print(_file_name, link) diff --git a/services/storage/tests/test_package.py b/services/storage/tests/test_package.py index 33d090b1f77..f2a9c8a9855 100644 --- 
a/services/storage/tests/test_package.py +++ b/services/storage/tests/test_package.py @@ -18,16 +18,17 @@ def pylintrc(osparc_simcore_root_dir): assert pylintrc.exists() return pylintrc + def test_run_pylint(pylintrc, package_dir): try: - AUTODETECT=0 - cmd = f'pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}'.split() + AUTODETECT = 0 + cmd = f"pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}".split() assert subprocess.check_call(cmd) == 0 except subprocess.CalledProcessError as err: pytest.fail("Linting error. Linter existed with code %d" % err.returncode) -def test_main(here): # pylint: disable=unused-variable +def test_main(here): # pylint: disable=unused-variable with pytest.raises(SystemExit) as excinfo: main("--help".split()) diff --git a/services/storage/tests/test_resources.py b/services/storage/tests/test_resources.py index 0ebeeec0a23..1a0ed7c5500 100644 --- a/services/storage/tests/test_resources.py +++ b/services/storage/tests/test_resources.py @@ -14,16 +14,21 @@ log = logging.getLogger(__name__) + @pytest.fixture def app_resources(package_dir): resource_names = [] - for name in (RSC_CONFIG_DIR_KEY, 'api'): + for name in (RSC_CONFIG_DIR_KEY, "api"): folder = package_dir / name - resource_names += [ str(p.relative_to(package_dir)) for p in folder.rglob("*.y*ml") ] + resource_names += [ + str(p.relative_to(package_dir)) for p in folder.rglob("*.y*ml") + ] return resource_names -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ + def test_resource_io_utils(app_resources): @@ -41,14 +46,20 @@ def test_resource_io_utils(app_resources): assert ostream.closed + def test_named_resources(): - exposed = [getattr(resources, name) for name in dir(resources) if name.startswith("RESOURCES")] + exposed = [ + getattr(resources, name) + for name in dir(resources) + if name.startswith("RESOURCES") + ] for resource_name in exposed: assert resources.exists(resource_name) assert resources.isdir(resource_name) assert resources.listdir(resource_name) + def test_paths(app_resources): for resource_name in app_resources: assert resources.get_path(resource_name).exists() diff --git a/services/storage/tests/test_rest.py b/services/storage/tests/test_rest.py index 2f856c833ac..a6407f98a46 100644 --- a/services/storage/tests/test_rest.py +++ b/services/storage/tests/test_rest.py @@ -18,8 +18,7 @@ from aiohttp import web from simcore_service_storage.db import setup_db -from simcore_service_storage.dsm import (APP_DSM_KEY, DataStorageManager, - setup_dsm) +from simcore_service_storage.dsm import APP_DSM_KEY, DataStorageManager, setup_dsm from simcore_service_storage.rest import setup_rest from simcore_service_storage.s3 import setup_s3 from simcore_service_storage.settings import APP_CONFIG_KEY, SIMCORE_S3_ID @@ -29,6 +28,7 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + def parse_db(dsm_mockup_db): id_name_map = {} id_file_count = {} @@ -42,34 +42,43 @@ def parse_db(dsm_mockup_db): return id_file_count, id_name_map + @pytest.fixture -def client(loop, aiohttp_unused_port, aiohttp_client, postgres_service, minio_service, osparc_api_specs_dir): +def client( + loop, + aiohttp_unused_port, + aiohttp_client, + postgres_service, + minio_service, + osparc_api_specs_dir, +): app = web.Application() api_token = os.environ.get("BF_API_KEY", "none") api_secret = os.environ.get("BF_API_SECRET", "none") main_cfg = { - 'port': 
aiohttp_unused_port(), - 'host': 'localhost', - "max_workers" : 4, - "testing" : True, - "test_datcore" : { 'api_token' : api_token, 'api_secret' : api_secret} + "port": aiohttp_unused_port(), + "host": "localhost", + "max_workers": 4, + "testing": True, + "test_datcore": {"api_token": api_token, "api_secret": api_secret}, } rest_cfg = { - 'oas_repo': str(osparc_api_specs_dir), #'${OSPARC_SIMCORE_REPO_ROOTDIR}/api/specs', - #oas_repo: http://localhost:8043/api/specs + "oas_repo": str( + osparc_api_specs_dir + ), #'${OSPARC_SIMCORE_REPO_ROOTDIR}/api/specs', + # oas_repo: http://localhost:8043/api/specs } postgres_cfg = postgres_service s3_cfg = minio_service - # fake config app[APP_CONFIG_KEY] = { - 'main': main_cfg, - 'postgres' : postgres_cfg, - 's3' : s3_cfg, - 'rest': rest_cfg + "main": main_cfg, + "postgres": postgres_cfg, + "s3": s3_cfg, + "rest": rest_cfg, } setup_db(app) @@ -77,9 +86,10 @@ def client(loop, aiohttp_unused_port, aiohttp_client, postgres_service, minio_se setup_dsm(app) setup_s3(app) - cli = loop.run_until_complete( aiohttp_client(app, server_kwargs=main_cfg) ) + cli = loop.run_until_complete(aiohttp_client(app, server_kwargs=main_cfg)) return cli + async def test_health_check(client): resp = await client.get("/v0/") text = await resp.text() @@ -87,13 +97,14 @@ async def test_health_check(client): assert resp.status == 200, text payload = await resp.json() - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert data assert not error - assert data['name'] == 'simcore_service_storage' - assert data['status'] == 'SERVICE_RUNNING' + assert data["name"] == "simcore_service_storage" + assert data["status"] == "SERVICE_RUNNING" + async def test_locations(client): user_id = USER_ID @@ -103,9 +114,9 @@ async def test_locations(client): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) - _locs = 2 if has_datcore_tokens() else 1 + _locs = 2 if has_datcore_tokens() else 1 assert len(data) == _locs assert not error @@ -119,7 +130,7 @@ async def test_s3_files_metadata(client, dsm_mockup_db): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert len(data) == id_file_count[_id] @@ -127,49 +138,69 @@ async def test_s3_files_metadata(client, dsm_mockup_db): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] uuid_filter = os.path.join(fmd.project_id, fmd.node_id) - resp = await client.get("/v0/locations/0/files/metadata?user_id={}&uuid_filter={}".format(fmd.user_id, quote(uuid_filter, safe=''))) + resp = await client.get( + "/v0/locations/0/files/metadata?user_id={}&uuid_filter={}".format( + fmd.user_id, quote(uuid_filter, safe="") + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error for d in data: - assert os.path.join(d['project_id'], d['node_id']) == uuid_filter + assert os.path.join(d["project_id"], d["node_id"]) == uuid_filter + async def test_s3_file_metadata(client, dsm_mockup_db): # go through all files and get them for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await 
client.get("/v0/locations/0/files/{}/metadata?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.get( + "/v0/locations/0/files/{}/metadata?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data + async def test_download_link(client, dsm_mockup_db): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.get("/v0/locations/0/files/{}?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.get( + "/v0/locations/0/files/{}?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data + async def test_upload_link(client, dsm_mockup_db): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.put("/v0/locations/0/files/{}?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.put( + "/v0/locations/0/files/{}?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data + async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket): if not has_datcore_tokens(): return @@ -179,13 +210,19 @@ async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] source_uuid = fmd.file_uuid - datcore_id = datcore_structured_testbucket['coll1_id'] - resp = await client.put("/v0/locations/1/files/{}?user_id={}&extra_location={}&extra_source={}".format(quote(datcore_id, safe=''), - fmd.user_id, SIMCORE_S3_ID, quote(source_uuid, safe=''))) + datcore_id = datcore_structured_testbucket["coll1_id"] + resp = await client.put( + "/v0/locations/1/files/{}?user_id={}&extra_location={}&extra_source={}".format( + quote(datcore_id, safe=""), + fmd.user_id, + SIMCORE_S3_ID, + quote(source_uuid, safe=""), + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data @@ -195,24 +232,33 @@ async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket): # list files for every user user_id = USER_ID - resp = await client.get("/v0/locations/1/files/metadata?user_id={}&uuid_filter={}".format(user_id, BUCKET_NAME)) + resp = await client.get( + "/v0/locations/1/files/metadata?user_id={}&uuid_filter={}".format( + user_id, BUCKET_NAME + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert len(data) > N + async def test_delete_file(client, dsm_mockup_db): id_file_count, _id_name_map = parse_db(dsm_mockup_db) for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.delete("/v0/locations/0/files/{}?user_id={}".format(quote(fmd.file_uuid, 
safe=''), fmd.user_id)) + resp = await client.delete( + "/v0/locations/0/files/{}?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert not data @@ -221,25 +267,19 @@ async def test_delete_file(client, dsm_mockup_db): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert len(data) == 0 + async def test_action_check(client): - QUERY = 'mguidon' - ACTION = 'echo' - FAKE = { - 'path_value': 'one', - 'query_value': 'two', - 'body_value': { - 'a': 33, - 'b': 45 - } - } + QUERY = "mguidon" + ACTION = "echo" + FAKE = {"path_value": "one", "query_value": "two", "body_value": {"a": 33, "b": 45}} resp = await client.post(f"/v0/check/{ACTION}?data={QUERY}", json=FAKE) payload = await resp.json() - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert resp.status == 200, str(payload) assert data @@ -247,8 +287,9 @@ async def test_action_check(client): # TODO: validate response against specs - assert data['path_value'] == ACTION - assert data['query_value'] == QUERY + assert data["path_value"] == ACTION + assert data["query_value"] == QUERY + def get_project_with_data(): projects = [] @@ -258,57 +299,80 @@ def get_project_with_data(): # TODO: add schema validation return projects -@pytest.mark.parametrize("project_name,project", [ (prj['name'], prj) for prj in get_project_with_data()]) -async def test_create_and_delete_folders_from_project(client, dsm_mockup_db, project_name, project, mocker): + +@pytest.mark.parametrize( + "project_name,project", [(prj["name"], prj) for prj in get_project_with_data()] +) +async def test_create_and_delete_folders_from_project( + client, dsm_mockup_db, project_name, project, mocker +): source_project = project destination_project, nodes_map = clone_project_data(source_project) dsm = client.app[APP_DSM_KEY] - mock_dsm = mocker.patch.object(dsm,"copy_file_datcore_s3") + mock_dsm = mocker.patch.object(dsm, "copy_file_datcore_s3") mock_dsm.return_value = Future() mock_dsm.return_value.set_result("Howdie") - # CREATING - url = client.app.router["copy_folders_from_project"].url_for().with_query(user_id="1") - resp = await client.post(url, json={ - 'source':source_project, - 'destination': destination_project, - 'nodes_map': nodes_map - }) + url = ( + client.app.router["copy_folders_from_project"].url_for().with_query(user_id="1") + ) + resp = await client.post( + url, + json={ + "source": source_project, + "destination": destination_project, + "nodes_map": nodes_map, + }, + ) data, _error = await assert_status(resp, expected_cls=web.HTTPCreated) # data should be equal to the destination project, and all store entries should point to simcore.s3 for key in data: - if key!="workbench": + if key != "workbench": assert data[key] == destination_project[key] else: for _node_id, node in data[key].items(): - if 'outputs' in node: - for _o_id, o in node['outputs'].items(): - if 'store' in o: - assert o['store'] == SIMCORE_S3_ID + if "outputs" in node: + for _o_id, o in node["outputs"].items(): + if "store" in o: + assert o["store"] == SIMCORE_S3_ID # DELETING - project_id = data['uuid'] - url = 
client.app.router["delete_folders_of_project"].url_for(folder_id=project_id).with_query(user_id="1") + project_id = data["uuid"] + url = ( + client.app.router["delete_folders_of_project"] + .url_for(folder_id=project_id) + .with_query(user_id="1") + ) resp = await client.delete(url) await assert_status(resp, expected_cls=web.HTTPNoContent) + async def test_s3_datasets_metadata(client): - url = client.app.router["get_datasets_metadata"].url_for(location_id=str(SIMCORE_S3_ID)).with_query(user_id="21") + url = ( + client.app.router["get_datasets_metadata"] + .url_for(location_id=str(SIMCORE_S3_ID)) + .with_query(user_id="21") + ) resp = await client.get(url) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error + async def test_s3_files_datasets_metadata(client): - url = client.app.router["get_files_metadata_dataset"].url_for(location_id=str(SIMCORE_S3_ID), dataset_id="aa").with_query(user_id="21") + url = ( + client.app.router["get_files_metadata_dataset"] + .url_for(location_id=str(SIMCORE_S3_ID), dataset_id="aa") + .with_query(user_id="21") + ) resp = await client.get(url) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py index 393ccde3ac0..80282ab8e19 100644 --- a/services/storage/tests/utils.py +++ b/services/storage/tests/utils.py @@ -8,35 +8,44 @@ import requests import sqlalchemy as sa -from simcore_service_storage.models import (FileMetaData, file_meta_data, - projects, user_to_projects, users) +from simcore_service_storage.models import ( + FileMetaData, + file_meta_data, + projects, + user_to_projects, + users, +) log = logging.getLogger(__name__) -DATABASE = 'aio_login_tests' -USER = 'admin' -PASS = 'admin' +DATABASE = "aio_login_tests" +USER = "admin" +PASS = "admin" -ACCESS_KEY = '12345678' -SECRET_KEY = '12345678' +ACCESS_KEY = "12345678" +SECRET_KEY = "12345678" + +BUCKET_NAME = "simcore-testing" +USER_ID = "0" -BUCKET_NAME ="simcore-testing" -USER_ID = '0' def current_dir(): return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + def data_dir(): return current_dir() / Path("data") -def has_datcore_tokens()->bool: + +def has_datcore_tokens() -> bool: token = os.environ.get("BF_API_KEY", "none") if token == "none": pytest.skip("Datcore access tokens not available, skipping test") return False return True + def is_responsive(url, code=200): """Check if something responds to ``url`` syncronously""" try: @@ -48,6 +57,7 @@ def is_responsive(url, code=200): return False + def is_postgres_responsive(url): """Check if something responds to ``url`` """ try: @@ -58,6 +68,7 @@ def is_postgres_responsive(url): return False return True + def create_tables(url, engine=None): meta = sa.MetaData() if not engine: @@ -74,48 +85,58 @@ def drop_tables(url, engine=None): meta.drop_all(bind=engine, tables=[file_meta_data]) + def insert_metadata(url: str, fmd: FileMetaData): - #FIXME: E1120:No value for argument 'dml' in method call + # FIXME: E1120:No value for argument 'dml' in method call # pylint: disable=E1120 ins = file_meta_data.insert().values( - file_uuid = fmd.file_uuid, - location_id = fmd.location_id, - location = fmd.location, - bucket_name = 
fmd.bucket_name, - object_name = fmd.object_name, - project_id = fmd.project_id, - project_name = fmd.project_name, - node_id = fmd.node_id, - node_name = fmd.node_name, - file_name = fmd.file_name, - user_id = fmd.user_id, - user_name= fmd.user_name, - file_id = fmd.file_id, - raw_file_path = fmd.raw_file_path, - display_file_path = fmd.display_file_path, - created_at = fmd.created_at, - last_modified = fmd.last_modified, - file_size = fmd.file_size) - + file_uuid=fmd.file_uuid, + location_id=fmd.location_id, + location=fmd.location, + bucket_name=fmd.bucket_name, + object_name=fmd.object_name, + project_id=fmd.project_id, + project_name=fmd.project_name, + node_id=fmd.node_id, + node_name=fmd.node_name, + file_name=fmd.file_name, + user_id=fmd.user_id, + user_name=fmd.user_name, + file_id=fmd.file_id, + raw_file_path=fmd.raw_file_path, + display_file_path=fmd.display_file_path, + created_at=fmd.created_at, + last_modified=fmd.last_modified, + file_size=fmd.file_size, + ) engine = sa.create_engine(url) conn = engine.connect() conn.execute(ins) engine.dispose() + def create_full_tables(url): meta = sa.MetaData() engine = sa.create_engine(url) - meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users], checkfirst=True) - meta.create_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users]) + meta.drop_all( + bind=engine, + tables=[file_meta_data, projects, user_to_projects, users], + checkfirst=True, + ) + meta.create_all( + bind=engine, tables=[file_meta_data, projects, user_to_projects, users] + ) for t in ["file_meta_data", "projects", "users", "user_to_projects"]: filename = t + ".csv" csv_file = str(data_dir() / Path(filename)) - with open(csv_file, 'r') as file: + with open(csv_file, "r") as file: data_df = pd.read_csv(file) - data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append') + data_df.to_sql( + t, con=engine, index=False, index_label="id", if_exists="append" + ) # NOTE: Leave here as a reference # import psycopg2 @@ -145,9 +166,12 @@ def create_full_tables(url): # data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append') engine.dispose() + def drop_all_tables(url): meta = sa.MetaData() engine = sa.create_engine(url) - meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users]) + meta.drop_all( + bind=engine, tables=[file_meta_data, projects, user_to_projects, users] + ) engine.dispose() diff --git a/services/web/Dockerfile b/services/web/Dockerfile index 04aec36e255..2849e7f513c 100644 --- a/services/web/Dockerfile +++ b/services/web/Dockerfile @@ -46,6 +46,16 @@ FROM base as build ENV SC_BUILD_TARGET build +# Installing client libraries and any other package you need +# +# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html +# libstdc++: needed in ujson https://github.com/kohlschutter/junixsocket/issues/33 +# +RUN apk update && \ + apk add --no-cache \ + libpq \ + libstdc++ + RUN apk add --no-cache \ postgresql-dev \ gcc \ @@ -53,7 +63,7 @@ RUN apk add --no-cache \ libffi-dev RUN $SC_PIP install --upgrade \ - pip~=19.1.1 \ + pip~=20.0.2 \ wheel \ setuptools diff --git a/services/web/client/source/boot/index.html b/services/web/client/source/boot/index.html index fb80d86895b..ebaabd8e5be 100644 --- a/services/web/client/source/boot/index.html +++ b/services/web/client/source/boot/index.html @@ -41,5 +41,7 @@ + ${preBootJs} + diff --git a/services/web/client/source/class/osparc/component/export/ExportGroup.js 
b/services/web/client/source/class/osparc/component/export/ExportGroup.js new file mode 100644 index 00000000000..d50ad948f4f --- /dev/null +++ b/services/web/client/source/class/osparc/component/export/ExportGroup.js @@ -0,0 +1,246 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2020 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Odei Maiz (odeimaiz) + */ + +/** + * Widget for exporting nodes-group: + * - Creates a copy of the inner nodes, so that values and access levels can be modified + * - If any of the inner nodes was connected to a non inner node, that connection is removed + * - The exported group is added to the catalog + */ + +qx.Class.define("osparc.component.export.ExportGroup", { + extend: qx.ui.core.Widget, + + /** + * @param node {osparc.data.model.Node} Group Node to be exported + */ + construct: function(node) { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(5)); + + this.set({ + inputNode: node + }); + + this.__prepareOutputNode(); + this.__prepareOutputWorkbench(); + this.__buildLayout(); + }, + + events: { + "finished": "qx.event.type.Data" + }, + + properties: { + inputNode: { + check: "osparc.data.model.Node", + nullable: false + }, + + outputNode: { + check: "osparc.data.model.Node", + nullable: false + }, + + outputWorkbench: { + check: "osparc.data.model.Workbench", + nullable: false + } + }, + + members: { + __groupName: null, + __groupDesc: null, + __activeStudy: null, + + tearDown: function() { + osparc.store.Store.getInstance().setCurrentStudy(this.__activeStudy); + }, + + __prepareOutputNode: function() { + const inputNode = this.getInputNode(); + + const key = inputNode.getKey(); + const version = inputNode.getVersion(); + const nodeData = inputNode.serialize(); + const nodesGroup = new osparc.data.model.Node(key, version); + nodesGroup.populateInputOutputData(nodeData); + this.setOutputNode(nodesGroup); + }, + + __prepareOutputWorkbench: function() { + const inputNode = this.getInputNode(); + + const studydata = { + workbench: this.__groupToWorkbenchData(inputNode) + }; + const dummyStudy = new osparc.data.model.Study(studydata); + + this.__activeStudy = osparc.store.Store.getInstance().getCurrentStudy(); + osparc.store.Store.getInstance().setCurrentStudy(dummyStudy); + this.setOutputWorkbench(dummyStudy.getWorkbench()); + dummyStudy.getWorkbench().buildWorkbench(); + }, + + __buildLayout: function() { + const { + formRenderer, + manager + } = this.__buildMetaDataForm(); + this._add(formRenderer); + + const scroll = new qx.ui.container.Scroll(); + const settingsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + const settingsView = this.__buildOutputSettings(); + settingsLayout.add(settingsView, { + flex: 1 + }); + scroll.add(settingsLayout); + this._add(scroll, { + flex: 1 + }); + + const exportBtn = new qx.ui.toolbar.Button(this.tr("Export")); + exportBtn.addListener("execute", () => { + if (manager.validate()) { + this.__exportAsMacroService(exportBtn); + } + }, this); + const actionsBar = new qx.ui.toolbar.ToolBar(); + const actionsPart = new qx.ui.toolbar.Part(); + actionsBar.addSpacer(); + actionsPart.add(exportBtn); + actionsBar.add(actionsPart); + this._add(actionsBar); + }, + + __buildMetaDataForm: function() { + const manager = new qx.ui.form.validation.Manager(); + const metaDataForm = new qx.ui.form.Form(); + + const groupName = this.__groupName = new qx.ui.form.TextField(this.getInputNode().getLabel()); + 
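+      // the name field is prefilled with the node's label; it is made required just below, and the Export button only proceeds once this validation manager passes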
groupName.setRequired(true); + manager.add(groupName); + metaDataForm.add(groupName, this.tr("Name")); + + const groupDesc = this.__groupDesc = new qx.ui.form.TextField(); + metaDataForm.add(groupDesc, this.tr("Description")); + + const formRenderer = new qx.ui.form.renderer.Single(metaDataForm).set({ + padding: 10 + }); + + return { + formRenderer, + manager + }; + }, + + __buildOutputSettings: function() { + const innerNodes = this.getOutputWorkbench().getNodes(true); + const settingsEditorLayout = osparc.component.node.GroupNodeView.getSettingsEditorLayout(innerNodes); + return settingsEditorLayout; + }, + + __exportAsMacroService: function(exportBtn) { + exportBtn.setIcon("@FontAwesome5Solid/circle-notch/12"); + exportBtn.getChildControl("icon").getContentElement() + .addClass("rotate"); + + const outputNode = this.getOutputNode(); + const outputWorkbench = this.getOutputWorkbench(); + + const nodeKey = "simcore/services/frontend/nodes-group/macros/" + outputNode.getNodeId(); + const version = "1.0.0"; + const nodesGroupService = osparc.utils.Services.getNodesGroup(); + nodesGroupService["key"] = nodeKey; + nodesGroupService["version"] = version; + nodesGroupService["name"] = this.__groupName.getValue(); + nodesGroupService["description"] = this.__groupDesc.getValue(); + nodesGroupService["contact"] = osparc.auth.Data.getInstance().getEmail(); + nodesGroupService["workbench"] = outputWorkbench.serializeWorkbench(); + + // Use editorValues + const innerNodes = this.getOutputWorkbench().getNodes(true); + const nodes = Object.values(innerNodes); + for (const node of nodes) { + const nodeEntry = nodesGroupService["workbench"][node.getNodeId()]; + for (let [portId, portValue] of Object.entries(node.getInputEditorValues())) { + nodeEntry.inputs[portId] = portValue; + } + } + osparc.data.Resources.fetch("groups", "post", {data: nodesGroupService}) + .then(data => { + const text = this.tr("Group added to the Service catalog"); + osparc.component.message.FlashMessenger.getInstance().logAs(text, "INFO"); + this.fireDataEvent("finished"); + }) + .catch(err => { + console.error("error creating group", err); + const text = this.tr("Something went wrong adding the Group to the Service catalog"); + osparc.component.message.FlashMessenger.getInstance().logAs(text, "ERROR"); + }) + .finally(() => { + exportBtn.resetIcon(); + exportBtn.getChildControl("icon").getContentElement() + .removeClass("rotate"); + }); + }, + + __groupToWorkbenchData: function(nodesGroup) { + let workbenchData = {}; + + // serialize innerNodes + const innerNodes = nodesGroup.getInnerNodes(true); + Object.values(innerNodes).forEach(innerNode => { + workbenchData[innerNode.getNodeId()] = innerNode.serialize(); + }); + + // remove parent from first level + const firstLevelNodes = nodesGroup.getInnerNodes(false); + Object.values(firstLevelNodes).forEach(firstLevelNode => { + workbenchData[firstLevelNode.getNodeId()]["parent"] = null; + }); + + // deep copy workbenchData + workbenchData = osparc.utils.Utils.deepCloneObject(workbenchData); + + // removeOutReferences + workbenchData = this.__removeOutReferences(workbenchData); + + // replace Uuids + workbenchData = osparc.data.Converters.replaceUuids(workbenchData); + + return workbenchData; + }, + + __removeOutReferences: function(workbench) { + const innerNodeIds = Object.keys(workbench); + for (const nodeId in workbench) { + const node = workbench[nodeId]; + const inputNodes = node.inputNodes; + for (let i=0; i label + ": " + fromPortLabel - }); - - this.fireDataEvent("linkAdded", 
toPortId); - - return true; - }, - - removeLink: function(toPortId) { - this.getControl(toPortId).setEnabled(true); - if ("link" in this.getControl(toPortId)) { - delete this.getControl(toPortId).link; - } - - this.fireDataEvent("linkRemoved", toPortId); } } }); diff --git a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js index f63e671ce48..8dd438bc7ae 100644 --- a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js +++ b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js @@ -20,7 +20,7 @@ qx.Class.define("osparc.component.form.ToggleButtonContainer", { }, members: { - // overriden + // overridden add: function(child, options) { if (child instanceof qx.ui.form.ToggleButton) { this.base(arguments, child, options); diff --git a/services/web/client/source/class/osparc/component/form/renderer/PropForm.js b/services/web/client/source/class/osparc/component/form/renderer/PropForm.js index af46af0faef..949ff75e247 100644 --- a/services/web/client/source/class/osparc/component/form/renderer/PropForm.js +++ b/services/web/client/source/class/osparc/component/form/renderer/PropForm.js @@ -6,7 +6,7 @@ Utf8Check: äöü ************************************************************************ */ -/* eslint no-underscore-dangle: ["error", { "allowAfterThis": true, "allow": ["__ctrlMap"] }] */ +/* eslint no-underscore-dangle: ["error", { "allowAfterThis": true}] */ /** * A special renderer for AutoForms which includes notes below the section header */ qx.Class.define("osparc.component.form.renderer.PropForm", { - extend : qx.ui.form.renderer.Single, + extend: osparc.component.form.renderer.PropFormBase, + /** - * create a page for the View Tab with the given title - * - * @param vizWidget {Widget} visualization widget to embedd - */ + * create a page for the View Tab with the given title + * + * @param form {osparc.component.form.Auto} form widget to embed + * @param node {osparc.data.model.Node} Node owning the widget + */ construct: function(form, node) { - if (node) { - this.setNode(node); - } else { - this.setNode(null); - } + this.base(arguments, form, node); - this.base(arguments, form); - let fl = this._getLayout(); - // have plenty of space for input, not for the labels - fl.setColumnFlex(0, 0); - fl.setColumnAlign(0, "left", "top"); - fl.setColumnFlex(1, 1); - fl.setColumnMinWidth(1, 130); + this.__ctrlLinkMap = {}; + this.__addLinkCtrls(); this.setDroppable(true); this.__attachDragoverHighlighter(); }, events: { - "removeLink" : "qx.event.type.Data", - "dataFieldModified": "qx.event.type.Data" - }, - - properties: { - node: { - check: "osparc.data.model.Node", - nullable: true - } + "linkModified": "qx.event.type.Data" }, statics: { @@ -69,9 +54,10 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { // eslint-disable-next-line qx-rules/no-refs-in-members members: { + // overridden _gridPos: { label: 0, - entryField: 1, + ctrlField: 1, retrieveStatus: 2 }, _retrieveStatus: { @@ -80,45 +66,14 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { retrieving: 1, succeed: 2 }, - addItems: function(items, names, title, itemOptions, headerOptions) { - // add the header - if (title !== null) { - this._add( - this._createHeader(title), { - row: this._row, - column: this._gridPos.label, - colSpan: Object.keys(this._gridPos).length - } - ); - this._row++; - } - // add the items - for (let i = 0; i
< items.length; i++) { - let item = items[i]; - let label = this._createLabel(names[i], item); - this._add(label, { - row: this._row, - column: this._gridPos.label - }); - label.setBuddy(item); + __ctrlLinkMap: null, - const field = new osparc.component.form.FieldWHint(null, item.description, item); - field.key = item.key; - this._add(field, { - row: this._row, - column: this._gridPos.entryField - }); - this._row++; - this._connectVisibility(item, label); - // store the names for translation - if (qx.core.Environment.get("qx.dynlocale")) { - this._names.push({ - name: names[i], - label: label, - item: items[i] - }); - } + // overridden + addItems: function(items, names, title, itemOptions, headerOptions) { + this.base(arguments, items, names, title, itemOptions, headerOptions); + + items.forEach(item => { this.__createDropMechanism(item, item.key); // Notify focus and focus out @@ -134,68 +89,29 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { qx.event.message.Bus.getInstance().dispatchByName("inputFocusout", msgDataFn); } }, this); - } + }); }, - getValues: function() { - let data = this._form.getData(); - for (const portId in data) { - let ctrl = this._form.getControl(portId); - if (ctrl && ctrl.link) { - data[portId] = ctrl.link; - } - // FIXME: "null" should be a valid input - if (data[portId] === "null") { - data[portId] = null; - } - } - let filteredData = {}; + // overridden + setAccessLevel: function(data) { for (const key in data) { - if (data[key] !== null) { - filteredData[key] = data[key]; - } - } - return filteredData; - }, + const control = this._form.getControl(key); + this.__changeControlVisibility(control, data[key]); - __getLayoutChild(portId, column) { - let row = null; - const children = this._getChildren(); - for (let i=0; i { this.__unhighlightAll(); }); + }, + + getControlLinks: function() { + return this.__ctrlLinkMap; + }, + + getControlLink: function(key) { + return this.__ctrlLinkMap[key]; + }, + + __addLinkCtrls: function() { + Object.keys(this._form.getControls()).forEach(portId => { + this.__addLinkCtrl(portId); + }); + }, + + __addLinkCtrl: function(portId) { + const controlLink = new qx.ui.form.TextField().set({ + enabled: false + }); + controlLink.key = portId; + this.__ctrlLinkMap[portId] = controlLink; + }, + + __isPortAvailable: function(portId) { + const port = this._form.getControl(portId); + if (!port || !port.getEnabled() || Object.prototype.hasOwnProperty.call(port, "link")) { + return false; + } + return true; + }, + + addLink: function(toPortId, fromNodeId, fromPortId) { + if (!this.__isPortAvailable(toPortId)) { + return false; + } + this.getControlLink(toPortId).setEnabled(false); + this._form.getControl(toPortId).link = { + nodeUuid: fromNodeId, + output: fromPortId + }; + + const study = osparc.store.Store.getInstance().getCurrentStudy(); + const workbench = study.getWorkbench(); + const fromNode = workbench.getNode(fromNodeId); + const port = fromNode.getOutput(fromPortId); + const fromPortLabel = port ? 
port.label : null; + fromNode.bind("label", this.getControlLink(toPortId), "value", { + converter: label => label + ": " + fromPortLabel + }); + + this.linkAdded(toPortId); + + return true; + }, + + addLinks: function(data) { + for (let key in data) { + if (data[key] !== null && typeof data[key] === "object" && data[key].nodeUuid) { + this.addLink(key, data[key].nodeUuid, data[key].output); + } + } + }, + + removeLink: function(toPortId) { + this.getControlLink(toPortId).setEnabled(false); + if ("link" in this._form.getControl(toPortId)) { + delete this._form.getControl(toPortId).link; + } + + this.linkRemoved(toPortId); + }, + + hasVisibleInputs: function() { + const children = this._getChildren(); + for (let i=0; i { + this.__addAccessLevelRB(portId); + }); + }, + + __addAccessLevelRB: function(portId) { + const rbHidden = new qx.ui.form.RadioButton(this.tr("Not Visible")); + rbHidden.accessLevel = this._visibility.hidden; + rbHidden.portId = portId; + const rbReadOnly = new qx.ui.form.RadioButton(this.tr("Read Only")); + rbReadOnly.accessLevel = this._visibility.readOnly; + rbReadOnly.portId = portId; + const rbEditable = new qx.ui.form.RadioButton(this.tr("Editable")); + rbEditable.accessLevel = this._visibility.readWrite; + rbEditable.portId = portId; + + const groupBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + groupBox.add(rbHidden); + groupBox.add(rbReadOnly); + groupBox.add(rbEditable); + + const group = new qx.ui.form.RadioGroup(rbHidden, rbReadOnly, rbEditable); + group.setSelection([rbEditable]); + this.__ctrlRBsMap[portId] = group; + group.addListener("changeSelection", this.__onAccessLevelChanged, this); + + const ctrlField = this._getCtrlFieldChild(portId); + if (ctrlField) { + const idx = ctrlField.idx; + const child = ctrlField.child; + const layoutProps = child.getLayoutProperties(); + this._addAt(groupBox, idx, { + row: layoutProps.row, + column: this._gridPos.accessLevel + }); + } + }, + + __onAccessLevelChanged: function(e) { + const selectedButton = e.getData()[0]; + const { + accessLevel, + portId + } = selectedButton; + + const data = {}; + data[portId] = accessLevel; + + this.__setAccessLevel(data); + + let inputAccess = this.getNode().getInputAccess(); + if (inputAccess === null) { + inputAccess = {}; + } + inputAccess[portId] = accessLevel; + this.getNode().setInputAccess(inputAccess); + + const propWidget = this.getNode().getPropsWidget(); + propWidget.setAccessLevel(data); + }, + + __addDelTag: function(label) { + const newLabel = "<del>" + label + "</del>"; + return newLabel; + }, + + __removeDelTag: function(label) { + let newLabel = label.replace("<del>", ""); + newLabel = newLabel.replace("</del>", ""); + return newLabel; + }, + + __setAccessLevel: function(data) { + for (const key in data) { + const label = this._getLabelFieldChild(key).child; + const control = this._form.getControl(key); + switch (data[key]) { + case this._visibility.hidden: { + const newLabel = this.__addDelTag(label.getValue()); + label.setValue(newLabel); + label.setEnabled(false); + control.setEnabled(false); + break; + } + case this._visibility.readOnly: { + const newLabel = this.__removeDelTag(label.getValue()); + label.setValue(newLabel); + label.setEnabled(false); + control.setEnabled(false); + break; + } + case this._visibility.readWrite: { + const newLabel = this.__removeDelTag(label.getValue()); + label.setValue(newLabel); + label.setEnabled(true); + control.setEnabled(true); + break; + } + } + } + }, + + __getRadioButtonsFieldChild: function(portId) { + return
this._getLayoutChild(portId, this._gridPos.accessLevel); + } + } +}); diff --git a/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js b/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js index 750164c310a..fb7828cf386 100644 --- a/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js +++ b/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js @@ -145,7 +145,7 @@ qx.Class.define("osparc.component.metadata.ServiceInfo", { __createDescription: function() { const description = new osparc.ui.markdown.Markdown(); - description.setMarkdown(this.__metadata.description); + description.setMarkdown(this.__metadata.description || ""); return description; }, diff --git a/services/web/client/source/class/osparc/component/node/BaseNodeView.js b/services/web/client/source/class/osparc/component/node/BaseNodeView.js index 9c37d3c165a..48f5a6e9d3e 100644 --- a/services/web/client/source/class/osparc/component/node/BaseNodeView.js +++ b/services/web/client/source/class/osparc/component/node/BaseNodeView.js @@ -35,11 +35,24 @@ qx.Class.define("osparc.component.node.BaseNodeView", { createSettingsGroupBox: function(label) { const settingsGroupBox = new qx.ui.groupbox.GroupBox(label).set({ appearance: "settings-groupbox", - maxWidth: 500, + maxWidth: 800, alignX: "center", layout: new qx.ui.layout.VBox() }); return settingsGroupBox; + }, + + createWindow: function(label) { + const win = new qx.ui.window.Window(label).set({ + layout: new qx.ui.layout.Grow(), + contentPadding: 10, + showMinimize: false, + resizable: true, + modal: true, + height: 600, + width: 800 + }); + return win; } }, @@ -65,6 +78,14 @@ qx.Class.define("osparc.component.node.BaseNodeView", { _buttonContainer: null, _filesButton: null, + populateLayout: function() { + this.getNode().bind("label", this._title, "value"); + this._addInputPortsUIs(); + this._addSettings(); + this._addIFrame(); + this._addButtons(); + }, + __buildInputsView: function() { const inputsView = this._inputsView = new osparc.desktop.SidePanel().set({ minWidth: 300 @@ -123,19 +144,6 @@ qx.Class.define("osparc.component.node.BaseNodeView", { inputFont: "text-18", editable: osparc.data.Permissions.getInstance().canDo("study.node.rename") }); - titlePart.add(title); - - const infoBtn = new qx.ui.toolbar.Button(this.tr("Info"), "@FontAwesome5Solid/info-circle/14"); - infoPart.add(infoBtn); - - const filesBtn = this._filesButton = new qx.ui.toolbar.Button(this.tr("Files"), "@FontAwesome5Solid/folder-open/14"); - osparc.utils.Utils.setIdToWidget(filesBtn, "nodeViewFilesBtn"); - buttonsPart.add(filesBtn); - - filesBtn.addListener("execute", () => this.__openNodeDataManager(), this); - - infoBtn.addListener("execute", () => this.__openServiceInfo(), this); - title.addListener("editValue", evt => { if (evt.getData() !== this._title.getValue()) { const node = this.getNode(); @@ -146,6 +154,22 @@ qx.Class.define("osparc.component.node.BaseNodeView", { qx.event.message.Bus.getInstance().dispatchByName("updateStudy", study.serializeStudy()); } }, this); + titlePart.add(title); + + const infoBtn = new qx.ui.toolbar.Button(this.tr("Info"), "@FontAwesome5Solid/info-circle/14"); + infoBtn.addListener("execute", () => this.__openServiceInfo(), this); + infoPart.add(infoBtn); + + if (osparc.data.Permissions.getInstance().canDo("study.node.update")) { + const editAccessLevel = new qx.ui.toolbar.Button(this.tr("Edit Access Level")); + editAccessLevel.addListener("execute", () => this._openEditAccessLevel(), 
this); + infoPart.add(editAccessLevel); + } + + const filesBtn = this._filesButton = new qx.ui.toolbar.Button(this.tr("Files"), "@FontAwesome5Solid/folder-open/14"); + osparc.utils.Utils.setIdToWidget(filesBtn, "nodeViewFilesBtn"); + filesBtn.addListener("execute", () => this.__openNodeDataManager(), this); + buttonsPart.add(filesBtn); return toolbar; }, @@ -204,10 +228,6 @@ qx.Class.define("osparc.component.node.BaseNodeView", { if (retrieveIFrameButton) { this._buttonContainer.add(retrieveIFrameButton); } - let restartIFrameButton = this.getNode().getRestartIFrameButton(); - if (restartIFrameButton) { - this._buttonContainer.add(restartIFrameButton); - } this._buttonContainer.add(this._filesButton); this._toolbar.add(this._buttonContainer); }, @@ -270,6 +290,27 @@ qx.Class.define("osparc.component.node.BaseNodeView", { }, this); }, + /** + * @abstract + */ + _addSettings: function() { + throw new Error("Abstract method called!"); + }, + + /** + * @abstract + */ + _addIFrame: function() { + throw new Error("Abstract method called!"); + }, + + /** + * @abstract + */ + _openEditAccessLevel: function() { + throw new Error("Abstract method called!"); + }, + /** * @abstract * @param node {osparc.data.model.Node} node diff --git a/services/web/client/source/class/osparc/component/node/GroupNodeView.js b/services/web/client/source/class/osparc/component/node/GroupNodeView.js index 9a7d4683036..9c6e2aef4fd 100644 --- a/services/web/client/source/class/osparc/component/node/GroupNodeView.js +++ b/services/web/client/source/class/osparc/component/node/GroupNodeView.js @@ -36,26 +36,35 @@ qx.Class.define("osparc.component.node.GroupNodeView", { this.base(arguments); }, - members: { - populateLayout: function() { - this.getNode().bind("label", this._title, "value"); - this._addInputPortsUIs(); - this.__addSettings(); - this.__addIFrame(); - this._addButtons(); - }, + statics: { + getSettingsEditorLayout: function(nodes) { + const settingsEditorLayout = osparc.component.node.BaseNodeView.createSettingsGroupBox("Settings"); + Object.values(nodes).forEach(innerNode => { + const propsWidgetEditor = innerNode.getPropsWidgetEditor(); + if (propsWidgetEditor && Object.keys(innerNode.getInputs()).length) { + const innerSettings = osparc.component.node.BaseNodeView.createSettingsGroupBox().set({ + maxWidth: 700 + }); + innerNode.bind("label", innerSettings, "legend"); + innerSettings.add(propsWidgetEditor); + settingsEditorLayout.add(innerSettings); + } + }); + return settingsEditorLayout; + } + }, - __addSettings: function() { + members: { + _addSettings: function() { this._settingsLayout.removeAll(); this._mapperLayout.removeAll(); const innerNodes = this.getNode().getInnerNodes(true); Object.values(innerNodes).forEach(innerNode => { - // const innerSettings = this.superclass.self().createSettingsGroupBox(); - const innerSettings = osparc.component.node.BaseNodeView.createSettingsGroupBox(); - innerNode.bind("label", innerSettings, "legend"); const propsWidget = innerNode.getPropsWidget(); - if (propsWidget && Object.keys(innerNode.getInputs()).length) { + if (propsWidget && Object.keys(innerNode.getInputs()).length && propsWidget.hasVisibleInputs()) { + const innerSettings = osparc.component.node.BaseNodeView.createSettingsGroupBox(); + innerNode.bind("label", innerSettings, "legend"); innerSettings.add(propsWidget); this._settingsLayout.add(innerSettings); } @@ -73,7 +82,7 @@ qx.Class.define("osparc.component.node.GroupNodeView", { }); }, - __addIFrame: function() { + _addIFrame: function() { 
this._iFrameLayout.removeAll(); const tabView = new qx.ui.tabview.TabView().set({ @@ -109,6 +118,14 @@ qx.Class.define("osparc.component.node.GroupNodeView", { }); }, + _openEditAccessLevel: function() { + const settingsEditorLayout = this.self().getSettingsEditorLayout(this.getNode().getInnerNodes()); + const win = osparc.component.node.BaseNodeView.createWindow(this.getNode().getLabel()); + win.add(settingsEditorLayout); + win.center(); + win.open(); + }, + _applyNode: function(node) { if (!node.isContainer()) { console.error("Only group nodes are supported"); diff --git a/services/web/client/source/class/osparc/component/node/NodeView.js b/services/web/client/source/class/osparc/component/node/NodeView.js index de19d9dbd09..bf4d50af30b 100644 --- a/services/web/client/source/class/osparc/component/node/NodeView.js +++ b/services/web/client/source/class/osparc/component/node/NodeView.js @@ -42,15 +42,7 @@ qx.Class.define("osparc.component.node.NodeView", { }, members: { - populateLayout: function() { - this.getNode().bind("label", this._title, "value"); - this._addInputPortsUIs(); - this.__addSettings(); - this.__addIFrame(); - this._addButtons(); - }, - - __addSettings: function() { + _addSettings: function() { this._settingsLayout.removeAll(); this._mapperLayout.removeAll(); @@ -72,7 +64,7 @@ qx.Class.define("osparc.component.node.NodeView", { }); }, - __addIFrame: function() { + _addIFrame: function() { this._iFrameLayout.removeAll(); const iFrame = this.getNode().getIFrame(); @@ -94,6 +86,16 @@ qx.Class.define("osparc.component.node.NodeView", { }); }, + _openEditAccessLevel: function() { + const settingsEditorLayout = osparc.component.node.BaseNodeView.createSettingsGroupBox(this.tr("Settings")); + settingsEditorLayout.add(this.getNode().getPropsWidgetEditor()); + + const win = osparc.component.node.BaseNodeView.createWindow(this.getNode().getLabel()); + win.add(settingsEditorLayout); + win.center(); + win.open(); + }, + _applyNode: function(node) { if (node.isContainer()) { console.error("Only non-group nodes are supported"); diff --git a/services/web/client/source/class/osparc/component/widget/InputsMapper.js b/services/web/client/source/class/osparc/component/widget/InputsMapper.js index 7b5082fa03e..32a6f61018a 100644 --- a/services/web/client/source/class/osparc/component/widget/InputsMapper.js +++ b/services/web/client/source/class/osparc/component/widget/InputsMapper.js @@ -184,8 +184,8 @@ qx.Class.define("osparc.component.widget.InputsMapper", { let newItemBranch = qx.data.marshal.Json.createModel(newBranch, true); const itemProps = osparc.dev.fake.Data.getItem(null, Object.keys(node.getInputsDefault())[0], defValueId); if (itemProps) { - let form = new osparc.component.form.Auto(itemProps, this.getNode()); - let propsWidget = new osparc.component.form.renderer.PropForm(form); + let form = new osparc.component.form.Auto(itemProps); + let propsWidget = new osparc.component.form.renderer.PropForm(form, this.getNode()); newItemBranch["propsWidget"] = propsWidget; } data.children.push(newItemBranch); @@ -274,8 +274,8 @@ qx.Class.define("osparc.component.widget.InputsMapper", { // Hmmmm not sure about the double getKey :( const itemProps = osparc.dev.fake.Data.getItem(null, fromPortKey, newItem.getKey().getKey()); if (itemProps) { - let form = new osparc.component.form.Auto(itemProps, this.getNode()); - let propsWidget = new osparc.component.form.renderer.PropForm(form); + let form = new osparc.component.form.Auto(itemProps); + let propsWidget = new 
osparc.component.form.renderer.PropForm(form, this.getNode()); newItem["propsWidget"] = propsWidget; } } diff --git a/services/web/client/source/class/osparc/component/widget/NodeInOut.js b/services/web/client/source/class/osparc/component/widget/NodeInOut.js index 13fc623bb00..8ac580f44ea 100644 --- a/services/web/client/source/class/osparc/component/widget/NodeInOut.js +++ b/services/web/client/source/class/osparc/component/widget/NodeInOut.js @@ -102,7 +102,7 @@ qx.Class.define("osparc.component.widget.NodeInOut", { this.emptyPorts(); const metaData = this.getNode().getMetaData(); - this.__createUIPorts(isInput, metaData.outputs); + this.__createUIPorts(isInput, metaData && metaData.outputs); }, __createUIPorts: function(isInput, ports) { diff --git a/services/web/client/source/class/osparc/component/widget/NodesTree.js b/services/web/client/source/class/osparc/component/widget/NodesTree.js index ae5de475187..fe84dda2f2c 100644 --- a/services/web/client/source/class/osparc/component/widget/NodesTree.js +++ b/services/web/client/source/class/osparc/component/widget/NodesTree.js @@ -171,8 +171,12 @@ qx.Class.define("osparc.component.widget.NodesTree", { createItem: () => new osparc.component.widget.NodeTreeItem(), bindItem: (c, item, id) => { c.bindDefaultProperties(item, id); - c.bindProperty("label", "label", null, item, id); c.bindProperty("nodeId", "nodeId", null, item, id); + const node = study.getWorkbench().getNode(item.getModel().getNodeId()); + if (node) { + node.bind("label", item.getModel(), "label"); + } + c.bindProperty("label", "label", null, item, id); }, configureItem: item => { item.addListener("dbltap", () => { @@ -236,7 +240,6 @@ qx.Class.define("osparc.component.widget.NodesTree", { if (selectedItem) { if (selectedItem.getIsContainer()) { const nodeId = selectedItem.getNodeId(); - this.__openItem(nodeId); this.fireDataEvent("exportNode", nodeId); } else { osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Only Groups can be exported."), "ERROR"); diff --git a/services/web/client/source/class/osparc/component/widget/PersistentIframe.js b/services/web/client/source/class/osparc/component/widget/PersistentIframe.js index 1a68e5b7d06..58fb791e3f5 100644 --- a/services/web/client/source/class/osparc/component/widget/PersistentIframe.js +++ b/services/web/client/source/class/osparc/component/widget/PersistentIframe.js @@ -27,8 +27,8 @@ qx.Class.define("osparc.component.widget.PersistentIframe", { construct: function(source, el) { this.base(arguments, source); }, - properties : - { + + properties: { /** * Show a Maximize Button */ @@ -38,15 +38,20 @@ qx.Class.define("osparc.component.widget.PersistentIframe", { apply: "_applyShowMaximize" } }, + events: { + /** Fired for requesting a restart */ + "restart" : "qx.event.type.Event", /** Fired if the iframe is restored from a minimized or maximized state */ "restore" : "qx.event.type.Event", /** Fired if the iframe is maximized */ "maximize" : "qx.event.type.Event" }, + members: { __iframe: null, __syncScheduled: null, + __restartButton: null, __actionButton: null, // override _createContentElement : function() { @@ -63,6 +68,22 @@ qx.Class.define("osparc.component.widget.PersistentIframe", { appRoot.add(iframe, { top:-10000 }); + const restartButton = this.__restartButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/redo-alt/14").set({ + zIndex: 20, + paddingLeft: 8, + paddingRight: 8, + paddingTop: 6, + paddingBottom: 6, + backgroundColor: "transparent", + decorator: null + }); + 
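+      // like the iframe itself, the restart button is parented off-screen in the application root and repositioned over the iframe on sync; executing it relays the "restart" event declared above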
restartButton.addListener("execute", e => { + this.fireEvent("restart"); + }, this); + osparc.utils.Utils.setIdToWidget(restartButton, "iFrameRestartBtn"); + appRoot.add(restartButton, { + top:-10000 + }); let actionButton = this.__actionButton = new qx.ui.form.Button(null, osparc.theme.osparcdark.Image.URLS["window-maximize"]+"/20").set({ zIndex: 20, backgroundColor: "transparent", @@ -83,6 +104,9 @@ qx.Class.define("osparc.component.widget.PersistentIframe", { iframe.setLayoutProperties({ top: -10000 }); + restartButton.setLayoutProperties({ + top: -10000 + }); actionButton.setLayoutProperties({ top: -10000 }); @@ -129,8 +153,12 @@ qx.Class.define("osparc.component.widget.PersistentIframe", { window.setTimeout(() => { this.__syncScheduled = false; let iframeParentPos = qx.bom.element.Location.get(qx.bom.element.Location.getOffsetParent(this.__iframe.getContentElement().getDomElement()), "scroll"); - let divPos = qx.bom.element.Location.get(this.getContentElement().getDomElement(), "scroll"); - let divSize = qx.bom.element.Dimension.getSize(this.getContentElement().getDomElement()); + const domElement = this.getContentElement().getDomElement(); + if (domElement === null) { + return; + } + let divPos = qx.bom.element.Location.get(domElement, "scroll"); + let divSize = qx.bom.element.Dimension.getSize(domElement); this.__iframe.setLayoutProperties({ top: divPos.top - iframeParentPos.top, left: (divPos.left - iframeParentPos.left) @@ -139,6 +167,10 @@ qx.Class.define("osparc.component.widget.PersistentIframe", { width: (divSize.width), height: (divSize.height) }); + this.__restartButton.setLayoutProperties({ + top: (divPos.top - iframeParentPos.top), + right: (iframeParentPos.right - iframeParentPos.left - divPos.right) + 35 + }); this.__actionButton.setLayoutProperties({ top: (divPos.top - iframeParentPos.top), right: (iframeParentPos.right - iframeParentPos.left - divPos.right) diff --git a/services/web/client/source/class/osparc/component/workbench/NodeUI.js b/services/web/client/source/class/osparc/component/workbench/NodeUI.js index 2d839f136de..4a047c0b726 100644 --- a/services/web/client/source/class/osparc/component/workbench/NodeUI.js +++ b/services/web/client/source/class/osparc/component/workbench/NodeUI.js @@ -184,10 +184,8 @@ qx.Class.define("osparc.component.workbench.NodeUI", { this.setIcon("@FontAwesome5Solid/folder-open/14"); } const metaData = node.getMetaData(); - if (metaData) { - this.__createUIPorts(true, metaData.inputs); - this.__createUIPorts(false, metaData.outputs); - } + this.__createUIPorts(true, metaData && metaData.inputs); + this.__createUIPorts(false, metaData && metaData.outputs); if (node.isComputational() || node.isFilePicker()) { node.bind("progress", this.__progressBar, "value"); } diff --git a/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js b/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js index 1d569cc572b..cbf702eb86e 100644 --- a/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js +++ b/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js @@ -200,15 +200,10 @@ qx.Class.define("osparc.component.workbench.ServiceCatalog", { __populateList: function(reload = false) { this.__allServicesList = []; let store = osparc.store.Store.getInstance(); - let services = store.getServices(reload); - if (services === null) { - store.addListener("servicesRegistered", e => { - const data = e.getData(); - this.__addNewData(data["services"]); - }, this); - } else 
{ - this.__addNewData(services); - } + store.getServices(reload) + .then(services => { + this.__addNewData(services); + }); }, __addNewData: function(newData) { diff --git a/services/web/client/source/class/osparc/component/workbench/SvgWidget.js b/services/web/client/source/class/osparc/component/workbench/SvgWidget.js index 12dc21ee9cb..964cfd7b4fa 100644 --- a/services/web/client/source/class/osparc/component/workbench/SvgWidget.js +++ b/services/web/client/source/class/osparc/component/workbench/SvgWidget.js @@ -46,12 +46,20 @@ qx.Class.define("osparc.component.workbench.SvgWidget", { this.__svgWrapper = new osparc.wrapper.Svg(); this.__svgWrapper.addListener(("svgLibReady"), () => { this.__canvas = this.__svgWrapper.createEmptyCanvas(svgLayerId); + this.setReady(true); this.fireDataEvent("SvgWidgetReady", true); }); this.__svgWrapper.init(); }); }, + properties: { + ready: { + check: "Boolean", + init: false + } + }, + events: { "SvgWidgetReady": "qx.event.type.Data" }, diff --git a/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js b/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js index d33152c5203..70fc3cd8488 100644 --- a/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js +++ b/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js @@ -78,16 +78,6 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { this.__desktopCanvas.add(this.__startHint); this.__svgWidgetLinks = new osparc.component.workbench.SvgWidget("SvgWidget_Links"); - // this gets fired once the widget has appeared and the library has been loaded - // due to the qx rendering, this will always happen after setup, so we are - // sure to catch this event - this.__svgWidgetLinks.addListenerOnce("SvgWidgetReady", () => { - // Will be called only the first time Svg lib is loaded - this.loadModel(workbench); - const study = osparc.store.Store.getInstance().getCurrentStudy(); - this.__nodeSelected(study.getUuid()); - }); - this.__desktop.add(this.__svgWidgetLinks, { left: 0, top: 0, @@ -761,6 +751,16 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { }, loadModel: function(model) { + if (this.__svgWidgetLinks.getReady()) { + this.__loadModel(model); + } else { + this.__svgWidgetLinks.addListenerOnce("SvgWidgetReady", () => { + this.__loadModel(model); + }, this); + } + }, + + __loadModel: function(model) { this.clearAll(); this.resetSelectedNodes(); this.__currentModel = model; diff --git a/services/web/client/source/class/osparc/data/Converters.js b/services/web/client/source/class/osparc/data/Converters.js index c0a1ad2ecab..20320fee639 100644 --- a/services/web/client/source/class/osparc/data/Converters.js +++ b/services/web/client/source/class/osparc/data/Converters.js @@ -239,6 +239,20 @@ qx.Class.define("osparc.data.Converters", { return "@MaterialIcons/insert_drive_file/15"; } return "@MaterialIcons/arrow_right_alt/15"; + }, + + replaceUuids: function(workbench) { + let workbenchStr = JSON.stringify(workbench); + const innerNodeIds = Object.keys(workbench); + for (let i=0; i<innerNodeIds.length; i++) { + const innerNodeId = innerNodeIds[i]; + const newNodeId = osparc.utils.Utils.uuidv4(); + const regEx = new RegExp(innerNodeId, "g"); + workbenchStr = workbenchStr.replace(regEx, newNodeId); + } + workbench = JSON.parse(workbenchStr); + return workbench; } }, diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js --- a/services/web/client/source/class/osparc/data/model/Node.js +++ b/services/web/client/source/class/osparc/data/model/Node.js + startInBackend: function() { + // create the node in the backend + const key = this.getKey(); + const version = this.getVersion(); + const study = osparc.store.Store.getInstance().getCurrentStudy(); + const params = { + url: { + projectId: study.getUuid() + }, + data: { + "service_key": key, + "service_version": version, + "service_id": this.getNodeId() + } + }; + osparc.data.Resources.fetch("studies", "addNode", params) + .then(data => { + this.startDynamicService(); + }) + .catch(err => { + const errorMsg = "Error when starting " + key + ":" + version + ": " + err.getTarget().getResponse()["error"]; + const errorMsgData = { + nodeId: this.getNodeId(), + msg: errorMsg + }; + this.fireDataEvent("showInLogger", errorMsgData); + this.setInteractiveStatus("failed"); + osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("There was an error while starting the node."), "ERROR"); + }); + },
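With this change the Node model owns its backend lifecycle, while the Workbench only builds and wires the models (its createNode/removeNode hunks appear further down). A minimal sketch of the resulting call pattern, assuming hypothetical caller code and any valid service coordinates for key/version:

// Sketch only: driving a node's backend lifecycle from the outside.
const study = osparc.store.Store.getInstance().getCurrentStudy();
const node = study.getWorkbench().createNode(key, version, null, null);
node.startInBackend();   // "studies"/"addNode" resource; on success it calls startDynamicService()
// ... when the node is no longer needed ...
node.stopInBackend();    // "studies"/"deleteNode" resource; errors are only logged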
node."), "ERROR"); + }); + }, + + stopInBackend: function() { + // remove node in the backend + const study = osparc.store.Store.getInstance().getCurrentStudy(); + const params = { + url: { + projectId: study.getUuid(), + nodeId: this.getNodeId() + } + }; + osparc.data.Resources.fetch("studies", "deleteNode", params) + .catch(err => console.error(err)); + }, + repopulateOutputPortData: function() { if (this.__outputWidget) { this.__outputWidget.populatePortsData(); @@ -439,34 +495,50 @@ qx.Class.define("osparc.data.model.Node", { * */ __addSettings: function(inputs) { - const form = this.__settingsForm = new osparc.component.form.Auto(inputs, this); - form.addListener("linkAdded", e => { - const changedField = e.getData(); - this.getPropsWidget().linkAdded(changedField); - }, this); - form.addListener("linkRemoved", e => { - const changedField = e.getData(); - this.getPropsWidget().linkRemoved(changedField); - }, this); - + const form = this.__settingsForm = new osparc.component.form.Auto(inputs); const propsWidget = new osparc.component.form.renderer.PropForm(form, this); this.setPropsWidget(propsWidget); - propsWidget.addListener("removeLink", e => { - const changedField = e.getData(); - this.__settingsForm.removeLink(changedField); - }, this); - propsWidget.addListener("dataFieldModified", e => { - const portId = e.getData(); + propsWidget.addListener("linkModified", e => { + const linkModified = e.getData(); + const portId = linkModified.portId; this.__retrieveInputs(portId); }, this); }, + __addSettingsEditor: function(inputs) { + const propsWidget = this.getPropsWidget(); + const form = new osparc.component.form.Auto(inputs); + form.setData(this.__settingsForm.getData()); + const propsWidgetEditor = new osparc.component.form.renderer.PropFormEditor(form, this); + this.__settingsForm.addListener("changeData", e => { + // apply data + const data = this.__settingsForm.getData(); + form.setData(data); + }, this); + propsWidget.addListener("linkModified", e => { + const linkModified = e.getData(); + const portId = linkModified.portId; + const added = linkModified.added; + if (added) { + const srcControlLink = propsWidget.getControlLink(portId); + const controlLink = new qx.ui.form.TextField().set({ + enabled: false + }); + srcControlLink.bind("value", controlLink, "value"); + propsWidgetEditor.linkAdded(portId, controlLink); + } else { + propsWidgetEditor.linkRemoved(portId); + } + }, this); + this.setPropsWidgetEditor(propsWidgetEditor); + }, + removeNodePortConnections: function(inputNodeId) { let inputs = this.getInputValues(); for (const portId in inputs) { if (inputs[portId] && Object.prototype.hasOwnProperty.call(inputs[portId], "nodeUuid")) { if (inputs[portId]["nodeUuid"] === inputNodeId) { - this.__settingsForm.removeLink(portId); + this.getPropsWidget().removeLink(portId); } } } @@ -496,7 +568,10 @@ qx.Class.define("osparc.data.model.Node", { let filteredInputs = this.__removeNonSettingInputs(inputs); filteredInputs = this.__addMapper(filteredInputs); - this.__addSettings(filteredInputs); + if (Object.keys(filteredInputs).length) { + this.__addSettings(filteredInputs); + this.__addSettingsEditor(filteredInputs); + } }, __addOutputs: function(outputs) { @@ -505,23 +580,45 @@ qx.Class.define("osparc.data.model.Node", { this.__addOutputWidget(); }, - setInputData: function(nodeData) { - if (this.__settingsForm && nodeData) { - this.__settingsForm.setData(nodeData.inputs); - if ("inputAccess" in nodeData) { - this.__settingsForm.setAccessLevel(nodeData.inputAccess); - 
+ __isInputDataALink: function(data) { + if (data !== null && typeof data === "object" && data.nodeUuid) { + return true; + } + return false; + }, + + setInputData: function(inputs) { + if (this.__settingsForm && inputs) { + const inputData = {}; + const inputLinks = {}; + const inputsCopy = osparc.utils.Utils.deepCloneObject(inputs); + for (let key in inputsCopy) { + if (this.__isInputDataALink(inputsCopy[key])) { + inputLinks[key] = inputsCopy[key]; + } else { + inputData[key] = inputsCopy[key]; + } } + this.getPropsWidget().addLinks(inputLinks); + this.__settingsForm.setData(inputData); + } }, + + setInputDataAccess: function(inputAccess) { + if (inputAccess) { + this.setInputAccess(inputAccess); + this.getPropsWidget().setAccessLevel(inputAccess); + this.getPropsWidgetEditor().setAccessLevel(inputAccess); } }, - setOutputData: function(nodeData) { - if (nodeData.outputs) { - for (const outputKey in nodeData.outputs) { + setOutputData: function(outputs) { + if (outputs) { + for (const outputKey in outputs) { if (!Object.prototype.hasOwnProperty.call(this.__outputs, outputKey)) { this.__outputs[outputKey] = {}; } - this.__outputs[outputKey]["value"] = nodeData.outputs[outputKey]; + this.__outputs[outputKey]["value"] = outputs[outputKey]; this.fireDataEvent("outputChanged", outputKey); } } @@ -573,7 +670,7 @@ qx.Class.define("osparc.data.model.Node", { }, addPortLink: function(toPortId, fromNodeId, fromPortId) { - return this.__settingsForm.addLink(toPortId, fromNodeId, fromPortId); + return this.getPropsWidget().addLink(toPortId, fromNodeId, fromPortId); }, // ----- Input Nodes ----- @@ -583,9 +680,9 @@ qx.Class.define("osparc.data.model.Node", { addInputNodes: function(inputNodes) { if (inputNodes) { - for (let i=0; i<inputNodes.length; i++) { - this.addInputNode(inputNodes[i]); - } + inputNodes.forEach(inputNode => { + this.addInputNode(inputNode); + }); } }, @@ -620,9 +717,9 @@ qx.Class.define("osparc.data.model.Node", { addOutputNodes: function(outputNodes) { if (outputNodes) { - for (let i=0; i<outputNodes.length; i++) { - this.addOutputNode(outputNodes[i]); - } + outputNodes.forEach(outputNode => { + this.addOutputNode(outputNode); + }); } }, @@ -661,7 +758,11 @@ qx.Class.define("osparc.data.model.Node", { restartIFrame: function(loadThis) { if (this.getIFrame() === null) { - this.setIFrame(new osparc.component.widget.PersistentIframe()); + const iframe = new osparc.component.widget.PersistentIframe(); + iframe.addListener("restart", () => { + this.restartIFrame(); + }, this); + this.setIFrame(iframe); } if (loadThis) { this.getIFrame().resetSource(); @@ -704,11 +805,22 @@ qx.Class.define("osparc.data.model.Node", { }, __retrieveInputs: function(portKey) { - const data = { - node: this, - portKey - }; - this.fireDataEvent("retrieveInputs", data); + if (this.isContainer()) { + const innerNodes = Object.values(this.getInnerNodes()); + for (let i=0; i<innerNodes.length; i++) { + innerNodes[i].__retrieveInputs(portKey); + } + } else { + const data = { + node: this, + portKey + }; + this.fireDataEvent("retrieveInputs", data); + } }, updReq.addListener("success", e => { + let resp = e.getTarget().getResponse(); + if (typeof resp === "string") { + resp = JSON.parse(resp); + } const { data - } = e.getTarget().getResponse(); + } = resp; const sizeBytes = (data && ("size_bytes" in data)) ? data["size_bytes"] : 0; this.getPropsWidget().retrievedPortData(portKey, true, sizeBytes); console.log(data);
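To make the new input handling concrete, this is the shape of serialized inputs that setInputData now splits: plain values go to the settings form, port links to the PropForm widget. A small illustrative object (the port names and the uuid are invented):

// Illustrative only: "in_number" carries a plain value; "in_file" is a
// port link, which __isInputDataALink() detects by its nodeUuid field.
const inputs = {
  "in_number": 3.14,
  "in_file": {
    nodeUuid: "12345678-aaaa-bbbb-cccc-1234567890ab",
    output: "outFile"
  }
};
node.setInputData(inputs);  // "in_number" -> settings form, "in_file" -> addLinks()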
data["size_bytes"] : 0; this.getPropsWidget().retrievedPortData(portKey, true, sizeBytes); console.log(data); @@ -760,24 +876,26 @@ qx.Class.define("osparc.data.model.Node", { addDynamicButtons: function() { if (this.isDynamic() && this.isRealService()) { - const retrieveBtn = new qx.ui.toolbar.Button(this.tr("Retrieve"), "@FontAwesome5Solid/spinner/14"); - osparc.utils.Utils.setIdToWidget(retrieveBtn, "nodeViewRetrieveBtn"); - retrieveBtn.addListener("execute", e => { - this.__retrieveInputs(); - }, this); - retrieveBtn.setEnabled(false); - this.setRetrieveIFrameButton(retrieveBtn); - - const restartBtn = new qx.ui.toolbar.Button(this.tr("Restart"), "@FontAwesome5Solid/redo-alt/14"); - osparc.utils.Utils.setIdToWidget(restartBtn, "nodeViewRestartBtn"); - restartBtn.addListener("execute", e => { - this.restartIFrame(); - }, this); - restartBtn.setEnabled(false); - this.setRestartIFrameButton(restartBtn); - + this.__addRetrieveButton(); this.__showLoadingIFrame(); } + if (this.isContainer()) { + const innerNodes = Object.values(this.getInnerNodes()); + if (innerNodes.some(innerNode => innerNode.isDynamic())) { + this.__addRetrieveButton(); + this.getRetrieveIFrameButton().setEnabled(true); + } + } + }, + + __addRetrieveButton: function() { + const retrieveBtn = new qx.ui.toolbar.Button(this.tr("Retrieve"), "@FontAwesome5Solid/spinner/14"); + osparc.utils.Utils.setIdToWidget(retrieveBtn, "nodeViewRetrieveBtn"); + retrieveBtn.addListener("execute", e => { + this.__retrieveInputs(); + }, this); + retrieveBtn.setEnabled(false); + this.setRetrieveIFrameButton(retrieveBtn); }, startDynamicService: function() { @@ -847,6 +965,10 @@ qx.Class.define("osparc.data.model.Node", { }, __nodeState: function() { const study = osparc.store.Store.getInstance().getCurrentStudy(); + if (study === null) { + return; + } + const params = { url: { projectId: study.getUuid(), @@ -896,7 +1018,6 @@ qx.Class.define("osparc.data.model.Node", { this.fireDataEvent("showInLogger", msgData); this.getRetrieveIFrameButton().setEnabled(true); - this.getRestartIFrameButton().setEnabled(true); this.setProgress(100); // FIXME: Apparently no all services are inmediately ready when they publish the port @@ -908,20 +1029,29 @@ qx.Class.define("osparc.data.model.Node", { this.__retrieveInputs(); }, - removeNode: function() { - this.removeIFrame(); + __removeInnerNodes: function() { const innerNodes = Object.values(this.getInnerNodes()); - for (const innerNode of innerNodes) { - innerNode.removeNode(); + for (let i=0; i { - node.startDynamicService(); - }) - .catch(err => { - const errorMsg = "Error when starting " + metaData.key + ":" + metaData.version + ": " + err.getTarget().getResponse()["error"]; - const errorMsgData = { - nodeId: node.getNodeId(), - msg: errorMsg - }; - node.fireDataEvent("showInLogger", errorMsgData); - node.setInteractiveStatus("failed"); - osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("There was an error while starting the node."), "ERROR"); - }); + const metaData = node.getMetaData(); + if (metaData && Object.prototype.hasOwnProperty.call(metaData, "workbench")) { + this.__createInnerWorkbench(node, metaData); + } return node; }, + __createInnerWorkbench: function(parentNode, metaData) { + // this is must be a nodes group + const workbench = osparc.data.Converters.replaceUuids(metaData["workbench"]); + for (let innerNodeId in workbench) { + workbench[innerNodeId]["parent"] = workbench[innerNodeId]["parent"] || parentNode.getNodeId(); + } + + this.__deserializeWorkbench(workbench); + + 
for (let innerNodeId in workbench) { + this.getNode(innerNodeId).startInBackend(); + } + }, + __initNodeSignals: function(node) { if (node) { node.addListener("showInLogger", e => { @@ -247,10 +233,7 @@ qx.Class.define("osparc.data.model.Workbench", { const parentNode = this.getNode(nodeToClone.getParentNodeId()); let node = this.createNode(key, version, null, parentNode); const nodeData = nodeToClone.serialize(); - node.setInputData(nodeData); - node.setOutputData(nodeData); - node.addInputNodes(nodeData.inputNodes); - node.addOutputNodes(nodeData.outputNodes); + node.populateInputOutputData(nodeData); return node; }, @@ -284,16 +267,6 @@ qx.Class.define("osparc.data.model.Workbench", { if (!osparc.data.Permissions.getInstance().canDo("study.node.delete", true)) { return false; } - // remove node in the backend - const study = osparc.store.Store.getInstance().getCurrentStudy(); - const params = { - url: { - projectId: study.getUuid(), - nodeId: nodeId - } - }; - osparc.data.Resources.fetch("studies", "deleteNode", params) - .catch(err => console.error(err)); // remove first the connected edges const connectedEdges = this.getConnectedEdges(nodeId); @@ -480,7 +453,7 @@ qx.Class.define("osparc.data.model.Workbench", { const brotherNodes = this.__getBrotherNodes(currentModel, selectedNodeIds); // Create nodesGroup - const nodesGroupService = osparc.utils.Services.getNodesGroupService(); + const nodesGroupService = osparc.utils.Services.getNodesGroup(); const parentNode = currentModel.getNodeId ? currentModel : null; const nodesGroup = this.createNode(nodesGroupService.key, nodesGroupService.version, null, parentNode); if (!nodesGroup) { diff --git a/services/web/client/source/class/osparc/desktop/MainPage.js b/services/web/client/source/class/osparc/desktop/MainPage.js index e7dd48ee469..f564a7cc81f 100644 --- a/services/web/client/source/class/osparc/desktop/MainPage.js +++ b/services/web/client/source/class/osparc/desktop/MainPage.js @@ -89,7 +89,7 @@ qx.Class.define("osparc.desktop.MainPage", { let dashboard = this.__dashboard = new osparc.desktop.Dashboard(); dashboard.getStudyBrowser().addListener("startStudy", e => { const studyEditor = e.getData(); - this.__showStudyEditor(studyEditor); + this.__startStudyEditor(studyEditor); }, this); prjStack.add(dashboard); @@ -105,7 +105,7 @@ qx.Class.define("osparc.desktop.MainPage", { } }, - __showStudyEditor: function(studyEditor) { + __startStudyEditor: function(studyEditor) { if (this.__studyEditor) { this.__prjStack.remove(this.__studyEditor); } @@ -115,7 +115,7 @@ qx.Class.define("osparc.desktop.MainPage", { this.__prjStack.add(this.__studyEditor); this.__prjStack.setSelection([this.__studyEditor]); this.__navBar.setStudy(study); - this.__navBar.setPathButtons(study.getWorkbench().getPathIds(study.getUuid())); + this.__navBar.setPathButtons(this.__studyEditor.getCurrentPathIds()); this.__studyEditor.addListener("changeMainViewCaption", ev => { const elements = ev.getData(); diff --git a/services/web/client/source/class/osparc/desktop/NavigationBar.js b/services/web/client/source/class/osparc/desktop/NavigationBar.js index c1e9e93bdfe..56c43c95e89 100644 --- a/services/web/client/source/class/osparc/desktop/NavigationBar.js +++ b/services/web/client/source/class/osparc/desktop/NavigationBar.js @@ -152,20 +152,22 @@ qx.Class.define("osparc.desktop.NavigationBar", { setPathButtons: function(nodeIds) { this.__mainViewCaptionLayout.removeAll(); - const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); + 
nodeIds.length === 1 ? this.__studyTitle.show() : this.__studyTitle.exclude(); if (nodeIds.length === 0) { this.__highlightDashboard(true); - } else if (nodeIds.length === 1) { - this.__studyTitle.show(); return; } - this.__studyTitle.exclude(); + if (nodeIds.length === 1) { + return; + } + + const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); + const study = osparc.store.Store.getInstance().getCurrentStudy(); for (let i=0; i { - if (this.__servicesReady) { - userTimer.stop(); - this._removeAll(); - iframe.dispose(); - this.__createServicesLayout(); - this.__attachEventHandlers(); - } - }, this); - userTimer.start(); - - this.__initResources(); + this.__initResources(iframe); }, members: { - __servicesReady: null, + __reloadBtn: null, __serviceFilters: null, __allServices: null, - __servicesList: null, - __versionsList: null, - __searchTextfield: null, + __latestServicesModel: null, + __servicesUIList: null, + __versionsUIBox: null, + __deleteServiceBtn: null, + __selectedService: null, /** * Function that resets the selected item by reseting the filters and the service selection @@ -80,23 +69,42 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { if (this.__serviceFilters) { this.__serviceFilters.reset(); } - if (this.__servicesList) { - this.__servicesList.setSelection([]); + if (this.__servicesUIList) { + this.__servicesUIList.setSelection([]); } }, - __initResources: function() { - this.__getServicesPreload(); + __initResources: function(iframe) { + const store = osparc.store.Store.getInstance(); + store.getServices(true) + .then(services => { + // Do not validate if are not taking actions + // this.__nodeCheck(services); + this._removeAll(); + iframe.dispose(); + this.__createServicesLayout(); + this.__populateList(false); + this.__attachEventHandlers(); + }); }, - __getServicesPreload: function() { + __populateList: function(reload) { + this.__reloadBtn.setFetching(true); + const store = osparc.store.Store.getInstance(); - store.addListener("servicesRegistered", e => { - // Do not validate if are not taking actions - // this.__nodeCheck(e.getData()); - this.__servicesReady = e.getData(); - }, this); - store.getServices(true); + store.getServices(reload) + .then(services => { + this.__allServices = services; + this.__latestServicesModel.removeAll(); + for (const serviceKey in services) { + const latestService = osparc.utils.Services.getLatest(services, serviceKey); + this.__latestServicesModel.append(qx.data.marshal.Json.createModel(latestService)); + } + }) + .finally(() => { + this.__reloadBtn.setFetching(false); + this.__serviceFilters.dispatch(); + }); }, __createServicesLayout: function() { @@ -112,42 +120,47 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { __createServicesListLayout: function() { const servicesLayout = this.__createVBoxWLabel(this.tr("Services")); + // button for refetching services + const reloadBtn = this.__reloadBtn = new osparc.ui.form.FetchButton().set({ + label: this.tr("Reload"), + icon: "@FontAwesome5Solid/sync-alt/14", + allowGrowX: false + }); + reloadBtn.addListener("execute", function() { + this.__populateList(true); + }, this); + servicesLayout.add(reloadBtn); + const serviceFilters = this.__serviceFilters = new osparc.component.filter.group.ServiceFilterGroup("serviceBrowser"); servicesLayout.add(serviceFilters); - const servicesList = this.__servicesList = new qx.ui.form.List().set({ + const servicesUIList = this.__servicesUIList = new qx.ui.form.List().set({ orientation: "vertical", minWidth: 400, 
appearance: "pb-list" }); - servicesList.addListener("changeSelection", e => { + servicesUIList.addListener("changeSelection", e => { if (e.getData() && e.getData().length>0) { const selectedKey = e.getData()[0].getModel(); this.__serviceSelected(selectedKey); } }, this); - const store = osparc.store.Store.getInstance(); - const latestServices = []; - const services = this.__allServices = store.getServices(); - for (const serviceKey in services) { - latestServices.push(osparc.utils.Services.getLatest(services, serviceKey)); - } - const latestServicesModel = new qx.data.Array( - latestServices.map(s => qx.data.marshal.Json.createModel(s)) - ); - const servCtrl = new qx.data.controller.List(latestServicesModel, servicesList, "name"); + + const latestServicesModel = this.__latestServicesModel = new qx.data.Array(); + const servCtrl = new qx.data.controller.List(latestServicesModel, servicesUIList, "name"); servCtrl.setDelegate({ createItem: () => { const item = new osparc.desktop.ServiceBrowserListItem(); item.subscribeToFilterGroup("serviceBrowser"); item.addListener("tap", e => { - servicesList.setSelection([item]); + servicesUIList.setSelection([item]); }); return item; }, bindItem: (ctrl, item, id) => { ctrl.bindProperty("key", "model", null, item, id); ctrl.bindProperty("key", "key", null, item, id); + ctrl.bindProperty("version", "version", null, item, id); ctrl.bindProperty("name", "title", null, item, id); ctrl.bindProperty("description", "description", null, item, id); ctrl.bindProperty("type", "type", null, item, id); @@ -155,20 +168,10 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { ctrl.bindProperty("contact", "contact", null, item, id); } }); - servicesLayout.add(servicesList, { + servicesLayout.add(servicesUIList, { flex: 1 }); - // Workaround to the list.changeSelection - servCtrl.addListener("changeValue", e => { - if (e.getData() && e.getData().length>0) { - const selectedService = e.getData().toArray()[0]; - this.__serviceSelected(selectedService); - } else { - this.__serviceSelected(null); - } - }, this); - return servicesLayout; }, @@ -187,7 +190,7 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { titleContainer.add(new qx.ui.basic.Atom(this.tr("Version"))); - const versions = this.__versionsList = new qx.ui.form.SelectBox(); + const versions = this.__versionsUIBox = new qx.ui.form.SelectBox(); osparc.utils.Utils.setIdToWidget(versions, "serviceBrowserVersionsDrpDwn"); titleContainer.add(versions); versions.addListener("changeSelection", e => { @@ -197,6 +200,26 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { }, this); descriptionView.add(titleContainer); + const actionsContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + actionsContainer.add(new qx.ui.core.Spacer(300, null)); + const deleteServiceBtn = this.__deleteServiceBtn = new osparc.ui.form.FetchButton(this.tr("Delete")).set({ + allowGrowX: false, + visibility: "hidden" + }); + deleteServiceBtn.addListener("execute", () => { + const msg = this.tr("Are you sure you want to delete the group?"); + const win = new osparc.ui.window.Confirmation(msg); + win.addListener("close", () => { + if (win.getConfirmed()) { + this.__deleteService(); + } + }, this); + win.center(); + win.open(); + }, this); + actionsContainer.add(deleteServiceBtn); + descriptionView.add(actionsContainer); + const descriptionContainer = this.__serviceDescription = new qx.ui.container.Scroll(); descriptionView.add(descriptionContainer, { flex: 1 @@ -226,17 +249,17 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { 
}, this); textfield.addListener("keypress", e => { if (e.getKeyIdentifier() === "Enter") { - const selectables = this.__servicesList.getSelectables(); + const selectables = this.__servicesUIList.getSelectables(); if (selectables) { - this.__servicesList.setSelection([selectables[0]]); + this.__servicesUIList.setSelection([selectables[0]]); } } }, this); }, __serviceSelected: function(serviceKey) { - if (this.__versionsList) { - const versionsList = this.__versionsList; + if (this.__versionsUIBox) { + const versionsList = this.__versionsUIBox; versionsList.removeAll(); if (serviceKey in this.__allServices) { const versions = osparc.utils.Services.getVersions(this.__allServices, serviceKey); @@ -258,7 +281,7 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { }, __versionSelected: function(versionKey) { - const serviceSelection = this.__servicesList.getSelection(); + const serviceSelection = this.__servicesUIList.getSelection(); if (serviceSelection.length > 0) { const serviceKey = serviceSelection[0].getModel(); const selectedService = osparc.utils.Services.getFromObject(this.__allServices, serviceKey, versionKey); @@ -267,15 +290,47 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { }, __updateServiceDescription: function(selectedService) { + let showDelete = false; const serviceDescription = this.__serviceDescription; if (serviceDescription) { - if (selectedService) { - const serviceInfo = new osparc.component.metadata.ServiceInfo(selectedService); - serviceDescription.add(serviceInfo); - } else { - serviceDescription.add(null); - } + const serviceInfo = selectedService ? new osparc.component.metadata.ServiceInfo(selectedService) : null; + serviceDescription.add(serviceInfo); + this.__selectedService = selectedService; + showDelete = this.__canServiceBeDeleted(selectedService); + } + this.__deleteServiceBtn.setVisibility(showDelete ? 
"visible" : "hidden"); + }, + + __canServiceBeDeleted: function(selectedService) { + if (selectedService) { + const isMacro = selectedService.key.includes("frontend/nodes-group/macros"); + const isOwner = selectedService.contact === osparc.auth.Data.getInstance().getEmail(); + return isMacro && isOwner; } + return false; + }, + + __deleteService: function() { + this.__deleteServiceBtn.setFetching(true); + + const serviceId = this.__selectedService.id; + const params = { + url: { + groupId: serviceId + } + }; + osparc.data.Resources.fetch("groups", "delete", params, serviceId) + .then(() => { + this.__updateServiceDescription(null); + this.__populateList(true); + }) + .catch(err => { + osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Unable to delete the group."), "ERROR"); + console.error(err); + }) + .finally(() => { + this.__deleteServiceBtn.setFetching(false); + }); }, __nodeCheck: function(services) { diff --git a/services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js b/services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js index d005d8c6eed..38f192ce30b 100644 --- a/services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js @@ -74,6 +74,15 @@ qx.Class.define("osparc.desktop.ServiceBrowserListItem", { apply : "_applyKey" }, + version: { + check: "String" + }, + + dagId: { + check : "String", + nullable : true + }, + title: { check : "String", apply : "_applyTitle", @@ -161,22 +170,34 @@ qx.Class.define("osparc.desktop.ServiceBrowserListItem", { }, _applyKey: function(value, old) { + if (value === null) { + return; + } const parts = value.split("/"); const id = parts.pop(); osparc.utils.Utils.setIdToWidget(this, "serviceBrowserListItem_"+id); }, _applyTitle: function(value) { + if (value === null) { + return; + } const label = this.getChildControl("title"); label.setValue(value); }, _applyDescription: function(value) { + if (value === null) { + return; + } const label = this.getChildControl("description"); label.setValue(value); }, _applyContact: function(value) { + if (value === null) { + return; + } const label = this.getChildControl("contact"); label.setValue(value); }, @@ -193,7 +214,7 @@ qx.Class.define("osparc.desktop.ServiceBrowserListItem", { }, _shouldApplyFilter: function(data) { - if (data.text) { + if (data.text && this.getTitle()) { const label = this.getTitle() .trim() .toLowerCase(); @@ -201,7 +222,7 @@ qx.Class.define("osparc.desktop.ServiceBrowserListItem", { return true; } } - if (data.tags && data.tags.length) { + if (data.tags && data.tags.length && this.getCategory()) { const category = this.getCategory() || ""; const type = this.getType() || ""; if (!data.tags.includes(osparc.utils.Utils.capitalize(category.trim())) && !data.tags.includes(osparc.utils.Utils.capitalize(type.trim()))) { diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/desktop/StudyBrowser.js index 1aaceb7435a..9fe7fcd63b2 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowser.js @@ -204,9 +204,8 @@ qx.Class.define("osparc.desktop.StudyBrowser", { __getServicesPreload: function() { let store = osparc.store.Store.getInstance(); store.addListener("servicesRegistered", e => { - this.__servicesReady = e.getData(); + this.__servicesReady = true; }, this); - store.getServices(true); }, 
__createStudiesLayout: function() { @@ -293,7 +292,7 @@ qx.Class.define("osparc.desktop.StudyBrowser", { win.center(); win.open(); win.addListener("close", () => { - if (win["value"] === 1) { + if (win.getConfirmed()) { this.__deleteStudy(selection.map(button => this.__getStudyData(button.getUuid(), isTemplate)), isTemplate); } }, this); @@ -603,18 +602,8 @@ qx.Class.define("osparc.desktop.StudyBrowser", { }, __createConfirmWindow: function(isMulti) { - const win = new osparc.ui.window.Dialog("Confirmation", null, - `Are you sure you want to delete the ${isMulti ? "studies" : "study"}?` - ); - const btnYes = new qx.ui.toolbar.Button("Yes"); - osparc.utils.Utils.setIdToWidget(btnYes, "confirmDeleteStudyBtn"); - btnYes.addListener("execute", e => { - win["value"] = 1; - win.close(1); - }, this); - win.addCancelButton(); - win.addButton(btnYes); - return win; + const msg = isMulti ? this.tr("Are you sure you want to delete the studies?") : this.tr("Are you sure you want to delete the study?"); + return new osparc.ui.window.Confirmation(msg); } } }); diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index 30314506acc..da18b8771fc 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -252,15 +252,6 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { _applyTags: function(tags) { if (osparc.data.Permissions.getInstance().canDo("study.tag")) { const tagsContainer = this.getChildControl("tags"); - if (tags.length) { - tagsContainer.show(); - this.getChildControl("creator").exclude(); - this.getChildControl("lastChangeDate").exclude(); - } else { - tagsContainer.exclude(); - this.getChildControl("creator").show(); - this.getChildControl("lastChangeDate").show(); - } tagsContainer.removeAll(); tags.forEach(tag => tagsContainer.add(new osparc.ui.basic.Tag(tag.name, tag.color, "studyBrowser"))); } diff --git a/services/web/client/source/class/osparc/desktop/StudyEditor.js b/services/web/client/source/class/osparc/desktop/StudyEditor.js index 5bb6ab4c7b3..429b3224b19 100644 --- a/services/web/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/web/client/source/class/osparc/desktop/StudyEditor.js @@ -69,14 +69,34 @@ qx.Class.define("osparc.desktop.StudyEditor", { __loggerView: null, __currentNodeId: null, __autoSaveTimer: null, + __lastSavedStudy: null, _applyStudy: function(study) { osparc.store.Store.getInstance().setCurrentStudy(study); study.buildWorkbench(); study.openStudy(); - this.__initDefault(); + this.__initViews(); this.__connectEvents(); this.__startAutoSaveTimer(); + + this.__openOneNode(); + }, + + __openOneNode: function() { + const validNodeIds = []; + const allNodes = this.getStudy().getWorkbench().getNodes(true); + Object.values(allNodes).forEach(node => { + if (!node.isFilePicker()) { + validNodeIds.push(node.getNodeId()); + } + }); + + const preferencesSettings = osparc.desktop.preferences.Preferences.getInstance(); + if (validNodeIds.length === 1 && preferencesSettings.getAutoOpenNode()) { + this.nodeSelected(validNodeIds[0]); + } else { + this.nodeSelected(this.getStudy().getUuid()); + } }, /** @@ -87,7 +107,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__stopAutoSaveTimer(); }, - __initDefault: function() { + __initViews: function() { const study = this.getStudy(); const nodesTree = this.__nodesTree = new 
osparc.component.widget.NodesTree(study); @@ -112,7 +132,6 @@ qx.Class.define("osparc.desktop.StudyEditor", { const edgeId = e.getData(); this.__removeEdge(edgeId); }, this); - this.showInMainView(workbenchUI, study.getUuid()); this.__nodeView = new osparc.component.node.NodeView().set({ minHeight: 200 @@ -166,21 +185,28 @@ qx.Class.define("osparc.desktop.StudyEditor", { const nodeId = e.getData(); const node = this.getStudy().getWorkbench().getNode(nodeId); if (node && node.isContainer()) { - // const exportGroupView = new osparc.component.export.ExportGroup(node); - + const exportGroupView = new osparc.component.export.ExportGroup(node); const window = new qx.ui.window.Window(this.tr("Export: ") + node.getLabel()).set({ appearance: "service-window", layout: new qx.ui.layout.Grow(), autoDestroy: true, contentPadding: 0, - width: 900, - height: 800, + width: 700, + height: 700, showMinimize: false, modal: true }); - // window.add(exportGroupView); + window.add(exportGroupView); window.center(); window.open(); + + window.addListener("close", () => { + exportGroupView.tearDown(); + }, this); + + exportGroupView.addListener("finished", () => { + window.close(); + }, this); } }); @@ -322,6 +348,11 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.fireDataEvent("changeMainViewCaption", nodesPath); }, + getCurrentPathIds: function() { + const nodesPath = this.getStudy().getWorkbench().getPathIds(this.__currentNodeId); + return nodesPath; + }, + getLogger: function() { return this.__loggerView; }, @@ -443,6 +474,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { __doStartPipeline: function() { this.getStudy().getWorkbench().clearProgressData(); + // post pipeline const url = "/computation/pipeline/" + encodeURIComponent(this.getStudy().getUuid()) + "/start"; const req = new osparc.io.request.ApiRequest(url, "POST"); @@ -506,7 +538,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { let timer = this.__autoSaveTimer = new qx.event.Timer(interval); timer.addListener("interval", () => { const newObj = this.getStudy().serializeStudy(); - const delta = diffPatcher.diff(this.__lastSavedPrj, newObj); + const delta = diffPatcher.diff(this.__lastSavedStudy, newObj); if (delta) { let deltaKeys = Object.keys(delta); // lastChangeDate should not be taken into account as data change @@ -543,7 +575,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { }; osparc.data.Resources.fetch("studies", "put", params).then(data => { this.fireDataEvent("studySaved", true); - this.__lastSavedPrj = osparc.wrapper.JsonDiffPatch.getInstance().clone(newObj); + this.__lastSavedStudy = osparc.wrapper.JsonDiffPatch.getInstance().clone(newObj); if (cbSuccess) { cbSuccess.call(this); } diff --git a/services/web/client/source/class/osparc/desktop/preferences/Preferences.js b/services/web/client/source/class/osparc/desktop/preferences/Preferences.js index fb43a959af6..210f8fbbadc 100644 --- a/services/web/client/source/class/osparc/desktop/preferences/Preferences.js +++ b/services/web/client/source/class/osparc/desktop/preferences/Preferences.js @@ -33,6 +33,13 @@ qx.Class.define("osparc.desktop.preferences.Preferences", { init: false, check: "Boolean", event: "changeAutoConnectPorts" + }, + + autoOpenNode: { + nullable: false, + init: true, + check: "Boolean", + event: "changeAutoOpenNode" } } }); diff --git a/services/web/client/source/class/osparc/desktop/preferences/pages/ExperimentalPage.js b/services/web/client/source/class/osparc/desktop/preferences/pages/ExperimentalPage.js index 4158a836935..8d5ac5682a9 100644 --- 
a/services/web/client/source/class/osparc/desktop/preferences/pages/ExperimentalPage.js +++ b/services/web/client/source/class/osparc/desktop/preferences/pages/ExperimentalPage.js @@ -105,12 +105,16 @@ qx.Class.define("osparc.desktop.preferences.pages.ExperimentalPage", { const preferencesSettings = osparc.desktop.preferences.Preferences.getInstance(); - const cbAutoPorts = new qx.ui.form.CheckBox("Auto Connect Ports"); + const cbAutoPorts = new qx.ui.form.CheckBox(this.tr("Connect ports automatically")); preferencesSettings.bind("autoConnectPorts", cbAutoPorts, "value"); cbAutoPorts.bind("value", preferencesSettings, "autoConnectPorts"); - box.add(cbAutoPorts); + const cbAutoOpenNode = new qx.ui.form.CheckBox(this.tr("Open node automatically when opening studies with a single node")); + preferencesSettings.bind("autoOpenNode", cbAutoOpenNode, "value"); + cbAutoOpenNode.bind("value", preferencesSettings, "autoOpenNode"); + box.add(cbAutoOpenNode); + return box; } } diff --git a/services/web/client/source/class/osparc/dev/fake/Data.js b/services/web/client/source/class/osparc/dev/fake/Data.js index ded5f7edcba..7f42b8d83ab 100644 --- a/services/web/client/source/class/osparc/dev/fake/Data.js +++ b/services/web/client/source/class/osparc/dev/fake/Data.js @@ -25,156 +25,6 @@ qx.Class.define("osparc.dev.fake.Data", { type: "static", statics: { - getFakeServices: function() { - return [{ - key: "simcore/services/computational/itis/sleeper", - version: "0.0.0", - type: "computational", - name: "sleeper service", - description: "dummy sleepr service", - authors: [ - { - name: "Odei Maiz", - email: "maiz@itis.ethz.ch" - } - ], - contact: "maiz@itis.ethz.ch", - inputs: { - inNumber: { - displayOrder: 0, - label: "In", - description: "Chosen Number", - type: "number", - defaultValue: 42 - } - }, - outputs: { - outNumber: { - displayOrder: 0, - label: "Out", - description: "Chosen Number", - type: "number" - } - } - }, { - key: "simcore/services/computational/itis/tutti", - version: "0.0.0", - type: "computational", - name: "a little test node", - description: "just the bare minimum", - authors: [ - { - name: "Tobias Oetiker", - email: "oetiker@itis.ethz.ch" - } - ], - contact: "oetiker@itis.ethz.ch", - inputs: { - inNumber: { - displayOrder: 0, - label: "Number Test", - description: "Test Input for Number", - type: "number", - defaultValue: 5.3 - }, - inInt: { - displayOrder: 1, - label: "Integer Test", - description: "Test Input for Integer", - type: "integer", - defaultValue: 2 - }, - inBool: { - displayOrder: 2, - label: "Boolean Test", - type: "boolean", - description: "Test Input for Boolean", - defaultValue: true - }, - inStr: { - displayOrder: 3, - type: "string", - label: "String Test", - description: "Test Input for String", - defaultValue: "Gugus" - }, - inArea: { - displayOrder: 4, - type: "string", - label: "Widget TextArea Test", - description: "Test Input for String", - defaultValue: "Gugus\nDu\nDa", - widget: { - type: "TextArea", - minHeight: 50 - } - }, - inSb: { - displayOrder: 5, - label: "Widget SelectBox Test", - description: "Test Input for SelectBox", - defaultValue: "dog", - type: "string", - widget: { - /* - type: "SelectBox", - structure: [ - { - key: "dog", - label: "A Dog" - }, - { - key: "cat", - label: "A Cat" - } - ] - */ - type: "TextArea", - minHeight: 50 - } - }, - inFile: { - displayOrder: 6, - label: "File", - description: "Test Input File", - type: "data:*/*" - }, - inImage: { - displayOrder: 7, - label: "Image", - description: "Test Input Image", - type: 
"data:[image/jpeg,image/png]" - } - }, - outputs: { - outNumber: { - label: "Number Test", - description: "Test Output for Number", - displayOrder: 0, - type: "number" - }, - outInteger: { - label: "Integer Test", - description: "Test Output for Integer", - displayOrder: 1, - type: "integer" - }, - outBool: { - label: "Boolean Test", - description: "Test Output for Boolean", - displayOrder: 2, - type: "boolean" - }, - outPng: { - label: "Png Test", - description: "Test Output for PNG Image", - displayOrder: 3, - type: "data:image/png" - } - } - }]; - }, - getItemList: function(nodeKey, portKey) { switch (portKey) { case "defaultNeuromanModels": diff --git a/services/web/client/source/class/osparc/store/Store.js b/services/web/client/source/class/osparc/store/Store.js index 8241a684542..6a6d70b4e4b 100644 --- a/services/web/client/source/class/osparc/store/Store.js +++ b/services/web/client/source/class/osparc/store/Store.js @@ -81,6 +81,10 @@ qx.Class.define("osparc.store.Store", { check: "Array", init: [] }, + groups: { + check: "Array", + init: [] + }, storageLocations: { check: "Array", init: [] @@ -155,33 +159,24 @@ qx.Class.define("osparc.store.Store", { * @param {Boolean} reload ? */ getServices: function(reload) { - if (!osparc.utils.Services.reloadingServices && (reload || Object.keys(osparc.utils.Services.servicesCached).length === 0)) { - osparc.utils.Services.reloadingServices = true; - osparc.data.Resources.get("servicesTodo", null, !reload) - .then(data => { - const allServices = data.concat(osparc.utils.Services.getBuiltInServices()); - const filteredServices = osparc.utils.Services.filterOutUnavailableGroups(allServices); - const services = osparc.utils.Services.convertArrayToObject(filteredServices); - osparc.utils.Services.servicesToCache(services, true); - this.fireDataEvent("servicesRegistered", { - services, - fromServer: true - }); + return new Promise((resolve, reject) => { + const allServices = osparc.utils.Services.getBuiltInServices(); + const servicesPromise = osparc.data.Resources.get("servicesTodo", null, !reload); + const groupsPromise = osparc.data.Resources.get("groups", null, !reload); + Promise.all([servicesPromise, groupsPromise]) + .then(values => { + allServices.push(...values[0], ...values[1]); }) .catch(err => { console.error("getServices failed", err); - const allServices = osparc.dev.fake.Data.getFakeServices().concat(osparc.utils.Services.getBuiltInServices()); - const filteredServices = osparc.utils.Services.filterOutUnavailableGroups(allServices); - const services = osparc.utils.Services.convertArrayToObject(filteredServices); - osparc.utils.Services.servicesToCache(services, false); - this.fireDataEvent("servicesRegistered", { - services, - fromServer: false - }); + }) + .finally(() => { + const servicesObj = osparc.utils.Services.convertArrayToObject(allServices); + osparc.utils.Services.servicesToCache(servicesObj, true); + this.fireDataEvent("servicesRegistered", servicesObj); + resolve(osparc.utils.Services.servicesCached); }); - return null; - } - return osparc.utils.Services.servicesCached; + }); }, /** diff --git a/services/web/client/source/class/osparc/ui/hint/Hint.js b/services/web/client/source/class/osparc/ui/hint/Hint.js index 3c3712ba4e0..10d7ced01fa 100644 --- a/services/web/client/source/class/osparc/ui/hint/Hint.js +++ b/services/web/client/source/class/osparc/ui/hint/Hint.js @@ -17,7 +17,8 @@ qx.Class.define("osparc.ui.hint.Hint", { this.base(arguments); this.set({ backgroundColor: "transparent", - visibility: "excluded" + 
visibility: "excluded", + zIndex: 110000 }); const hintCssUri = qx.util.ResourceManager.getInstance().toUri("hint/hint.css"); diff --git a/services/web/client/source/class/osparc/ui/window/Confirmation.js b/services/web/client/source/class/osparc/ui/window/Confirmation.js new file mode 100644 index 00000000000..714997f96a8 --- /dev/null +++ b/services/web/client/source/class/osparc/ui/window/Confirmation.js @@ -0,0 +1,39 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2020 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Odei Maiz (odeimaiz) + */ + +/** + * Generic confirmation window. + * Provides "Cancel" and "Yes" buttons as well as boolean Confirmed property. + */ +qx.Class.define("osparc.ui.window.Confirmation", { + extend: osparc.ui.window.Dialog, + + /** + * @extends osparc.ui.window.Dialog + * @param {String} message Message that will be displayed to the user. + */ + construct: function(message) { + this.base(arguments, this.tr("Confirmation"), null, message); + + this.addCancelButton(); + + const btnYes = new qx.ui.toolbar.Button("Yes"); + osparc.utils.Utils.setIdToWidget(btnYes, "confirmDeleteStudyBtn"); + btnYes.addListener("execute", e => { + this.setConfirmed(true); + this.close(1); + }, this); + this.addButton(btnYes); + }, + + properties: { + confirmed: { + check: "Boolean", + init: false + } + } +}); diff --git a/services/web/client/source/class/osparc/utils/Services.js b/services/web/client/source/class/osparc/utils/Services.js index 87b5c0885f8..4a4803ec6d2 100644 --- a/services/web/client/source/class/osparc/utils/Services.js +++ b/services/web/client/source/class/osparc/utils/Services.js @@ -75,7 +75,6 @@ qx.Class.define("osparc.utils.Services", { } }, - reloadingServices: false, servicesCached: {}, getTypes: function() { @@ -166,48 +165,15 @@ qx.Class.define("osparc.utils.Services", { return false; }, - filterOutUnavailableGroups: function(listOfServices) { - const filteredServices = []; - for (let i=0; i $(APP_NAME).log 2>&1 - - - -.PHONY: autoformat -autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] - # auto formatting with black - @python3 -m black --verbose $(CURDIR) - -.PHONY: version-patch version-minor -version-patch version-minor: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '${APP_NAME}':" - @echo "" - @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/services/web/server/docker/healthcheck.py b/services/web/server/docker/healthcheck.py index af33e1cafa3..3faa018b291 100644 --- a/services/web/server/docker/healthcheck.py +++ b/services/web/server/docker/healthcheck.py @@ -26,9 +26,14 @@ ok = os.environ.get("SC_BOOT_MODE").lower() == "debug" # Queries host -ok = ok or urlopen("{host}{baseurl}".format( - host=sys.argv[1], - baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "")) # adds a base-path if defined in environ - ).getcode() == 200 +ok = ( + ok + or urlopen( + "{host}{baseurl}".format( + host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") + ) # adds a base-path if defined in environ + ).getcode() + == 200 +) sys.exit(SUCCESS if ok else UNHEALTHY) diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 7ed6a6db738..025573fad85 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -4,106 +4,106 @@ # # pip-compile --output-file=_test.txt _test.in # -aio-pika==6.5.2 # via -r _base.txt (line 7) -aiodebug==1.1.2 # via -r _base.txt (line 8) -aiohttp-jinja2==1.1.1 # via -r _base.txt (line 9) -aiohttp-security==0.4.0 # via -r _base.txt (line 10) -aiohttp-session[secure]==2.7.0 # via -r _base.txt (line 11) -aiohttp-swagger[performance]==1.0.14 # via -r _base.txt (line 12) -aiohttp==3.6.2 # via -r _base.txt (line 13), aiohttp-jinja2, aiohttp-security, aiohttp-session, aiohttp-swagger, aiozipkin, pytest-aiohttp -aiopg[sa]==1.0.0 # via -r _base.txt (line 14) -aioredis==1.3.0 # via -r _base.txt (line 15) -aiormq==3.2.1 # via -r _base.txt (line 16), aio-pika -aiosmtplib==1.0.5 # via -r _base.txt (line 17) -aiozipkin==0.6.0 # via -r _base.txt (line 18) -amqp==2.4.2 # via -r _base.txt (line 19), kombu -asn1crypto==0.24.0 # via -r _base.txt (line 20), cryptography +aio-pika==6.5.2 +aiodebug==1.1.2 +aiohttp-jinja2==1.1.1 +aiohttp-security==0.4.0 +aiohttp-session[secure]==2.7.0 +aiohttp-swagger[performance]==1.0.14 +aiohttp==3.6.2 +aiopg[sa]==1.0.0 +aioredis==1.3.0 +aiormq==3.2.1 +aiosmtplib==1.0.5 +aiozipkin==0.6.0 +amqp==2.4.2 +asn1crypto==0.24.0 astroid==2.3.3 # via pylint -async-timeout==3.0.1 # via -r _base.txt (line 21), aiohttp, aioredis -asyncpg==0.18.3 # via -r _base.txt (line 22) -attrs==19.1.0 # via -r _base.txt (line 23), aiohttp, jsonschema, openapi-core, pytest, pytest-docker -billiard==3.6.0.0 # via -r _base.txt (line 24), celery -celery==4.3.0 # via -r _base.txt (line 25) +async-timeout==3.0.1 +asyncpg==0.18.3 +attrs==19.1.0 +billiard==3.6.0.0 +celery==4.3.0 certifi==2019.11.28 # via requests -cffi==1.12.3 # via -r _base.txt (line 26), cryptography -change-case==0.5.2 # via -r _base.txt (line 27) -chardet==3.0.4 # via -r _base.txt (line 28), aiohttp, requests -codecov==2.0.15 # via -r _test.in (line 36) -coverage==4.5.1 # via -r _test.in (line 12), codecov, coveralls, pytest-cov -coveralls==1.10.0 # via -r _test.in (line 35) -cryptography==2.6.1 # via -r _base.txt (line 29), aiohttp-session -docker==4.1.0 # via -r _test.in (line 30) +cffi==1.12.3 +change-case==0.5.2 +chardet==3.0.4 +codecov==2.0.16 +coverage==4.5.1 +coveralls==1.11.1 
+cryptography==2.6.1 +docker==4.2.0 docopt==0.6.2 # via coveralls -expiringdict==1.2.0 # via -r _base.txt (line 30) -faker==3.0.0 # via -r _test.in (line 26) -hiredis==1.0.1 # via -r _base.txt (line 31), aioredis -idna-ssl==1.1.0 # via -r _base.txt (line 32), aiohttp -idna==2.8 # via -r _base.txt (line 33), idna-ssl, requests, yarl -importlib-metadata==0.23 # via -r _base.txt (line 34), jsonschema, pluggy, pytest -isodate==0.6.0 # via -r _base.txt (line 35), openapi-core +expiringdict==1.2.0 +faker==4.0.1 +hiredis==1.0.1 +idna-ssl==1.1.0 +idna==2.8 +importlib-metadata==0.23 +isodate==0.6.0 isort==4.3.21 # via pylint -jinja-app-loader==1.0.2 # via -r _base.txt (line 36) -jinja2==2.10.1 # via -r _base.txt (line 37), aiohttp-jinja2, aiohttp-swagger -jsondiff==1.1.2 # via -r _base.txt (line 38) -jsonschema==3.2.0 # via -r _base.txt (line 39), -r _test.in (line 28), openapi-spec-validator -kombu==4.5.0 # via -r _base.txt (line 40), celery -lazy-object-proxy==1.4.3 # via -r _base.txt (line 41), astroid, openapi-core -markupsafe==1.1.1 # via -r _base.txt (line 42), jinja2 +jinja-app-loader==1.0.2 +jinja2==2.10.1 +jsondiff==1.1.2 +jsonschema==3.2.0 +kombu==4.5.0 +lazy-object-proxy==1.4.3 +markupsafe==1.1.1 mccabe==0.6.1 # via pylint -mock==3.0.5 # via -r _test.in (line 14) -more-itertools==7.2.0 # via -r _base.txt (line 43), pytest, zipp -multidict==4.5.2 # via -r _base.txt (line 44), aiohttp, yarl -openapi-core==0.12.0 # via -r _base.txt (line 45) -openapi-spec-validator==0.2.8 # via -r _base.txt (line 46), -r _test.in (line 27), openapi-core -packaging==20.0 # via pytest, pytest-sugar -pamqp==2.3.0 # via -r _base.txt (line 47), aiormq -passlib==1.7.1 # via -r _base.txt (line 48) +mock==4.0.1 +more-itertools==7.2.0 +multidict==4.5.2 +openapi-core==0.12.0 +openapi-spec-validator==0.2.8 +packaging==20.3 # via pytest, pytest-sugar +pamqp==2.3.0 +passlib==1.7.1 pluggy==0.13.1 # via pytest -prometheus-client==0.7.1 # via -r _base.txt (line 49) -psycopg2-binary==2.8.4 # via -r _base.txt (line 50), aiopg, sqlalchemy -ptvsd==4.3.2 # via -r _test.in (line 37) +prometheus-client==0.7.1 +psycopg2-binary==2.8.4 +ptvsd==4.3.2 py==1.8.1 # via pytest -pycparser==2.19 # via -r _base.txt (line 51), cffi -pylint==2.4.4 # via -r _test.in (line 34) +pycparser==2.19 +pylint==2.4.4 pyparsing==2.4.6 # via packaging -pyrsistent==0.15.6 # via -r _base.txt (line 52), jsonschema -pytest-aiohttp==0.3.0 # via -r _test.in (line 16) -pytest-cov==2.8.1 # via -r _test.in (line 17) -pytest-docker==0.6.1 # via -r _test.in (line 18) -pytest-instafail==0.4.1.post0 # via -r _test.in (line 19) -pytest-mock==2.0.0 # via -r _test.in (line 20) -pytest-runner==5.2 # via -r _test.in (line 21) -pytest-sugar==0.9.2 # via -r _test.in (line 22) -pytest==5.3.2 # via -r _test.in (line 15), pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar +pyrsistent==0.15.6 +pytest-aiohttp==0.3.0 +pytest-cov==2.8.1 +pytest-docker==0.7.2 +pytest-instafail==0.4.1.post0 +pytest-mock==2.0.0 +pytest-runner==5.2 +pytest-sugar==0.9.2 +pytest==5.3.5 python-dateutil==2.8.1 # via faker -python-engineio==3.9.3 # via -r _base.txt (line 53), python-socketio -python-socketio==4.3.1 # via -r _base.txt (line 54) -pytz==2019.1 # via -r _base.txt (line 55), celery -pyyaml==5.3 # via -r _base.txt (line 56), aiohttp-swagger, openapi-spec-validator, trafaret-config -redis==3.3.11 # via -r _test.in (line 31) -requests==2.22.0 # via codecov, coveralls, docker -semantic-version==2.6.0 # via -r _base.txt (line 57) -six==1.12.0 # via -r _base.txt (line 58), 
astroid, cryptography, docker, faker, isodate, jsonschema, mock, openapi-core, openapi-spec-validator, packaging, pyrsistent, python-dateutil, python-engineio, python-socketio, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.4 # via -r _base.txt (line 59), aiopg -strict-rfc3339==0.7 # via -r _base.txt (line 60), openapi-core -tenacity==6.0.0 # via -r _base.txt (line 61), -r _test.in (line 29) +python-engineio==3.9.3 +python-socketio==4.3.1 +pytz==2019.1 +pyyaml==5.3 +redis==3.4.1 +requests==2.23.0 # via codecov, coveralls, docker +semantic-version==2.6.0 +six==1.12.0 +sqlalchemy[postgresql_psycopg2binary]==1.3.4 +strict-rfc3339==0.7 +tenacity==6.0.0 termcolor==1.1.0 # via pytest-sugar text-unidecode==1.3 # via faker -trafaret-config==2.0.2 # via -r _base.txt (line 62) -trafaret==1.2.0 # via -r _base.txt (line 63), trafaret-config -typed-ast==1.4.0 # via astroid -typing-extensions==3.7.2 # via -r _base.txt (line 64), aiohttp -typing==3.7.4.1 # via -r _base.txt (line 65), expiringdict -ujson==1.35 # via -r _base.txt (line 66), aiohttp-swagger -urllib3==1.25.7 # via requests -vine==1.3.0 # via -r _base.txt (line 67), amqp, celery +trafaret-config==2.0.2 +trafaret==1.2.0 +typed-ast==1.4.1 # via astroid +typing-extensions==3.7.2 +typing==3.7.4.1 +ujson==1.35 +urllib3==1.25.8 # via requests +vine==1.3.0 wcwidth==0.1.8 # via pytest websocket-client==0.57.0 # via docker -websockets==8.1 # via -r _test.in (line 23) -werkzeug==0.16.0 # via -r _base.txt (line 68) +websockets==8.1 +werkzeug==0.16.0 wrapt==1.11.2 # via astroid -yarl==1.3.0 # via -r _base.txt (line 69), aio-pika, aiohttp, aiormq -zipp==1.0.0 # via -r _base.txt (line 70), importlib-metadata +yarl==1.3.0 +zipp==1.0.0 # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/web/server/setup.py b/services/web/server/setup.py index c1b85714a8a..58aab19e2fc 100644 --- a/services/web/server/setup.py +++ b/services/web/server/setup.py @@ -7,43 +7,42 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -def read_reqs( reqs_path: Path): - return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE) +def read_reqs(reqs_path: Path): + return re.findall(r"(^[^#-][\w]+[-~>=<.\w]+)", reqs_path.read_text(), re.MULTILINE) -#----------------------------------------------------------------- +# ----------------------------------------------------------------- -install_requirements = read_reqs( current_dir / "requirements" / "_base.txt" ) + [ +install_requirements = read_reqs(current_dir / "requirements" / "_base.txt") + [ "s3wrapper==0.1.0", "simcore-postgres-database", "simcore-sdk==0.1.0", "simcore-service-library", ] -test_requirements = read_reqs( current_dir / "requirements" / "_test.txt" ) +test_requirements = read_reqs(current_dir / "requirements" / "_test.txt") setup( - name='simcore-service-webserver', + name="simcore-service-webserver", version="0.4.0", - packages=find_packages(where='src'), - package_dir={ - '': 'src', - }, + packages=find_packages(where="src"), + package_dir={"": "src",}, include_package_data=True, package_data={ - '': [ - 'api/v0/openapi.yaml', - 'api/v0/schemas/*.json', - 'config/*.y*ml', - 'data/*.json', - 'templates/**/*.html', - ] + "": [ + "api/v0/openapi.yaml", + "api/v0/schemas/*.json", + "config/*.y*ml", + "data/*.json", + "templates/**/*.html", + ] }, entry_points={ - 'console_scripts': [ - 'simcore-service-webserver=simcore_service_webserver.__main__:main', ] - }, - 
python_requires='>=3.6', + "console_scripts": [ + "simcore-service-webserver=simcore_service_webserver.__main__:main", + ] + }, + python_requires=">=3.6", install_requires=install_requirements, tests_require=test_requirements, - setup_requires=['pytest-runner'] + setup_requires=["pytest-runner"], ) diff --git a/services/web/server/src/simcore_service_webserver/__version__.py b/services/web/server/src/simcore_service_webserver/__version__.py index ead0df82a2c..8f4c858af49 100644 --- a/services/web/server/src/simcore_service_webserver/__version__.py +++ b/services/web/server/src/simcore_service_webserver/__version__.py @@ -5,7 +5,7 @@ from semantic_version import Version -__version__ : str = pkg_resources.get_distribution('simcore_service_webserver').version +__version__: str = pkg_resources.get_distribution("simcore_service_webserver").version version = Version(__version__) diff --git a/services/web/server/src/simcore_service_webserver/activity/__init__.py b/services/web/server/src/simcore_service_webserver/activity/__init__.py index b10b4915af3..8c7520cb601 100644 --- a/services/web/server/src/simcore_service_webserver/activity/__init__.py +++ b/services/web/server/src/simcore_service_webserver/activity/__init__.py @@ -4,9 +4,11 @@ from aiohttp import web from servicelib.application_keys import APP_CONFIG_KEY from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from ..rest_config import APP_OPENAPI_SPECS_KEY from . import handlers @@ -14,11 +16,13 @@ logger = logging.getLogger(__name__) + @app_module_setup( __name__, category=ModuleCategory.ADDON, - depends=['simcore_service_webserver.rest'], - logger=logger) + depends=["simcore_service_webserver.rest"], + logger=logger, +) def setup(app: web.Application): # setup routes ------------ @@ -26,16 +30,12 @@ def setup(app: web.Application): def include_path(tup_object): _method, path, _operation_id, _tags = tup_object - return any( tail in path for tail in ['/activity/status'] ) + return any(tail in path for tail in ["/activity/status"]) - handlers_dict = { - 'get_status': handlers.get_status - } + handlers_dict = {"get_status": handlers.get_status} routes = map_handlers_with_operations( - handlers_dict, - filter(include_path, iter_path_operations(specs)), - strict=True + handlers_dict, filter(include_path, iter_path_operations(specs)), strict=True ) app.router.add_routes(routes) @@ -43,6 +43,4 @@ def include_path(tup_object): # alias setup_activity = setup -__all__ = ( - 'setup_activity' -) +__all__ = "setup_activity" diff --git a/services/web/server/src/simcore_service_webserver/activity/config.py b/services/web/server/src/simcore_service_webserver/activity/config.py index 0bc5d2a675f..3ff44ac975e 100644 --- a/services/web/server/src/simcore_service_webserver/activity/config.py +++ b/services/web/server/src/simcore_service_webserver/activity/config.py @@ -6,9 +6,13 @@ CONFIG_SECTION_NAME = "activity" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("prometheus_host", default='http://prometheus', optional=False): T.String(), - T.Key("prometheus_port", default=9090, optional=False): T.Int(), - T.Key("prometheus_api_version", default='v1', optional=False): T.String() -}) +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): 
T.Bool(), + T.Key( + "prometheus_host", default="http://prometheus", optional=False + ): T.String(), + T.Key("prometheus_port", default=9090, optional=False): T.Int(), + T.Key("prometheus_api_version", default="v1", optional=False): T.String(), + } +) diff --git a/services/web/server/src/simcore_service_webserver/activity/handlers.py b/services/web/server/src/simcore_service_webserver/activity/handlers.py index 3aa85a4140d..0883a83debf 100644 --- a/services/web/server/src/simcore_service_webserver/activity/handlers.py +++ b/services/web/server/src/simcore_service_webserver/activity/handlers.py @@ -1,10 +1,12 @@ import asyncio +from collections import defaultdict import aiohttp +from yarl import URL + from servicelib.application_keys import APP_CONFIG_KEY from servicelib.client_session import get_client_session from servicelib.request_keys import RQT_USERID_KEY -from yarl import URL from ..computation_handlers import get_celery from ..login.decorators import login_required @@ -15,9 +17,11 @@ async def query_prometheus(session, url, query): result = await resp.json() return result + def celery_reserved(app): return get_celery(app).control.inspect().reserved() + # # Functions getting the data to be executed async # @@ -25,24 +29,27 @@ async def get_cpu_usage(session, url, user_id): cpu_query = f'sum by (container_label_node_id) (irate(container_cpu_usage_seconds_total{{container_label_node_id=~".+", container_label_user_id="{user_id}"}}[20s])) * 100' return await query_prometheus(session, url, cpu_query) + async def get_memory_usage(session, url, user_id): memory_query = f'container_memory_usage_bytes{{container_label_node_id=~".+", container_label_user_id="{user_id}"}} / 1000000' return await query_prometheus(session, url, memory_query) + async def get_celery_reserved(app): return celery_reserved(app) + async def get_container_metric_for_labels(session, url, user_id): just_a_metric = f'container_cpu_user_seconds_total{{container_label_node_id=~".+", container_label_user_id="{user_id}"}}' return await query_prometheus(session, url, just_a_metric) def get_prometheus_result_or_default(result, default): - if (isinstance(result, Exception)): + if isinstance(result, Exception): # Logs exception return default - return result['data']['result'] - + return result["data"]["result"] + @login_required async def get_status(request: aiohttp.web.Request): @@ -50,66 +57,61 @@ async def get_status(request: aiohttp.web.Request): user_id = request.get(RQT_USERID_KEY, -1) - config = request.app[APP_CONFIG_KEY]['activity'] - url = URL(config.get('prometheus_host')).with_port(config.get('prometheus_port')).with_path('api/' + config.get('prometheus_api_version') + '/query') + config = request.app[APP_CONFIG_KEY]["activity"] + url = ( + URL(config.get("prometheus_host")) + .with_port(config.get("prometheus_port")) + .with_path("api/" + config.get("prometheus_api_version") + "/query") + ) results = await asyncio.gather( get_cpu_usage(session, url, user_id), get_memory_usage(session, url, user_id), get_celery_reserved(request.app), get_container_metric_for_labels(session, url, user_id), - return_exceptions=True + return_exceptions=True, ) cpu_usage = get_prometheus_result_or_default(results[0], []) mem_usage = get_prometheus_result_or_default(results[1], []) metric = get_prometheus_result_or_default(results[3], []) celery_inspect = results[2] - res = {} + res = defaultdict(dict) for node in cpu_usage: - node_id = node['metric']['container_label_node_id'] - usage = float(node['value'][1]) - res[node_id] = { - 'stats': 
{ - 'cpuUsage': usage - } - } + node_id = node["metric"]["container_label_node_id"] + usage = float(node["value"][1]) + res[node_id] = {"stats": {"cpuUsage": usage}} for node in mem_usage: - node_id = node['metric']['container_label_node_id'] - usage = float(node['value'][1]) + node_id = node["metric"]["container_label_node_id"] + usage = float(node["value"][1]) if node_id in res: - res[node_id]['stats']['memUsage'] = usage + res[node_id]["stats"]["memUsage"] = usage else: - res[node_id] = { - 'stats': { - 'memUsage': usage - } - } + res[node_id] = {"stats": {"memUsage": usage}} for node in metric: - limits = { - 'cpus': 0, - 'mem': 0 - } - metric_labels = node['metric'] - limits['cpus'] = float(metric_labels.get('container_label_nano_cpus_limit', 0)) / pow(10, 9) # Nanocpus to cpus - limits['mem'] = float(metric_labels.get('container_label_mem_limit', 0)) / pow(1024, 2) # In MB - node_id = metric_labels.get('container_label_node_id') - res[node_id]['limits'] = limits - - if (hasattr(celery_inspect, 'items')): + limits = {"cpus": 0, "mem": 0} + metric_labels = node["metric"] + limits["cpus"] = float( + metric_labels.get("container_label_nano_cpus_limit", 0) + ) / pow( + 10, 9 + ) # Nanocpus to cpus + limits["mem"] = float(metric_labels.get("container_label_mem_limit", 0)) / pow( + 1024, 2 + ) # In MB + node_id = metric_labels.get("container_label_node_id") + res[node_id]["limits"] = limits + + if hasattr(celery_inspect, "items"): for dummy_worker_id, worker in celery_inspect.items(): for task in worker: - if (task['args'][1:-1].split(', ')[0] == str(user_id)): # Extracts user_id from task's args - node_id = task['args'][1:-1].split(', ')[2][1:-1] # Extracts node_id from task's args - if node_id in res: - res[node_id]['queued'] = True - else: - res[node_id] = { - 'queued': True - } - - if (not res): + values = task["args"][1:-1].split(", ") + if values[0] == str(user_id): # Extracts user_id from task's args + node_id = values[2][1:-1] # Extracts node_id from task's args + res[node_id]["queued"] = True + + if not res: raise aiohttp.web.HTTPNoContent - return res + return dict(res) diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index f61e9f3001b..0f10936a74a 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -3378,6 +3378,11 @@ paths: description: distinctive name for the node based on the docker registry path pattern: '^(simcore)/(services)/(comp|dynamic)(/[^\s/]+)+$' example: simcore/services/comp/itis/sleeper + integration-version: + type: string + description: integration version number + pattern: '^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$' + example: 1.0.0 version: type: string description: semantic version number diff --git a/services/web/server/src/simcore_service_webserver/api/v0/schemas/node-meta-v0.0.1.json b/services/web/server/src/simcore_service_webserver/api/v0/schemas/node-meta-v0.0.1.json index b4978cbd794..2909aae313a 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/schemas/node-meta-v0.0.1.json +++ b/services/web/server/src/simcore_service_webserver/api/v0/schemas/node-meta-v0.0.1.json @@ -26,6 +26,14 @@ "simcore/services/dynamic/3dviewer" ] }, + "integration-version": { + "type": "string", + "description": "integration version 
number", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "examples": [ + "1.0.0" + ] + }, "version": { "type": "string", "description": "semantic version number", diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py index f5f4e95554e..0be88d232ff 100644 --- a/services/web/server/src/simcore_service_webserver/application.py +++ b/services/web/server/src/simcore_service_webserver/application.py @@ -34,9 +34,12 @@ log = logging.getLogger(__name__) -@app_module_setup("servicelib.monitoring", ModuleCategory.ADDON, +@app_module_setup( + "servicelib.monitoring", + ModuleCategory.ADDON, config_enabled="main.monitoring_enabled", - logger=log) + logger=log, +) def setup_app_monitoring(app: web.Application): return setup_monitoring(app, "simcore_service_webserver") @@ -45,8 +48,10 @@ def create_application(config: Dict) -> web.Application: """ Initializes service """ - log.debug("Initializing app with config:\n%s", - json.dumps(config, indent=2, sort_keys=True)) + log.debug( + "Initializing app with config:\n%s", + json.dumps(config, indent=2, sort_keys=True), + ) app = create_safe_application(config) @@ -66,7 +71,7 @@ def create_application(config: Dict) -> web.Application: setup_director(app) setup_storage(app) setup_users(app) - setup_projects(app) # needs storage + setup_projects(app) # needs storage setup_studies_access(app) setup_activity(app) setup_app_proxy(app) @@ -76,6 +81,7 @@ def create_application(config: Dict) -> web.Application: return app + def run_service(config: dict): """ Runs service @@ -84,12 +90,7 @@ def run_service(config: dict): app = create_application(config) - web.run_app(app, - host=config["main"]["host"], - port=config["main"]["port"]) + web.run_app(app, host=config["main"]["host"], port=config["main"]["port"]) -__all__ = ( - 'create_application', - 'run_service' -) +__all__ = ("create_application", "run_service") diff --git a/services/web/server/src/simcore_service_webserver/application_config.py b/services/web/server/src/simcore_service_webserver/application_config.py index d5cfeaf8901..442295ebda5 100644 --- a/services/web/server/src/simcore_service_webserver/application_config.py +++ b/services/web/server/src/simcore_service_webserver/application_config.py @@ -24,8 +24,16 @@ from servicelib.config_schema_utils import addon_section, minimal_addon_schema from trafaret_config.simple import read_and_validate -from . import (catalog_config, computation_config, db_config, email_config, - rest_config, session_config, storage_config, tracing) +from . 
import ( + catalog_config, + computation_config, + db_config, + email_config, + rest_config, + session_config, + storage_config, + tracing, +) from .activity import config as activity_config from .director import config as director_config from .login import config as login_config @@ -36,8 +44,8 @@ log = logging.getLogger(__name__) -CLI_DEFAULT_CONFIGFILE = 'server-defaults.yaml' -assert resources.exists( 'config/' + CLI_DEFAULT_CONFIGFILE ) # nosec +CLI_DEFAULT_CONFIGFILE = "server-defaults.yaml" +assert resources.exists("config/" + CLI_DEFAULT_CONFIGFILE) # nosec def create_schema() -> T.Dict: @@ -45,50 +53,66 @@ def create_schema() -> T.Dict: Build schema for the configuration's file by aggregating all the subsystem configurations """ - schema = T.Dict({ - "version": T.String(), - "main": T.Dict({ - "host": T.IP, - "port": T.Int(), - "client_outdir": T.String(), - "log_level": T.Enum(*logging._nameToLevel.keys()), # pylint: disable=protected-access - "testing": T.Bool(), - T.Key("studies_access_enabled", default=False): T.Or(T.Bool(), T.Int), - - T.Key("monitoring_enabled", default=False): T.Or(T.Bool(), T.Int), # Int added to use environs - }), - addon_section(tracing.tracing_section_name, optional=True): tracing.schema, - db_config.CONFIG_SECTION_NAME: db_config.schema, - director_config.CONFIG_SECTION_NAME: director_config.schema, - rest_config.CONFIG_SECTION_NAME: rest_config.schema, - projects_config.CONFIG_SECTION_NAME: projects_config.schema, - email_config.CONFIG_SECTION_NAME: email_config.schema, - computation_config.CONFIG_SECTION_NAME: computation_config.schema, - storage_config.CONFIG_SECTION_NAME: storage_config.schema, - addon_section(login_config.CONFIG_SECTION_NAME, optional=True): login_config.schema, - addon_section(socketio_config.CONFIG_SECTION_NAME, optional=True): socketio_config.schema, - session_config.CONFIG_SECTION_NAME: session_config.schema, - activity_config.CONFIG_SECTION_NAME: activity_config.schema, - resource_manager_config.CONFIG_SECTION_NAME: resource_manager_config.schema, - # BELOW HERE minimal sections until more options are needed - addon_section("reverse_proxy", optional=True): minimal_addon_schema(), - addon_section("application_proxy", optional=True): minimal_addon_schema(), - addon_section("users", optional=True): minimal_addon_schema(), - addon_section("studies_access", optional=True): minimal_addon_schema(), - addon_section("tags", optional=True): minimal_addon_schema(), - addon_section("catalog", optional=True): catalog_config.schema, - }) + # pylint: disable=protected-access + schema = T.Dict( + { + "version": T.String(), + "main": T.Dict( + { + "host": T.IP, + "port": T.Int(), + "client_outdir": T.String(), + "log_level": T.Enum( + *logging._nameToLevel.keys() + ), + "testing": T.Bool(), + T.Key("studies_access_enabled", default=False): T.Or( + T.Bool(), T.Int + ), + T.Key("monitoring_enabled", default=False): T.Or( + T.Bool(), T.Int + ), # Int added to use environs + } + ), + addon_section(tracing.tracing_section_name, optional=True): tracing.schema, + db_config.CONFIG_SECTION_NAME: db_config.schema, + director_config.CONFIG_SECTION_NAME: director_config.schema, + rest_config.CONFIG_SECTION_NAME: rest_config.schema, + projects_config.CONFIG_SECTION_NAME: projects_config.schema, + email_config.CONFIG_SECTION_NAME: email_config.schema, + computation_config.CONFIG_SECTION_NAME: computation_config.schema, + storage_config.CONFIG_SECTION_NAME: storage_config.schema, + addon_section( + login_config.CONFIG_SECTION_NAME, optional=True + ): 
login_config.schema, + addon_section( + socketio_config.CONFIG_SECTION_NAME, optional=True + ): socketio_config.schema, + session_config.CONFIG_SECTION_NAME: session_config.schema, + activity_config.CONFIG_SECTION_NAME: activity_config.schema, + resource_manager_config.CONFIG_SECTION_NAME: resource_manager_config.schema, + # BELOW HERE minimal sections until more options are needed + addon_section("reverse_proxy", optional=True): minimal_addon_schema(), + addon_section("application_proxy", optional=True): minimal_addon_schema(), + addon_section("users", optional=True): minimal_addon_schema(), + addon_section("studies_access", optional=True): minimal_addon_schema(), + addon_section("tags", optional=True): minimal_addon_schema(), + addon_section("catalog", optional=True): catalog_config.schema, + } + ) section_names = [k.name for k in schema.keys] - assert len(section_names) == len(set(section_names)), "Found repeated section names in %s" % section_names # nosec + assert len(section_names) == len(set(section_names)), ( + "Found repeated section names in %s" % section_names + ) # nosec return schema def load_default_config(environs=None) -> Dict: - filepath: Path = resources.get_path(f'config/{CLI_DEFAULT_CONFIGFILE}') + filepath: Path = resources.get_path(f"config/{CLI_DEFAULT_CONFIGFILE}") return read_and_validate(filepath, trafaret=app_schema, vars=environs) -app_schema = create_schema() # TODO: rename as schema +app_schema = create_schema() # TODO: rename as schema diff --git a/services/web/server/src/simcore_service_webserver/application_proxy.py b/services/web/server/src/simcore_service_webserver/application_proxy.py index b9731acea85..ab90e9122dc 100644 --- a/services/web/server/src/simcore_service_webserver/application_proxy.py +++ b/services/web/server/src/simcore_service_webserver/application_proxy.py @@ -26,6 +26,7 @@ logger = logging.getLogger(__name__) + @attr.s(auto_attribs=True) class ServiceMonitor(ServiceResolutionPolicy): app: web.Application @@ -51,8 +52,7 @@ async def _request_info(self, service_identifier: str): # override async def get_image_name(self, service_identifier: str) -> str: data = await self._request_info(service_identifier) - return data.get('service_key') - + return data.get("service_key") # override async def find_url(self, service_identifier: str) -> URL: @@ -60,30 +60,32 @@ async def find_url(self, service_identifier: str) -> URL: """ data = await self._request_info(service_identifier) - base_url = URL.build(scheme="http", - host=data.get('service_host'), - port=data.get('service_port'), - path=data.get('service_basepath')) - - if not os.environ.get('IS_CONTAINER_CONTEXT'): + base_url = URL.build( + scheme="http", + host=data.get("service_host"), + port=data.get("service_port"), + path=data.get("service_basepath"), + ) + + if not os.environ.get("IS_CONTAINER_CONTEXT"): # If server is not in swarm (e.g. 
during testing) then host:port = localhost:data['published_port']
-            base_url = base_url.with_host('127.0.0.1') \
-                .with_port(data['published_port'])
+            base_url = base_url.with_host("127.0.0.1").with_port(data["published_port"])
        return base_url
-
-
-@app_module_setup(__name__, ModuleCategory.ADDON,
-    depends=["simcore_service_webserver.director", ],
-    logger=logger)
+@app_module_setup(
+    __name__,
+    ModuleCategory.ADDON,
+    depends=["simcore_service_webserver.director",],
+    logger=logger,
+)
def setup(app: web.Application):
    monitor = ServiceMonitor(app, base_url=app[APP_DIRECTOR_API_KEY])
    setup_reverse_proxy(app, monitor)
-    assert "reverse_proxy" in app.router # nosec
+    assert "reverse_proxy" in app.router  # nosec
    app["reverse_proxy.basemount"] = monitor.base_mountpoint
@@ -92,6 +94,4 @@ def setup(app: web.Application):
setup_app_proxy = setup
-__all__ = (
-    'setup_app_proxy'
-)
+__all__ = "setup_app_proxy"
diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py
index 29bc54e641d..e70b278cf8d 100644
--- a/services/web/server/src/simcore_service_webserver/cli.py
+++ b/services/web/server/src/simcore_service_webserver/cli.py
@@ -101,19 +101,19 @@ def main(args: Optional[List] = None):
    setup_parser(parser)
    config = parse(args, parser)
-    # logging
+    # service log level
    log_level = getattr(logging, config["main"]["log_level"])
    logging.basicConfig(level=log_level)
    logging.root.setLevel(log_level)
-
-    # mute noisy loggers
-    logging.getLogger("engineio").setLevel(
-        min(log_level + LOG_LEVEL_STEP, logging.CRITICAL)
-    )
-    access_logger.setLevel(max(log_level - LOG_LEVEL_STEP, logging.DEBUG))
-    logging.getLogger("openapi_spec_validator").setLevel(logging.WARNING)
-    logging.getLogger("sqlalchemy").setLevel(logging.WARNING)
-    logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
+    # aiohttp access log level
+    access_logger.setLevel(log_level)
+
+    # keep noisy loggers mostly quiet
+    quiet_level = max(min(log_level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING)
+    logging.getLogger("engineio").setLevel(quiet_level)
+    logging.getLogger("openapi_spec_validator").setLevel(quiet_level)
+    logging.getLogger("sqlalchemy").setLevel(quiet_level)
+    logging.getLogger("sqlalchemy.engine").setLevel(quiet_level)
    # NOTE: Every task blocking > AIODEBUG_SLOW_DURATION_SECS secs is considered slow and logged as warning
    slow_duration = float(os.environ.get("AIODEBUG_SLOW_DURATION_SECS", 0.1))
diff --git a/services/web/server/src/simcore_service_webserver/cli_config.py b/services/web/server/src/simcore_service_webserver/cli_config.py
index 672ad008ed2..08e2f8cee03 100644
--- a/services/web/server/src/simcore_service_webserver/cli_config.py
+++ b/services/web/server/src/simcore_service_webserver/cli_config.py
@@ -1,4 +1,3 @@
-
import argparse
import os
import logging
@@ -25,13 +24,15 @@ def add_cli_options(argument_parser, default_config):
        argument_parser = argparse.ArgumentParser()
    commandline.standard_argparse_options(
-        argument_parser.add_argument_group('settings'),
-        default_config=default_config)
+        argument_parser.add_argument_group("settings"), default_config=default_config
+    )
    return argument_parser
-def config_from_options(options, schema, vars=None): # pylint: disable=redefined-builtin
+def config_from_options(
+    options, schema, vars=None
+):  # pylint: disable=redefined-builtin
    if vars is None:
        vars = os.environ
@@ -40,7 +41,7 @@ def config_from_options(options, schema, vars=None): # pylint: disable=redefined
    if
resources.exists(resource_name): options.config = resources.get_path(resource_name) else: - resource_name = resources.config_folder + '/' + resource_name + resource_name = resources.config_folder + "/" + resource_name if resources.exists(resource_name): options.config = resources.get_path(resource_name) @@ -49,16 +50,12 @@ def config_from_options(options, schema, vars=None): # pylint: disable=redefined return commandline.config_from_options(options, trafaret=schema, vars=vars) - - - - - - # FIXME: should replace these functions and remove dependency -def read_and_validate(filepath, vars=None): # pylint: disable=W0622 + +def read_and_validate(filepath, vars=None): # pylint: disable=W0622 from .application_config import app_schema + if vars is None: vars = os.environ # NOTE: vars=os.environ in signature freezes default to os.environ before it gets @@ -74,5 +71,6 @@ def config_from_file(filepath) -> dict: Raises trafaret_config.ConfigError """ from .application_config import app_schema + config = trafaret_config.read_and_validate(filepath, app_schema, vars=os.environ) return config diff --git a/services/web/server/src/simcore_service_webserver/computation.py b/services/web/server/src/simcore_service_webserver/computation.py index c63d3fa45e3..b8aa185e35a 100644 --- a/services/web/server/src/simcore_service_webserver/computation.py +++ b/services/web/server/src/simcore_service_webserver/computation.py @@ -11,8 +11,7 @@ from aiohttp import web from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import iter_path_operations, map_handlers_with_operations from . import computation_handlers from .computation_comp_tasks_listening_task import setup as setup_comp_tasks_listener @@ -23,9 +22,9 @@ log = logging.getLogger(__file__) -@app_module_setup(__name__, ModuleCategory.ADDON, - config_section=CONFIG_SECTION_NAME, - logger=log) +@app_module_setup( + __name__, ModuleCategory.ADDON, config_section=CONFIG_SECTION_NAME, logger=log +) def setup(app: web.Application): # subscribe to rabbit upon startup # TODO: Define connection policies (e.g. {on-startup}, lazy). Could be defined in config-file @@ -35,23 +34,25 @@ def setup(app: web.Application): # app.on_cleanup.append(unsubscribe) if not APP_OPENAPI_SPECS_KEY in app: - log.warning("rest submodule not initialised? computation routes will not be defined!") + log.warning( + "rest submodule not initialised? computation routes will not be defined!" 
+ ) return specs = app[APP_OPENAPI_SPECS_KEY] - routes = map_handlers_with_operations({ - 'start_pipeline': computation_handlers.start_pipeline, - 'update_pipeline': computation_handlers.update_pipeline + routes = map_handlers_with_operations( + { + "start_pipeline": computation_handlers.start_pipeline, + "update_pipeline": computation_handlers.update_pipeline, }, filter(lambda o: "/computation" in o[1], iter_path_operations(specs)), - strict=True + strict=True, ) app.router.add_routes(routes) setup_comp_tasks_listener(app) + # alias setup_computation = setup -__all__ = ( - "setup_computation" -) +__all__ = "setup_computation" diff --git a/services/web/server/src/simcore_service_webserver/computation_api.py b/services/web/server/src/simcore_service_webserver/computation_api.py index 9d64efc16d5..2fa1e705898 100644 --- a/services/web/server/src/simcore_service_webserver/computation_api.py +++ b/services/web/server/src/simcore_service_webserver/computation_api.py @@ -12,6 +12,7 @@ import sqlalchemy as sa from aiohttp import web, web_exceptions from aiopg.sa import Engine +from aiopg.sa.connection import SAConnection from sqlalchemy import and_ from servicelib.application_keys import APP_DB_ENGINE_KEY @@ -171,7 +172,7 @@ async def _parse_project_data(pipeline_data: Dict, app: web.Application): "inputs": node_inputs, "outputs": node_outputs, "image": {"name": node_key, "tag": node_version}, - "node_class": to_node_class(node_key) + "node_class": to_node_class(node_key), } log.debug("storing task for node %s: %s", node_uuid, task) @@ -218,6 +219,63 @@ async def _set_tasks_in_tasks_db( db_engine: Engine, project_id: str, tasks: Dict[str, Dict], replace_pipeline=True ): # pylint: disable=no-value-for-parameter + + async def _task_already_exists( + conn: SAConnection, project_id: str, node_id: str + ) -> bool: + task_count: int = await conn.scalar( + sa.select([sa.func.count()]).where( + and_( + comp_tasks.c.project_id == project_id, + comp_tasks.c.node_id == node_id, + ) + ) + ) + assert task_count in ( # nosec + 0, + 1, + ), f"Uniqueness violated: task_count={task_count}" # nosec + return task_count != 0 + + async def _update_task( + conn: SAConnection, task: Dict, project_id: str, node_id: str + ) -> None: + # update task's inputs/outputs + io_update = {} + task_inputs: str = await conn.scalar( + sa.select([comp_tasks.c.inputs]).where( + and_( + comp_tasks.c.project_id == project_id, + comp_tasks.c.node_id == node_id, + ) + ) + ) + # updates inputs + if task_inputs != task["inputs"]: + io_update["inputs"] = task["inputs"] + + # update outputs + # NOTE: update ONLY outputs of front-end nodes. The rest are + # updated by backend services (e.g. 
workers, interactive services) + if task["outputs"] and task["node_class"] == NodeClass.FRONTEND: + io_update["outputs"] = task["outputs"] + + if io_update: + query = ( + comp_tasks.update() + .where( + and_( + comp_tasks.c.project_id == project_id, + comp_tasks.c.node_id == node_id, + ) + ) + .values(**io_update) + ) + + await conn.execute(query) + + # MAIN ----------- + async with db_engine.acquire() as conn: if replace_pipeline: @@ -239,24 +297,33 @@ async def _set_tasks_in_tasks_db( internal_id = 1 for node_id, task in tasks.items(): + + is_new_task: bool = not await _task_already_exists( + conn, project_id, node_id + ) try: - # create task - query = comp_tasks.insert().values( - project_id=project_id, - node_id=node_id, - node_class=task["node_class"], - internal_id=internal_id, - image=task["image"], - schema=task["schema"], - inputs=task["inputs"], - outputs=task["outputs"] if task["outputs"] else {}, - submit=datetime.datetime.utcnow(), - ) + if is_new_task: + # create task + query = comp_tasks.insert().values( + project_id=project_id, + node_id=node_id, + node_class=task["node_class"], + internal_id=internal_id, + image=task["image"], + schema=task["schema"], + inputs=task["inputs"], + outputs=task["outputs"] if task["outputs"] else {}, + submit=datetime.datetime.utcnow(), + ) - await conn.execute(query) - internal_id = internal_id + 1 + await conn.execute(query) + internal_id = internal_id + 1 + + except psycopg2.errors.UniqueViolation: # pylint: disable=no-member + # avoids race condition + is_new_task = False - except psycopg2.errors.UniqueViolation: # pylint: disable=no-member + if not is_new_task: if replace_pipeline: # replace task query = ( @@ -280,39 +347,7 @@ async def _set_tasks_in_tasks_db( ) await conn.execute(query) else: - # update task's inputs/outputs - io_update = {} - task_inputs: str = await conn.scalar( - sa.select([comp_tasks.c.inputs]).where( - and_( - comp_tasks.c.project_id == project_id, - comp_tasks.c.node_id == node_id, - ) - ) - ) - # updates inputs - if task_inputs != task["inputs"]: - io_update["inputs"] = task["inputs"] - - # update outputs - # NOTE: update ONLY outputs of front-end nodes. The rest are - # updated by backend services (e.g. 
workers, interactive services)
-                if task["outputs"] and task["node_class"] == NodeClass.FRONTEND:
-                    io_update["outputs"] = task["outputs"]
-
-                if io_update:
-                    query = (
-                        comp_tasks.update()
-                        .where(
-                            and_(
-                                comp_tasks.c.project_id == project_id,
-                                comp_tasks.c.node_id == node_id,
-                            )
-                        )
-                        .values(**io_update)
-                    )
-
-                    await conn.execute(query)
+                    await _update_task(conn, task, project_id, node_id)
#
diff --git a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py
index f5a894d58f6..73798c6dba9 100644
--- a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py
+++ b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py
@@ -13,7 +13,7 @@
from servicelib.application_keys import APP_DB_ENGINE_KEY
-from .projects import projects_api
+from .projects import projects_api, projects_exceptions
from .projects.projects_models import projects, user_to_projects
from .socketio.events import post_messages
@@ -59,12 +59,12 @@ async def register_trigger_function(app: web.Application):
    EXECUTE PROCEDURE {DB_PROCEDURE_NAME}();
    """
-
    async with db_engine.acquire() as conn:
        async with conn.begin():
            await conn.execute(notification_fct_query)
            await conn.execute(trigger_registration_query)
+
async def listen(app: web.Application):
    listen_query = f"LISTEN {DB_CHANNEL_NAME};"
    db_engine: Engine = app[APP_DB_ENGINE_KEY]
@@ -80,10 +80,22 @@ async def listen(app: web.Application):
            project_id = node_data["project_id"]
            # find the user(s) linked to that project
            joint_table = user_to_projects.join(projects)
-            query = select([user_to_projects]).select_from(joint_table).where(projects.c.uuid == project_id)
+            query = (
+                select([user_to_projects])
+                .select_from(joint_table)
+                .where(projects.c.uuid == project_id)
+            )
            async for row in conn.execute(query):
                user_id = row["user_id"]
-                node_data = await projects_api.update_project_node_outputs(app, user_id, project_id, node_id, data=task_output)
+                try:
+                    node_data = await projects_api.update_project_node_outputs(
+                        app, user_id, project_id, node_id, data=task_output
+                    )
+                except projects_exceptions.ProjectNotFoundError:
+                    log.exception("Project %s not found", project_id)
+                except projects_exceptions.NodeNotFoundError:
+                    log.exception("Node %s in project %s not found", node_id, project_id)
+
                messages = {"nodeUpdated": {"Node": node_id, "Data": node_data}}
                await post_messages(app, user_id, messages)
@@ -99,6 +111,7 @@ async def comp_tasks_listening_task(app: web.Application) -> None:
        finally:
            pass
+
async def setup_comp_tasks_listening_task(app: web.Application):
    task = asyncio.get_event_loop().create_task(comp_tasks_listening_task(app))
    yield
diff --git a/services/web/server/src/simcore_service_webserver/computation_config.py b/services/web/server/src/simcore_service_webserver/computation_config.py
index 4f5d39a666f..70fbfd82dc7 100644
--- a/services/web/server/src/simcore_service_webserver/computation_config.py
+++ b/services/web/server/src/simcore_service_webserver/computation_config.py
@@ -7,7 +7,7 @@
# import trafaret as T
-SERVICE_NAME = 'rabbit'
+SERVICE_NAME = "rabbit"
CONFIG_SECTION_NAME = SERVICE_NAME
APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY = __name__ + ".rabbit_handlers"
APP_COMP_TASKS_LISTENING_KEY: str = __name__ + ".comp_tasks_listening_key"
diff --git a/services/web/server/src/simcore_service_webserver/computation_handlers.py b/services/web/server/src/simcore_service_webserver/computation_handlers.py
index 644faf51220..ae7f4b41680 100644
--- a/services/web/server/src/simcore_service_webserver/computation_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/computation_handlers.py
@@ -15,18 +15,21 @@
from .computation_config import CONFIG_SECTION_NAME as CONFIG_RABBIT_SECTION
from .login.decorators import login_required
from .projects.projects_api import get_project_for_user
+from .projects.projects_exceptions import ProjectNotFoundError
from .security_api import check_permission
log = logging.getLogger(__file__)
computation_routes = web.RouteTableDef()
+
def get_celery(_app: web.Application):
    config = _app[APP_CONFIG_KEY][CONFIG_RABBIT_SECTION]
    rabbit = rabbit_config(config=config)
    celery = Celery(rabbit.name, broker=rabbit.broker, backend=rabbit.backend)
    return celery
+
async def _process_request(request):
    # TODO: PC->SAN why validation is commented???
    # params, query, body = await extract_and_validate(request)
@@ -41,6 +44,7 @@ async def _process_request(request):
# HANDLERS ------------------------------------------
+
@login_required
async def update_pipeline(request: web.Request) -> web.Response:
    await check_permission(request, "services.pipeline.*")
@@ -48,8 +52,12 @@ async def update_pipeline(request: web.Request) -> web.Response:
    user_id, project_id = await _process_request(request)
-    project = await get_project_for_user(request.app, project_id, user_id)
-    await update_pipeline_db(request.app, project_id, project["workbench"])
+    try:
+        project = await get_project_for_user(request.app, project_id, user_id)
+        await update_pipeline_db(request.app, project_id, project["workbench"])
+    except ProjectNotFoundError:
+        raise web.HTTPNotFound(reason=f"Project {project_id} not found")
+
    raise web.HTTPNoContent()
@@ -64,18 +72,25 @@ async def start_pipeline(request: web.Request) -> web.Response:
    user_id, project_id = await _process_request(request)
-    project = await get_project_for_user(request.app, project_id, user_id)
-    await update_pipeline_db(request.app, project_id, project["workbench"])
+    try:
+        project = await get_project_for_user(request.app, project_id, user_id)
+        await update_pipeline_db(request.app, project_id, project["workbench"])
+    except ProjectNotFoundError:
+        raise web.HTTPNotFound(reason=f"Project {project_id} not found")
    # commit the tasks to celery
-    _ = get_celery(request.app).send_task("comp.task", args=(user_id, project_id,), kwargs={})
+    _ = get_celery(request.app).send_task(
+        "comp.task", args=(user_id, project_id,), kwargs={}
+    )
-    log.debug("Task (user_id=%s, project_id=%s) submitted for execution.", user_id, project_id)
+    log.debug(
+        "Task (user_id=%s, project_id=%s) submitted for execution.", user_id, project_id
+    )
    # answer the client while task has been spawned
    data = {
        # TODO: PC->SAN: some name with task id. e.g. to distinguish two projects with identical pipeline?
-        "pipeline_name":"request_data",
-        "project_id": project_id
+        "pipeline_name": "request_data",
+        "project_id": project_id,
    }
    return data
diff --git a/services/web/server/src/simcore_service_webserver/computation_models.py b/services/web/server/src/simcore_service_webserver/computation_models.py
index 1c21daa9f11..bd01ddfbf33 100644
--- a/services/web/server/src/simcore_service_webserver/computation_models.py
+++ b/services/web/server/src/simcore_service_webserver/computation_models.py
@@ -9,14 +9,14 @@
# TODO: test against all names in registry/fake projects??
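Editor's note (not part of the patch): the `node_key_re` regex and `str_to_nodeclass` table below classify a service by the first path segment after `simcore/services/`. A minimal sketch of the expected behaviour, assuming the `NodeClass` enum this module imports:

    # sketch only -- spot-checks for to_node_class, not part of the commit
    assert to_node_class("simcore/services/comp/itis/sleeper") == NodeClass.COMPUTATIONAL
    assert to_node_class("simcore/services/dynamic/3dviewer") == NodeClass.INTERACTIVE
    assert to_node_class("some/random/key") is None  # regex does not match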
node_key_re = re.compile(r"^simcore/services/(comp|dynamic|frontend)(/[^\s/]+)+$") str_to_nodeclass = { - 'comp': NodeClass.COMPUTATIONAL, - 'dynamic': NodeClass.INTERACTIVE, - 'frontend': NodeClass.FRONTEND, + "comp": NodeClass.COMPUTATIONAL, + "dynamic": NodeClass.INTERACTIVE, + "frontend": NodeClass.FRONTEND, } + def to_node_class(node_key: str) -> NodeClass: match = node_key_re.match(node_key) if match: return str_to_nodeclass.get(match.group(1)) return None - diff --git a/services/web/server/src/simcore_service_webserver/computation_subscribe.py b/services/web/server/src/simcore_service_webserver/computation_subscribe.py index 676c4f19a83..1d30855d164 100644 --- a/services/web/server/src/simcore_service_webserver/computation_subscribe.py +++ b/services/web/server/src/simcore_service_webserver/computation_subscribe.py @@ -12,29 +12,32 @@ from servicelib.application_keys import APP_CONFIG_KEY from simcore_sdk.config.rabbit import eval_broker -from .computation_config import (APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY, - CONFIG_SECTION_NAME) +from .computation_config import ( + APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY, + CONFIG_SECTION_NAME, +) from .projects import projects_api -from .projects.projects_exceptions import (NodeNotFoundError, - ProjectNotFoundError) +from .projects.projects_exceptions import NodeNotFoundError, ProjectNotFoundError from .socketio.events import post_messages log = logging.getLogger(__file__) + class RabbitMQRetryPolicyUponInitialization: """ Retry policy upon service initialization """ + WAIT_SECS = 2 ATTEMPTS_COUNT = 20 - def __init__(self, logger: Optional[logging.Logger]=None): + def __init__(self, logger: Optional[logging.Logger] = None): logger = logger or log self.kwargs = dict( wait=wait_fixed(self.WAIT_SECS), stop=stop_after_attempt(self.ATTEMPTS_COUNT), before_sleep=before_sleep_log(logger, logging.INFO), - reraise=True + reraise=True, ) @@ -43,13 +46,17 @@ def rabbit_adapter(app: web.Application) -> Callable: I.e. 
aio_pika handlers expect functions of type `async def function(message)`.
    This allows creating a function of type `async def function(message, app: web.Application)`
    """
+
    def decorator(func) -> Coroutine:
        @wraps(func)
        async def wrapped(*args, **kwargs) -> Coroutine:
            return await func(*args, **kwargs, app=app)
+
        return wrapped
+
    return decorator
+
async def parse_rabbit_message_data(app: web.Application, data: Dict) -> None:
    log.debug("parsing message data:\n%s", pformat(data, depth=3))
    # get common data
@@ -61,7 +68,9 @@ async def parse_rabbit_message_data(app: web.Application, data: Dict) -> None:
    messages = {}
    if data["Channel"] == "Progress":
        # update corresponding project, node, progress value
-        node_data = await projects_api.update_project_node_progress(app, user_id, project_id, node_id, progress=data["Progress"])
+        node_data = await projects_api.update_project_node_progress(
+            app, user_id, project_id, node_id, progress=data["Progress"]
+        )
        messages["nodeUpdated"] = {"Node": node_id, "Data": node_data}
    elif data["Channel"] == "Log":
        messages["logger"] = data
@@ -73,12 +82,15 @@ async def parse_rabbit_message_data(app: web.Application, data: Dict) -> None:
        log.exception("parsed rabbit message invalid")
-async def rabbit_message_handler(message: aio_pika.IncomingMessage, app: web.Application) -> None:
+async def rabbit_message_handler(
+    message: aio_pika.IncomingMessage, app: web.Application
+) -> None:
    data = json.loads(message.body)
    await parse_rabbit_message_data(app, data)
    # NOTE: this allows the webserver to breathe if a lot of messages are entering
    await asyncio.sleep(1)
+
async def subscribe(app: web.Application) -> None:
    # TODO: catch and deal with missing connections:
    # e.g. CRITICAL:pika.adapters.base_connection:Could not get addresses to use: [Errno -2] Name or service not known (rabbit)
@@ -89,10 +101,10 @@ async def subscribe(app: web.Application) -> None:
    log.info("Creating pika connection for %s", rabbit_broker)
    await wait_till_rabbitmq_responsive(rabbit_broker)
-    connection = await aio_pika.connect_robust(rabbit_broker,
-        client_properties={
-            "connection_name": "webserver read connection"
-        })
+    connection = await aio_pika.connect_robust(
+        rabbit_broker,
+        client_properties={"connection_name": "webserver read connection"},
+    )
    channel = await connection.channel()
    await channel.set_qos(prefetch_count=1)
@@ -119,6 +131,7 @@ async def subscribe(app: web.Application) -> None:
    app[APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY] = [partial_rabbit_message_handler]
    await queue.consume(partial_rabbit_message_handler, exclusive=True, no_ack=True)
+
@retry(**RabbitMQRetryPolicyUponInitialization().kwargs)
async def wait_till_rabbitmq_responsive(url: str) -> bool:
    """Check if something responds to ``url`` """
diff --git a/services/web/server/src/simcore_service_webserver/data/s4l_converter.py b/services/web/server/src/simcore_service_webserver/data/s4l_converter.py
index fcb1ae20310..006d1816e4b 100644
--- a/services/web/server/src/simcore_service_webserver/data/s4l_converter.py
+++ b/services/web/server/src/simcore_service_webserver/data/s4l_converter.py
@@ -7,51 +7,54 @@
import s4l_v1.document as document
from s4l_v1.simulation.emlf import MaterialSettings
-here = os.path.dirname( sys.argv[0] )
+here = os.path.dirname(sys.argv[0])
+
+
+PATTERN = re.compile(r"\W")
-PATTERN = re.compile(r'\W')
def make_key(m):
    return PATTERN.sub("_", m.Name) + "-UUID"
+
sims = document.AllSimulations
sim = sims[0]
materials = [s for s in sim.AllSettings if isinstance(s, MaterialSettings)]
-
def
create_material_getitemlist(materials): - path = os.path.join(here, r'fake-materialDB-LF-getItemList.json') - with open(path, 'wt') as f: + path = os.path.join(here, r"fake-materialDB-LF-getItemList.json") + with open(path, "wt") as f: data = [dict(key=make_key(m), label=m.Name) for m in materials] json.dump(data, f, indent=2) + def create_item(m): props = [ - m.MassDensityProp, - m.ElectricProps.ConductivityProp, - m.ElectricProps.RelativePermittivityProp, - m.MagneticProps.ConductivityProp, - m.MagneticProps.RelativePermeabilityProp, + m.MassDensityProp, + m.ElectricProps.ConductivityProp, + m.ElectricProps.RelativePermittivityProp, + m.MagneticProps.ConductivityProp, + m.MagneticProps.RelativePermeabilityProp, ] - + result = {} for index, prop in enumerate(props): result[prop.Name.replace(" ", "")] = { - 'displayOrder': index, - 'label': prop.Name, - 'unit': str(prop.Unit or ""), - 'type': "number", - 'defaultValue': prop.Value + "displayOrder": index, + "label": prop.Name, + "unit": str(prop.Unit or ""), + "type": "number", + "defaultValue": prop.Value, } return result - + def create_material_getitem(materials): - path = os.path.join(here, r'fake-materialDB-LF-getItem.json') - with open(path, 'wt') as f: - data = { make_key(m): create_item(m) for m in materials } + path = os.path.join(here, r"fake-materialDB-LF-getItem.json") + with open(path, "wt") as f: + data = {make_key(m): create_item(m) for m in materials} json.dump(data, f, indent=2) @@ -64,8 +67,8 @@ def create_map(sim): result[make_key(m)].append(make_key(c)) return result - path = os.path.join(here, r'fake-materialDB-LF-Material2Entities.json') - with open(path, 'wt') as f: + path = os.path.join(here, r"fake-materialDB-LF-Material2Entities.json") + with open(path, "wt") as f: data = create_map(sim) json.dump(data, f, indent=2) @@ -79,7 +82,12 @@ def get_name(ent): name = name.replace("Model/", "") return name -path = os.path.join(here, r'fake-modeler-LF-getItemList.json') -with open(path, 'wt') as f: - data = [ dict(key=make_key(c), label=get_name(c.Geometry)) for c in sim.AllComponents if c.Geometry ] + +path = os.path.join(here, r"fake-modeler-LF-getItemList.json") +with open(path, "wt") as f: + data = [ + dict(key=make_key(c), label=get_name(c.Geometry)) + for c in sim.AllComponents + if c.Geometry + ] json.dump(data, f, indent=2) diff --git a/services/web/server/src/simcore_service_webserver/db.py b/services/web/server/src/simcore_service_webserver/db.py index 2c55afaf065..c9e5bd289ba 100644 --- a/services/web/server/src/simcore_service_webserver/db.py +++ b/services/web/server/src/simcore_service_webserver/db.py @@ -5,10 +5,14 @@ import logging from aiohttp import web -from servicelib.aiopg_utils import (DataSourceName, - PostgresRetryPolicyUponInitialization, - create_pg_engine, init_pg_tables, - is_pg_responsive, raise_if_not_responsive) +from servicelib.aiopg_utils import ( + DataSourceName, + PostgresRetryPolicyUponInitialization, + create_pg_engine, + init_pg_tables, + is_pg_responsive, + raise_if_not_responsive, +) from servicelib.application_keys import APP_CONFIG_KEY, APP_DB_ENGINE_KEY from servicelib.application_setup import ModuleCategory, app_module_setup from tenacity import Retrying @@ -16,57 +20,60 @@ from .db_config import CONFIG_SECTION_NAME from .db_models import metadata -THIS_MODULE_NAME = __name__.split(".")[-1] -THIS_SERVICE_NAME = 'postgres' +THIS_MODULE_NAME = __name__.split(".")[-1] +THIS_SERVICE_NAME = "postgres" log = logging.getLogger(__name__) async def pg_engine(app: web.Application): cfg = 
app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] - pg_cfg = cfg['postgres'] - - app[f"{__name__}.dsn"]= dsn = \ - DataSourceName( - application_name=f'{__name__}_{id(app)}', - database=pg_cfg['database'], - user=pg_cfg['user'], - password=pg_cfg['password'], - host=pg_cfg['host'], - port=pg_cfg['port'] - ) + pg_cfg = cfg["postgres"] + + app[f"{__name__}.dsn"] = dsn = DataSourceName( + application_name=f"{__name__}_{id(app)}", + database=pg_cfg["database"], + user=pg_cfg["user"], + password=pg_cfg["password"], + host=pg_cfg["host"], + port=pg_cfg["port"], + ) log.info("Creating pg engine for %s", dsn) for attempt in Retrying(**PostgresRetryPolicyUponInitialization(log).kwargs): with attempt: - engine = await create_pg_engine(dsn, - minsize=pg_cfg['minsize'], - maxsize=pg_cfg['maxsize'] + engine = await create_pg_engine( + dsn, minsize=pg_cfg["minsize"], maxsize=pg_cfg["maxsize"] ) await raise_if_not_responsive(engine) - assert engine # nosec + assert engine # nosec app[APP_DB_ENGINE_KEY] = engine - if cfg['init_tables']: + if cfg["init_tables"]: log.info("Initializing tables for %s", dsn) init_pg_tables(dsn, schema=metadata) - yield #------------------- + yield # ------------------- if engine is not app.get(APP_DB_ENGINE_KEY): log.critical("app does not hold right db engine. Somebody has changed it??") engine.close() await engine.wait_closed() - log.debug("engine '%s' after shutdown: closed=%s, size=%d", engine.dsn, engine.closed, engine.size) + log.debug( + "engine '%s' after shutdown: closed=%s, size=%d", + engine.dsn, + engine.closed, + engine.size, + ) def is_service_enabled(app: web.Application): return app.get(APP_DB_ENGINE_KEY) is not None -async def is_service_responsive(app:web.Application): +async def is_service_responsive(app: web.Application): """ Returns true if the app can connect to db service """ @@ -89,7 +96,4 @@ def setup(app: web.Application): # alias --- setup_db = setup -__all__ = ( - 'setup_db', - 'is_service_enabled' -) +__all__ = ("setup_db", "is_service_enabled") diff --git a/services/web/server/src/simcore_service_webserver/db_config.py b/services/web/server/src/simcore_service_webserver/db_config.py index bbe78fe022f..8563b218a38 100644 --- a/services/web/server/src/simcore_service_webserver/db_config.py +++ b/services/web/server/src/simcore_service_webserver/db_config.py @@ -7,11 +7,11 @@ from simcore_sdk.config.db import CONFIG_SCHEMA as _PG_SCHEMA -CONFIG_SECTION_NAME = 'db' +CONFIG_SECTION_NAME = "db" # FIXME: database user password host port minsize maxsize -#CONFIG_SCHEMA = T.Dict({ +# CONFIG_SCHEMA = T.Dict({ # "database": T.String(), # "user": T.String(), # "password": T.String(), @@ -19,11 +19,13 @@ # "port": T.Or( T.Int, T.Null), # T.Key("minsize", default=1 ,optional=True): T.Int(), # T.Key("maxsize", default=4, optional=True): T.Int(), -#}) +# }) -schema = T.Dict({ - T.Key("postgres"): _PG_SCHEMA, - T.Key("init_tables", default=False, optional=True): T.Or(T.Bool, T.Int), - T.Key("enabled", default=True, optional=True): T.Bool() -}) +schema = T.Dict( + { + T.Key("postgres"): _PG_SCHEMA, + T.Key("init_tables", default=False, optional=True): T.Or(T.Bool, T.Int), + T.Key("enabled", default=True, optional=True): T.Bool(), + } +) diff --git a/services/web/server/src/simcore_service_webserver/db_models.py b/services/web/server/src/simcore_service_webserver/db_models.py index b6ff68d4301..8113263eaf2 100644 --- a/services/web/server/src/simcore_service_webserver/db_models.py +++ b/services/web/server/src/simcore_service_webserver/db_models.py @@ -2,15 +2,27 @@ Facade 
to keep API LEGACY """ from simcore_postgres_database.models.base import metadata -from simcore_postgres_database.webserver_models import (ConfirmationAction, - UserRole, UserStatus, - confirmations, tokens, - users, tags, study_tags) +from simcore_postgres_database.webserver_models import ( + ConfirmationAction, + UserRole, + UserStatus, + confirmations, + tokens, + users, + tags, + study_tags, +) # TODO: roles table that maps every role with allowed tasks e.g. read/write,...?? __all__ = ( - "UserStatus", "UserRole", "ConfirmationAction", - "users", "confirmations", "tokens", - "metadata", "tags", "study_tags" + "UserStatus", + "UserRole", + "ConfirmationAction", + "users", + "confirmations", + "tokens", + "metadata", + "tags", + "study_tags", ) diff --git a/services/web/server/src/simcore_service_webserver/director/__init__.py b/services/web/server/src/simcore_service_webserver/director/__init__.py index ed2f9b6d88f..b9e4561af07 100644 --- a/services/web/server/src/simcore_service_webserver/director/__init__.py +++ b/services/web/server/src/simcore_service_webserver/director/__init__.py @@ -10,9 +10,11 @@ from servicelib.application_keys import APP_CONFIG_KEY from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from ..rest_config import APP_OPENAPI_SPECS_KEY from . import handlers @@ -22,10 +24,9 @@ module_name = __name__.replace(".__init__", "") -@app_module_setup(module_name, ModuleCategory.ADDON, - depends=[], - logger=logger) -def setup(app: web.Application,* , disable_login=False): + +@app_module_setup(module_name, ModuleCategory.ADDON, depends=[], logger=logger) +def setup(app: web.Application, *, disable_login=False): """ Sets up director's subsystem :param app: main application @@ -43,28 +44,25 @@ def setup(app: web.Application,* , disable_login=False): def include_path(tup_object): _method, path, _operation_id, _tags = tup_object - return any( tail in path for tail in ['/running_interactive_services', '/services'] ) + return any( + tail in path for tail in ["/running_interactive_services", "/services"] + ) - handlers_dict = { - 'services_get': handlers.services_get - } + handlers_dict = {"services_get": handlers.services_get} # Disables login_required decorator for testing purposes if disable_login: for name, hnds in handlers_dict.items(): - if hasattr(hnds, '__wrapped__'): + if hasattr(hnds, "__wrapped__"): handlers_dict[name] = hnds.__wrapped__ routes = map_handlers_with_operations( - handlers_dict, - filter(include_path, iter_path_operations(specs)), - strict=True + handlers_dict, filter(include_path, iter_path_operations(specs)), strict=True ) app.router.add_routes(routes) + # alias setup_director = setup -__all__ = ( - 'setup_director' -) +__all__ = "setup_director" diff --git a/services/web/server/src/simcore_service_webserver/director/config.py b/services/web/server/src/simcore_service_webserver/director/config.py index b039987eb21..5b7b46af26d 100644 --- a/services/web/server/src/simcore_service_webserver/director/config.py +++ b/services/web/server/src/simcore_service_webserver/director/config.py @@ -12,23 +12,30 @@ APP_DIRECTOR_API_KEY = __name__ + ".director_api" -CONFIG_SECTION_NAME = 'director' +CONFIG_SECTION_NAME = "director" + +schema = T.Dict( + { + T.Key("enabled", default=True, 
optional=True): T.Bool(), + T.Key("host", default="director",): T.String(), + T.Key("port", default=8001): T.Int(), + T.Key("version", default="v0"): T.Regexp( + regexp=r"^v\d+" + ), # storage API version basepath + } +) -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("host", default="director", ): T.String(), - T.Key("port", default=8001): T.Int(), - T.Key("version", default="v0"): T.Regexp(regexp=r'^v\d+') # storage API version basepath -}) def build_api_url(config: Dict) -> URL: - api_baseurl = URL.build(scheme='http', - host=config['host'], - port=config['port']).with_path(config["version"]) + api_baseurl = URL.build( + scheme="http", host=config["host"], port=config["port"] + ).with_path(config["version"]) return api_baseurl + def get_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] + def get_client_session(app: web.Application) -> ClientSession: return app[APP_CLIENT_SESSION_KEY] diff --git a/services/web/server/src/simcore_service_webserver/director/director_api.py b/services/web/server/src/simcore_service_webserver/director/director_api.py index 01401a48309..c50eb0b2973 100644 --- a/services/web/server/src/simcore_service_webserver/director/director_api.py +++ b/services/web/server/src/simcore_service_webserver/director/director_api.py @@ -1,4 +1,5 @@ -import asyncio + # pylint: disable=too-many-arguments + import logging import urllib from typing import Dict, List, Optional @@ -6,8 +7,10 @@ from aiohttp import web from yarl import URL -from .config import get_client_session, get_config +from servicelib.utils import logged_gather + from . import director_exceptions +from .config import get_client_session, get_config log = logging.getLogger(__name__) @@ -20,15 +23,18 @@ def _get_director_client(app: web.Application) -> URL: # Use director. 
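Editor's note (not part of the patch): with the defaults declared above (`host="director"`, `port=8001`, `version="v0"`), `build_api_url` composes the director's versioned base endpoint. A minimal sketch with yarl:

    from yarl import URL

    # equivalent to build_api_url({"host": "director", "port": 8001, "version": "v0"})
    api_baseurl = URL.build(scheme="http", host="director", port=8001).with_path("v0")
    assert str(api_baseurl) == "http://director:8001/v0"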
# TODO: this is also in app[APP_DIRECTOR_API_KEY] upon startup api_endpoint = URL.build( - scheme='http', - host=cfg['host'], - port=cfg['port']).with_path(cfg["version"]) + scheme="http", host=cfg["host"], port=cfg["port"] + ).with_path(cfg["version"]) session = get_client_session(app) return session, api_endpoint -async def get_running_interactive_services(app: web.Application, user_id: Optional[str] = None, project_id: Optional[str] = None) -> List[Dict]: +async def get_running_interactive_services( + app: web.Application, + user_id: Optional[str] = None, + project_id: Optional[str] = None, +) -> List[Dict]: session, api_endpoint = _get_director_client(app) params = {} @@ -45,7 +51,14 @@ async def get_running_interactive_services(app: web.Application, user_id: Option return [] -async def start_service(app: web.Application, user_id: str, project_id: str, service_key: str, service_version: str, service_uuid: str) -> Optional[Dict]: # pylint: disable=too-many-arguments +async def start_service( + app: web.Application, + user_id: str, + project_id: str, + service_key: str, + service_version: str, + service_uuid: str, +) -> Optional[Dict]: session, api_endpoint = _get_director_client(app) params = { @@ -54,7 +67,7 @@ async def start_service(app: web.Application, user_id: str, project_id: str, ser "service_key": service_key, "service_tag": service_version, "service_uuid": service_uuid, - "service_basepath": f"/x/{service_uuid}" + "service_basepath": f"/x/{service_uuid}", } url = (api_endpoint / "running_interactive_services").with_query(params) @@ -66,7 +79,7 @@ async def start_service(app: web.Application, user_id: str, project_id: str, ser async def stop_service(app: web.Application, service_uuid: str) -> None: session, api_endpoint = _get_director_client(app) - url = (api_endpoint / "running_interactive_services" / service_uuid) + url = api_endpoint / "running_interactive_services" / service_uuid async with session.delete(url, ssl=False) as resp: if resp.status == 404: raise director_exceptions.ServiceNotFoundError(service_uuid) @@ -74,19 +87,33 @@ async def stop_service(app: web.Application, service_uuid: str) -> None: payload = await resp.json() raise director_exceptions.DirectorException(payload) -async def stop_services(app: web.Application, user_id: Optional[str] = None, project_id: Optional[str] = None) -> None: + +async def stop_services( + app: web.Application, + user_id: Optional[str] = None, + project_id: Optional[str] = None, +) -> None: if not user_id and not project_id: raise ValueError("Expected either user or project") - services = await get_running_interactive_services(app, user_id=user_id, project_id=project_id) + services = await get_running_interactive_services( + app, user_id=user_id, project_id=project_id + ) stop_tasks = [stop_service(app, service_uuid) for service_uuid in services] - await asyncio.gather(*stop_tasks) + await logged_gather(*stop_tasks, reraise=False) + -async def get_service_by_key_version(app: web.Application, service_key: str, service_version: str) -> Optional[Dict]: +async def get_service_by_key_version( + app: web.Application, service_key: str, service_version: str +) -> Optional[Dict]: session, api_endpoint = _get_director_client(app) - url = (api_endpoint / "services" / - urllib.parse.quote(service_key, safe='') / service_version) + url = ( + api_endpoint + / "services" + / urllib.parse.quote(service_key, safe="") + / service_version + ) async with session.get(url) as resp: if resp.status != 200: return diff --git 
a/services/web/server/src/simcore_service_webserver/director/director_exceptions.py b/services/web/server/src/simcore_service_webserver/director/director_exceptions.py
index 9e1fe3a570c..d90e6839b09 100644
--- a/services/web/server/src/simcore_service_webserver/director/director_exceptions.py
+++ b/services/web/server/src/simcore_service_webserver/director/director_exceptions.py
@@ -1,12 +1,15 @@
class DirectorException(Exception):
    """Basic exception for errors raised with director"""
+
    def __init__(self, msg=None):
        if msg is None:
            msg = "Unexpected error occurred in director subpackage"
        super(DirectorException, self).__init__(msg)

+
class ServiceNotFoundError(DirectorException):
    """Service was not found in swarm"""
+
    def __init__(self, service_uuid):
        msg = "Service with uuid {} not found".format(service_uuid)
        super(ServiceNotFoundError, self).__init__(msg)
diff --git a/services/web/server/src/simcore_service_webserver/director/handlers.py b/services/web/server/src/simcore_service_webserver/director/handlers.py
index 2fcf56a371d..0eb2263f35e 100644
--- a/services/web/server/src/simcore_service_webserver/director/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/director/handlers.py
@@ -23,20 +23,22 @@ def _forward_url(app: web.Application, url: URL) -> URL:
    # director service API endpoint
    # TODO: service API endpoint could be deduced and checked upon setup (e.g. health check on startup)
-    endpoint = URL.build(
-        scheme='http',
-        host=cfg['host'],
-        port=cfg['port']).with_path(cfg["version"])
+    endpoint = URL.build(scheme="http", host=cfg["host"], port=cfg["port"]).with_path(
+        cfg["version"]
+    )
    tail = "/".join(url.raw_parts[2:])
-    url = (endpoint / tail)
+    url = endpoint / tail
    return url

+
def _resolve_url(request: web.Request) -> URL:
    return _forward_url(request.app, request.url)

+
# HANDLERS -------------------------------------------------------------------

+
@login_required
async def services_get(request: web.Request) -> web.Response:
    await check_permission(request, "services.catalog.*")
diff --git a/services/web/server/src/simcore_service_webserver/email.py b/services/web/server/src/simcore_service_webserver/email.py
index b05495e32d2..b87f05b8d06 100644
--- a/services/web/server/src/simcore_service_webserver/email.py
+++ b/services/web/server/src/simcore_service_webserver/email.py
@@ -5,7 +5,8 @@
import logging

import aiohttp_jinja2
-#import jinja2 TODO: check
+
+# import jinja2 TODO: check
import jinja_app_loader
from aiohttp import web

@@ -15,32 +16,32 @@
from .resources import resources

# TODO: move login/utils.py email functionality here!
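
A note on the stop_services change in director_api.py above: a bare asyncio.gather aborts on the first exception unless return_exceptions is set, whereas logged_gather(*stop_tasks, reraise=False) lets one failing stop not prevent the remaining services from being stopped. A rough sketch of that semantics, purely illustrative (the actual servicelib.utils implementation may differ):

    import asyncio
    import logging

    log = logging.getLogger(__name__)

    async def logged_gather(*tasks, reraise: bool = True):
        # Run every task to completion and log each failure;
        # optionally re-raise the first failure once all are done.
        results = await asyncio.gather(*tasks, return_exceptions=True)
        errors = [r for r in results if isinstance(r, BaseException)]
        for err in errors:
            log.error("Task raised: %r", err)
        if reraise and errors:
            raise errors[0]
        return results
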
-#from email.mime.text import MIMEText -#import aiosmtplib +# from email.mime.text import MIMEText +# import aiosmtplib log = logging.getLogger(__name__) -@app_module_setup(__name__, ModuleCategory.ADDON, - config_section=CONFIG_SECTION_NAME, - logger=log) -def setup(app: web.Application, debug: bool=False): - tmpl_dir = resources.get_path('templates') + +@app_module_setup( + __name__, ModuleCategory.ADDON, config_section=CONFIG_SECTION_NAME, logger=log +) +def setup(app: web.Application, debug: bool = False): + tmpl_dir = resources.get_path("templates") if not tmpl_dir.exists(): log.error("Cannot find email templates in '%s'", tmpl_dir) return False env = aiohttp_jinja2.setup( app, - loader=jinja_app_loader.Loader(), #jinja2.FileSystemLoader(tmpl_dir) - auto_reload=debug + loader=jinja_app_loader.Loader(), # jinja2.FileSystemLoader(tmpl_dir) + auto_reload=debug, ) return env + # alias setup_email = setup -__all__ = ( - 'setup_email' -) +__all__ = "setup_email" diff --git a/services/web/server/src/simcore_service_webserver/email_config.py b/services/web/server/src/simcore_service_webserver/email_config.py index c7e83d6ec1c..d8253554461 100644 --- a/services/web/server/src/simcore_service_webserver/email_config.py +++ b/services/web/server/src/simcore_service_webserver/email_config.py @@ -6,15 +6,18 @@ import trafaret as T -CONFIG_SECTION_NAME = 'smtp' +CONFIG_SECTION_NAME = "smtp" -schema = T.Dict({ - T.Key('sender', default='OSPARC support '): T.String(), # FIXME: email format - 'host': T.String(), - 'port': T.Int(), - T.Key('tls', default=False): T.Or(T.Bool(), T.Int), - T.Key('username', default=None): T.Or(T.String, T.Null), - T.Key('password', default=None): T.Or(T.String, T.Null) +schema = T.Dict( + { + T.Key( + "sender", default="OSPARC support " + ): T.String(), # FIXME: email format + "host": T.String(), + "port": T.Int(), + T.Key("tls", default=False): T.Or(T.Bool(), T.Int), + T.Key("username", default=None): T.Or(T.String, T.Null), + T.Key("password", default=None): T.Or(T.String, T.Null), } ) diff --git a/services/web/server/src/simcore_service_webserver/login/__init__.py b/services/web/server/src/simcore_service_webserver/login/__init__.py index 2f6ea1cfd4c..12ee926619c 100644 --- a/services/web/server/src/simcore_service_webserver/login/__init__.py +++ b/services/web/server/src/simcore_service_webserver/login/__init__.py @@ -33,17 +33,14 @@ def _create_login_config(app: web.Application, storage: AsyncpgStorage) -> Dict: """ Creates compatible config to update login.cfg.cfg object """ - login_cfg = app[APP_CONFIG_KEY].get(CONFIG_SECTION_NAME, {}) # optional! + login_cfg = app[APP_CONFIG_KEY].get(CONFIG_SECTION_NAME, {}) # optional! 
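
One detail on the __all__ rewrites in this series (email.py above; login/__init__.py, decorators.py and routes.py below): __all__ = ('setup_email') was already a plain string, since parentheses without a trailing comma do not make a tuple, so collapsing it to __all__ = "setup_email" changes nothing. Note, though, that a string __all__ is iterated character by character on a star-import; the safe one-element spelling is the tuple with a trailing comma:

    assert ("setup_email") == "setup_email"  # parentheses alone: still a string
    assert ("setup_email",) == tuple(["setup_email"])  # the comma makes the tuple
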
smtp_cfg = app[APP_CONFIG_KEY][SMTP_SECTION] - config = { - "APP": app, - "STORAGE": storage - } + config = {"APP": app, "STORAGE": storage} def _fmt(val): if isinstance(val, str): - if val.strip().lower() in ['null', 'none', '']: + if val.strip().lower() in ["null", "none", ""]: return None return val @@ -55,6 +52,7 @@ def _fmt(val): return config + async def _setup_config_and_pgpool(app: web.Application): """ - gets input configs from different subsystems and initializes cfg (internal configuration) @@ -63,30 +61,33 @@ async def _setup_config_and_pgpool(app: web.Application): :param app: fully setup application on startup :type app: web.Application """ - db_cfg = app[APP_CONFIG_KEY][DB_SECTION]['postgres'] + db_cfg = app[APP_CONFIG_KEY][DB_SECTION]["postgres"] # db pool = await asyncpg.create_pool( dsn=DSN.format(**db_cfg) + f"?application_name={__name__}_{id(app)}", - min_size=db_cfg['minsize'], - max_size=db_cfg['maxsize'], - loop=asyncio.get_event_loop()) + min_size=db_cfg["minsize"], + max_size=db_cfg["maxsize"], + loop=asyncio.get_event_loop(), + ) - storage = AsyncpgStorage(pool) #NOTE: this key belongs to cfg, not settings! + storage = AsyncpgStorage(pool) # NOTE: this key belongs to cfg, not settings! # config config = _create_login_config(app, storage) cfg.configure(config) if INDEX_RESOURCE_NAME in app.router: - cfg['LOGIN_REDIRECT'] = app.router[INDEX_RESOURCE_NAME].url_for() + cfg["LOGIN_REDIRECT"] = app.router[INDEX_RESOURCE_NAME].url_for() else: - log.warning("Unknown location for login page. Defaulting redirection to %s", - cfg['LOGIN_REDIRECT'] ) + log.warning( + "Unknown location for login page. Defaulting redirection to %s", + cfg["LOGIN_REDIRECT"], + ) app[APP_LOGIN_CONFIG] = cfg - yield # ---------------- + yield # ---------------- if config["STORAGE"].pool is not pool: log.error("Somebody has changed the db pool") @@ -96,10 +97,12 @@ async def _setup_config_and_pgpool(app: web.Application): log.exception("Failed to close login storage loop") - -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=[f'simcore_service_webserver.{mod}' for mod in ('rest', 'db') ], - logger=log) +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=[f"simcore_service_webserver.{mod}" for mod in ("rest", "db")], + logger=log, +) def setup_login(app: web.Application): """ Setting up login subsystem in application @@ -114,6 +117,4 @@ def setup_login(app: web.Application): return True -__all__ = ( - 'setup_login' -) +__all__ = "setup_login" diff --git a/services/web/server/src/simcore_service_webserver/login/cfg.py b/services/web/server/src/simcore_service_webserver/login/cfg.py index dc81409acee..675611011cd 100644 --- a/services/web/server/src/simcore_service_webserver/login/cfg.py +++ b/services/web/server/src/simcore_service_webserver/login/cfg.py @@ -2,65 +2,66 @@ APP_LOGIN_CONFIG = __name__ + ".config" -_MINUTES = 1./24./60. +_MINUTES = 1.0 / 24.0 / 60.0 REQUIRED = object() DEFAULTS = { - 'THEME': 'templates/osparc.io', - 'COMMON_THEME': 'templates/common', - 'PASSWORD_LEN': (6, 30), - 'LOGIN_REDIRECT': '/', - 'LOGOUT_REDIRECT': '/', - 'REGISTRATION_CONFIRMATION_REQUIRED': True, - + "THEME": "templates/osparc.io", + "COMMON_THEME": "templates/common", + "PASSWORD_LEN": (6, 30), + "LOGIN_REDIRECT": "/", + "LOGOUT_REDIRECT": "/", + "REGISTRATION_CONFIRMATION_REQUIRED": True, # TODO: add in configuration file as environ! 
-    'SMTP_SENDER': None,
-    'SMTP_HOST': REQUIRED,
-    'SMTP_PORT': REQUIRED,
-    'SMTP_TLS_ENABLED': False,
-    'SMTP_USERNAME': None,
-    'SMTP_PASSWORD': None,
-
+    "SMTP_SENDER": None,
+    "SMTP_HOST": REQUIRED,
+    "SMTP_PORT": REQUIRED,
+    "SMTP_TLS_ENABLED": False,
+    "SMTP_USERNAME": None,
+    "SMTP_PASSWORD": None,
    # email confirmation links lifetime in days
-    'REGISTRATION_CONFIRMATION_LIFETIME': 5,
-    'INVITATION_CONFIRMATION_LIFETIME': 5,
-    'RESET_PASSWORD_CONFIRMATION_LIFETIME': 20 * _MINUTES,
-    'CHANGE_EMAIL_CONFIRMATION_LIFETIME': 5,
-
-    'MSG_LOGGED_IN': 'You are logged in',
-    'MSG_LOGGED_OUT': 'You are logged out',
-    'MSG_ACTIVATED': 'Your account is activated',
-    'MSG_UNKNOWN_EMAIL': 'This email is not registered',
-    'MSG_WRONG_PASSWORD': 'Wrong password',
-    'MSG_PASSWORD_MISMATCH': 'Password and confirmation do not match',
-    'MSG_USER_BANNED': 'This user is banned',
-    'MSG_ACTIVATION_REQUIRED': ('You have to activate your account via'
-                                ' email, before you can login'),
-    'MSG_EMAIL_EXISTS': 'This email is already registered',
-    'MSG_OFTEN_RESET_PASSWORD': (
-        'You can not request of restoring your password so often. Please, use'
-        ' the link we sent you recently'),
-    'MSG_CANT_SEND_MAIL': 'Can\'t send email, try a little later',
-    'MSG_PASSWORDS_NOT_MATCH': 'Passwords must match',
-    'MSG_PASSWORD_CHANGED': 'Your password is changed',
-    'MSG_CHANGE_EMAIL_REQUESTED': ('Please, click on the verification link'
-                                   ' we sent to your new email address'),
-    'MSG_EMAIL_CHANGED': 'Your email is changed',
-    'MSG_AUTH_FAILED': 'Authorization failed',
-    'MSG_EMAIL_SENT': 'An email has been sent to {email} with further instructions',
-
+    "REGISTRATION_CONFIRMATION_LIFETIME": 5,
+    "INVITATION_CONFIRMATION_LIFETIME": 5,
+    "RESET_PASSWORD_CONFIRMATION_LIFETIME": 20 * _MINUTES,
+    "CHANGE_EMAIL_CONFIRMATION_LIFETIME": 5,
+    "MSG_LOGGED_IN": "You are logged in",
+    "MSG_LOGGED_OUT": "You are logged out",
+    "MSG_ACTIVATED": "Your account is activated",
+    "MSG_UNKNOWN_EMAIL": "This email is not registered",
+    "MSG_WRONG_PASSWORD": "Wrong password",
+    "MSG_PASSWORD_MISMATCH": "Password and confirmation do not match",
+    "MSG_USER_BANNED": "This user is banned",
+    "MSG_ACTIVATION_REQUIRED": (
+        "You have to activate your account via" " email, before you can login"
+    ),
+    "MSG_EMAIL_EXISTS": "This email is already registered",
+    "MSG_OFTEN_RESET_PASSWORD": (
+        "You cannot request a password reset this often. Please use"
+        " the link we sent you recently"
+    ),
+    "MSG_CANT_SEND_MAIL": "Can't send email, try a little later",
+    "MSG_PASSWORDS_NOT_MATCH": "Passwords must match",
+    "MSG_PASSWORD_CHANGED": "Your password is changed",
+    "MSG_CHANGE_EMAIL_REQUESTED": (
+        "Please, click on the verification link" " we sent to your new email address"
+    ),
+    "MSG_EMAIL_CHANGED": "Your email is changed",
+    "MSG_AUTH_FAILED": "Authorization failed",
+    "MSG_EMAIL_SENT": "An email has been sent to {email} with further instructions",
    # next settings are initialized during `setup()`, do not set it manually
-    'APP': REQUIRED,
-    'STORAGE': REQUIRED,
+    "APP": REQUIRED,
+    "STORAGE": REQUIRED,
}

-assert 'STORAGE' in DEFAULTS.keys() # nosec
+assert "STORAGE" in DEFAULTS.keys()  # nosec
+

def get_storage(app: web.Application):
-    return app[APP_LOGIN_CONFIG]['STORAGE']
+    return app[APP_LOGIN_CONFIG]["STORAGE"]
+

class Cfg(dict):
-    '''
+    """
    Settings storage which supports both dict and dot notations

    >>> cfg = Cfg({'foo': 1, 'bar': 2, 'baz': REQUIRED})
@@ -103,7 +104,8 @@ class Cfg(dict):
    Traceback (most recent call last):
    ...
AttributeError - ''' + """ + def __init__(self, defaults): super().__init__(self) self.defaults = defaults @@ -112,12 +114,12 @@ def __init__(self, defaults): # pylint: disable=E0202 def __getitem__(self, name): if not self.configured: - raise RuntimeError('Settings are not configured yet') + raise RuntimeError("Settings are not configured yet") self.__getitem__ = super().__getitem__ return super().__getitem__(name) def __getattr__(self, name): - if name == '__wrapped__': + if name == "__wrapped__": raise AttributeError try: return self[name] @@ -129,13 +131,14 @@ def configure(self, updates): for key in self.defaults: value = updates.get(key, self.defaults[key]) if value == REQUIRED: - raise RuntimeError('You have to set `{}`'.format(key)) + raise RuntimeError("You have to set `{}`".format(key)) self[key] = value self.configured = True -if __name__ == '__main__': +if __name__ == "__main__": import doctest + print(doctest.testmod()) else: cfg = Cfg(DEFAULTS) diff --git a/services/web/server/src/simcore_service_webserver/login/config.py b/services/web/server/src/simcore_service_webserver/login/config.py index d3cfeb6a178..cb954d57357 100644 --- a/services/web/server/src/simcore_service_webserver/login/config.py +++ b/services/web/server/src/simcore_service_webserver/login/config.py @@ -7,17 +7,26 @@ from .cfg import DEFAULTS -CONFIG_SECTION_NAME = 'login' +CONFIG_SECTION_NAME = "login" # TODO: merge with cfg.py -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("registration_confirmation_required", default=DEFAULTS["REGISTRATION_CONFIRMATION_REQUIRED"], optional=True): T.Or(T.Bool, T.Int), - T.Key("registration_invitation_required", default=False, optional=True): T.Or(T.Bool, T.Int), -}) +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Bool(), + T.Key( + "registration_confirmation_required", + default=DEFAULTS["REGISTRATION_CONFIRMATION_REQUIRED"], + optional=True, + ): T.Or(T.Bool, T.Int), + T.Key("registration_invitation_required", default=False, optional=True): T.Or( + T.Bool, T.Int + ), + } +) def get_login_config(app): from servicelib.application_keys import APP_CONFIG_KEY + cfg = app[APP_CONFIG_KEY].get(CONFIG_SECTION_NAME, dict()) return cfg diff --git a/services/web/server/src/simcore_service_webserver/login/confirmation.py b/services/web/server/src/simcore_service_webserver/login/confirmation.py index 8b045a3654f..35dd9f4216c 100644 --- a/services/web/server/src/simcore_service_webserver/login/confirmation.py +++ b/services/web/server/src/simcore_service_webserver/login/confirmation.py @@ -13,30 +13,34 @@ log = logging.getLogger(__name__) + async def validate_confirmation_code(code, db): - confirmation = await db.get_confirmation({'code': code}) + confirmation = await db.get_confirmation({"code": code}) if confirmation and is_confirmation_expired(confirmation): - log.info("Confirmation code '%s' %s. Deleting ...", code, - "consumed" if confirmation else "expired") + log.info( + "Confirmation code '%s' %s. 
Deleting ...",
+        code,
+        "consumed" if confirmation else "expired",
+    )
        await db.delete_confirmation(confirmation)
        confirmation = None
    return confirmation


async def make_confirmation_link(request, confirmation):
-    link = request.app.router['auth_confirmation'].url_for(code=confirmation['code'])
-    return '{}://{}{}'.format(request.scheme, request.host, link)
+    link = request.app.router["auth_confirmation"].url_for(code=confirmation["code"])
+    return "{}://{}{}".format(request.scheme, request.host, link)


def get_expiration_date(confirmation):
    lifetime = get_confirmation_lifetime(confirmation)
-    estimated_expiration = confirmation['created_at'] + lifetime
+    estimated_expiration = confirmation["created_at"] + lifetime
    return estimated_expiration


async def is_confirmation_allowed(user, action):
    db = cfg.STORAGE
-    confirmation = await db.get_confirmation({'user': user, 'action': action})
+    confirmation = await db.get_confirmation({"user": user, "action": action})
    if not confirmation:
        return True
    if is_confirmation_expired(confirmation):
@@ -45,18 +49,17 @@ async def is_confirmation_allowed(user, action):


def is_confirmation_expired(confirmation):
-    age = datetime.utcnow() - confirmation['created_at']
+    age = datetime.utcnow() - confirmation["created_at"]
    lifetime = get_confirmation_lifetime(confirmation)
    return age > lifetime


def get_confirmation_lifetime(confirmation):
-    lifetime_days = cfg['{}_CONFIRMATION_LIFETIME'.format(
-        confirmation['action'].upper())]
+    lifetime_days = cfg[
+        "{}_CONFIRMATION_LIFETIME".format(confirmation["action"].upper())
+    ]
    lifetime = timedelta(days=lifetime_days)
    return lifetime


-__all__ = (
-    "ConfirmationAction",
-)
+__all__ = ("ConfirmationAction",)
diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py
index 6cf7aca1ac8..e6d580449a0 100644
--- a/services/web/server/src/simcore_service_webserver/login/decorators.py
+++ b/services/web/server/src/simcore_service_webserver/login/decorators.py
@@ -13,11 +13,13 @@ def user_to_request(handler):
    """ Handler decorator that injects the current authorized user ID into the request """
+
    @wraps(handler)
    async def wrapped(*args, **kwargs):
        request = get_request(*args, **kwargs)
        request[RQT_USERID_KEY] = await authorized_userid(request)
        return await handler(*args, **kwargs)
+
    return wrapped


@@ -29,6 +31,7 @@ def login_required(handler):

    Keeps userid in request[RQT_USERID_KEY]
    """
+
    @wraps(handler)
    async def wrapped(*args, **kwargs):
        request = get_request(*args, **kwargs)
@@ -39,9 +42,8 @@ async def wrapped(*args, **kwargs):
        request[RQT_USERID_KEY] = userid
        ret = await handler(*args, **kwargs)
        return ret
+
    return wrapped


-__all__ = (
-    "login_required"
-)
+__all__ = "login_required"
diff --git a/services/web/server/src/simcore_service_webserver/login/handlers.py b/services/web/server/src/simcore_service_webserver/login/handlers.py
index 7dde6370531..a152ec0343e 100644
--- a/services/web/server/src/simcore_service_webserver/login/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/login/handlers.py
@@ -10,14 +10,22 @@
from ..security_api import check_password, encrypt_password, forget, remember
from .cfg import APP_LOGIN_CONFIG, cfg, get_storage
from .config import get_login_config
-from .confirmation import (is_confirmation_allowed, make_confirmation_link,
-                           validate_confirmation_code)
+from .confirmation import (
+    is_confirmation_allowed,
+    make_confirmation_link,
+    validate_confirmation_code,
+)
from .decorators import
RQT_USERID_KEY, login_required from .registration import check_invitation, check_registration -from .utils import (common_themed, flash_response, get_client_ip, - render_and_send_mail, themed) +from .utils import ( + common_themed, + flash_response, + get_client_ip, + render_and_send_mail, + themed, +) - # FIXME: do not use cfg singleton. use instead cfg = request.app[APP_LOGIN_CONFIG] +# FIXME: do not use cfg singleton. use instead cfg = request.app[APP_LOGIN_CONFIG] log = logging.getLogger(__name__) @@ -28,43 +36,49 @@ def to_names(enum_cls, names): return [getattr(enum_cls, att).name for att in names.split()] -CONFIRMATION_PENDING, ACTIVE, BANNED = to_names(UserStatus, \ - 'CONFIRMATION_PENDING ACTIVE BANNED') +CONFIRMATION_PENDING, ACTIVE, BANNED = to_names( + UserStatus, "CONFIRMATION_PENDING ACTIVE BANNED" +) -ANONYMOUS, GUEST, USER, TESTER= to_names(UserRole, \ - 'ANONYMOUS GUEST USER TESTER') +ANONYMOUS, GUEST, USER, TESTER = to_names(UserRole, "ANONYMOUS GUEST USER TESTER") -REGISTRATION, RESET_PASSWORD, CHANGE_EMAIL = to_names(ConfirmationAction, \ - 'REGISTRATION RESET_PASSWORD CHANGE_EMAIL') +REGISTRATION, RESET_PASSWORD, CHANGE_EMAIL = to_names( + ConfirmationAction, "REGISTRATION RESET_PASSWORD CHANGE_EMAIL" +) async def register(request: web.Request): _, _, body = await extract_and_validate(request) # see https://aiohttp.readthedocs.io/en/stable/web_advanced.html#data-sharing-aka-no-singletons-please - app_cfg = get_login_config(request.app) # TODO: replace cfg by app_cfg + app_cfg = get_login_config(request.app) # TODO: replace cfg by app_cfg db = get_storage(request.app) email = body.email - username = email.split('@')[0] # FIXME: this has to be unique and add this in user registration! + username = email.split("@")[ + 0 + ] # FIXME: this has to be unique and add this in user registration! password = body.password - confirm = body.confirm if hasattr(body, 'confirm') else None + confirm = body.confirm if hasattr(body, "confirm") else None if app_cfg.get("registration_invitation_required"): - invitation = body.invitation if hasattr(body, 'invitation') else None + invitation = body.invitation if hasattr(body, "invitation") else None await check_invitation(invitation, db) await check_registration(email, password, confirm, db) - user = await db.create_user({ - 'name': username, - 'email': email, - 'password_hash': encrypt_password(password), - 'status': CONFIRMATION_PENDING if bool(cfg.REGISTRATION_CONFIRMATION_REQUIRED) - else ACTIVE, - 'role': USER, - 'created_ip': get_client_ip(request), # FIXME: does not get right IP! - }) + user = await db.create_user( + { + "name": username, + "email": email, + "password_hash": encrypt_password(password), + "status": CONFIRMATION_PENDING + if bool(cfg.REGISTRATION_CONFIRMATION_REQUIRED) + else ACTIVE, + "role": USER, + "created_ip": get_client_ip(request), # FIXME: does not get right IP! 
+ } + ) if not bool(cfg.REGISTRATION_CONFIRMATION_REQUIRED): # user is logged in @@ -77,24 +91,27 @@ async def register(request: web.Request): link = await make_confirmation_link(request, confirmation_) try: await render_and_send_mail( - request, email, - themed('registration_email.html'), { - 'auth': { - 'cfg': cfg, - }, - 'host': request.host, - 'link': link, - 'name': email.split("@")[0], - }) - except Exception: #pylint: disable=broad-except - log.exception('Can not send email') + request, + email, + themed("registration_email.html"), + { + "auth": {"cfg": cfg,}, + "host": request.host, + "link": link, + "name": email.split("@")[0], + }, + ) + except Exception: # pylint: disable=broad-except + log.exception("Can not send email") await db.delete_confirmation(confirmation_) await db.delete_user(user) raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL) response = flash_response( "You are registered successfully! To activate your account, please, " - "click on the verification link in the email we sent you.", "INFO") + "click on the verification link in the email we sent you.", + "INFO", + ) return response @@ -105,28 +122,32 @@ async def login(request: web.Request): email = body.email password = body.password - user = await db.get_user({'email': email}) + user = await db.get_user({"email": email}) if not user: - raise web.HTTPUnauthorized(reason=cfg.MSG_UNKNOWN_EMAIL, - content_type='application/json') + raise web.HTTPUnauthorized( + reason=cfg.MSG_UNKNOWN_EMAIL, content_type="application/json" + ) - if user['status'] == BANNED or user['role'] == ANONYMOUS: - raise web.HTTPUnauthorized(reason=cfg.MSG_USER_BANNED, - content_type='application/json') + if user["status"] == BANNED or user["role"] == ANONYMOUS: + raise web.HTTPUnauthorized( + reason=cfg.MSG_USER_BANNED, content_type="application/json" + ) - if not check_password(password, user['password_hash']): - raise web.HTTPUnauthorized(reason=cfg.MSG_WRONG_PASSWORD, - content_type='application/json') + if not check_password(password, user["password_hash"]): + raise web.HTTPUnauthorized( + reason=cfg.MSG_WRONG_PASSWORD, content_type="application/json" + ) - if user['status'] == CONFIRMATION_PENDING: - raise web.HTTPUnauthorized(reason=cfg.MSG_ACTIVATION_REQUIRED, - content_type='application/json') + if user["status"] == CONFIRMATION_PENDING: + raise web.HTTPUnauthorized( + reason=cfg.MSG_ACTIVATION_REQUIRED, content_type="application/json" + ) - assert user['status'] == ACTIVE, "db corrupted. Invalid status" # nosec - assert user['email'] == email, "db corrupted. Invalid email" # nosec + assert user["status"] == ACTIVE, "db corrupted. Invalid status" # nosec + assert user["email"] == email, "db corrupted. 
Invalid email"  # nosec

    # user logs in
-    identity = user['email']
+    identity = user["email"]
    response = flash_response(cfg.MSG_LOGGED_IN, "INFO")
    await remember(request, response, identity)
    return response
@@ -163,39 +184,40 @@ async def reset_password(request: web.Request):
    db = get_storage(request.app)

    email = body.email

-    user = await db.get_user({'email': email})
+    user = await db.get_user({"email": email})
    try:
        if not user:
-            raise web.HTTPUnprocessableEntity(reason=cfg.MSG_UNKNOWN_EMAIL,
-                                              content_type='application/json') # 422
+            raise web.HTTPUnprocessableEntity(
+                reason=cfg.MSG_UNKNOWN_EMAIL, content_type="application/json"
+            )  # 422

-        if user['status'] == BANNED:
-            raise web.HTTPUnauthorized(reason=cfg.MSG_USER_BANNED,
-                                       content_type='application/json') # 401
+        if user["status"] == BANNED:
+            raise web.HTTPUnauthorized(
+                reason=cfg.MSG_USER_BANNED, content_type="application/json"
+            )  # 401

-        if user['status'] == CONFIRMATION_PENDING:
-            raise web.HTTPUnauthorized(reason=cfg.MSG_ACTIVATION_REQUIRED,
-                                       content_type='application/json') # 401
+        if user["status"] == CONFIRMATION_PENDING:
+            raise web.HTTPUnauthorized(
+                reason=cfg.MSG_ACTIVATION_REQUIRED, content_type="application/json"
+            )  # 401

-        assert user['status'] == ACTIVE # nosec
-        assert user['email'] == email # nosec
+        assert user["status"] == ACTIVE  # nosec
+        assert user["email"] == email  # nosec

        if not await is_confirmation_allowed(user, action=RESET_PASSWORD):
-            raise web.HTTPUnauthorized(reason=cfg.MSG_OFTEN_RESET_PASSWORD,
-                                       content_type='application/json') # 401
+            raise web.HTTPUnauthorized(
+                reason=cfg.MSG_OFTEN_RESET_PASSWORD, content_type="application/json"
+            )  # 401
    except web.HTTPError as err:
        # Email with an explanation and suggestions of alternative approaches or ways to contact support for help
        try:
            await render_and_send_mail(
-                request, email,
-                common_themed('reset_password_email_failed.html'), {
-                    'auth': {
-                        'cfg': cfg,
-                    },
-                    'host': request.host,
-                    'reason': err.reason,
-                })
-        except Exception: #pylint: disable=broad-except
+                request,
+                email,
+                common_themed("reset_password_email_failed.html"),
+                {"auth": {"cfg": cfg,}, "host": request.host, "reason": err.reason,},
+            )
+        except Exception:  # pylint: disable=broad-except
            log.exception("Cannot send email")
            raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL)
    else:
@@ -204,16 +226,13 @@ async def reset_password(request: web.Request):
        try:
            # primary reset email with a URL and the normal instructions.
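
For context on the lifetime check in is_confirmation_allowed above: the *_CONFIRMATION_LIFETIME settings in cfg.py are expressed in days, and _MINUTES = 1.0 / 24.0 / 60.0 is simply one minute in day units, so RESET_PASSWORD_CONFIRMATION_LIFETIME = 20 * _MINUTES hands get_confirmation_lifetime's timedelta(days=...) a 20-minute window:

    from datetime import timedelta

    _MINUTES = 1.0 / 24.0 / 60.0  # one minute, expressed in days
    lifetime = timedelta(days=20 * _MINUTES)
    print(lifetime)  # 0:20:00
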
await render_and_send_mail( - request, email, - common_themed('reset_password_email.html'), { - 'auth': { - 'cfg': cfg, - }, - 'host': request.host, - 'link': link, - }) - except Exception: #pylint: disable=broad-except - log.exception('Can not send email') + request, + email, + common_themed("reset_password_email.html"), + {"auth": {"cfg": cfg,}, "host": request.host, "link": link,}, + ) + except Exception: # pylint: disable=broad-except + log.exception("Can not send email") await db.delete_confirmation(confirmation) raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL) @@ -228,21 +247,18 @@ async def change_email(request: web.Request): db = get_storage(request.app) email = body.email - user = await db.get_user({'id': request[RQT_USERID_KEY]}) - assert user # nosec + user = await db.get_user({"id": request[RQT_USERID_KEY]}) + assert user # nosec - if user['email'] == email: + if user["email"] == email: return flash_response("Email changed") - other = await db.get_user({'email': email}) + other = await db.get_user({"email": email}) if other: raise web.HTTPUnprocessableEntity(reason="This email cannot be used") # Reset if previously requested - confirmation = await db.get_confirmation({ - 'user': user, - 'action': CHANGE_EMAIL} - ) + confirmation = await db.get_confirmation({"user": user, "action": CHANGE_EMAIL}) if confirmation: await db.delete_confirmation(confirmation) @@ -251,16 +267,13 @@ async def change_email(request: web.Request): link = await make_confirmation_link(request, confirmation) try: await render_and_send_mail( - request, email, - common_themed('change_email_email.html'), { - 'auth': { - 'cfg': cfg, - }, - 'host': request.host, - 'link': link, - }) - except Exception: #pylint: disable=broad-except - log.error('Can not send email') + request, + email, + common_themed("change_email_email.html"), + {"auth": {"cfg": cfg,}, "host": request.host, "link": link,}, + ) + except Exception: # pylint: disable=broad-except + log.error("Can not send email") await db.delete_confirmation(confirmation) raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL) @@ -272,8 +285,8 @@ async def change_email(request: web.Request): async def change_password(request: web.Request): db = get_storage(request.app) - user = await db.get_user({'id': request[RQT_USERID_KEY]}) - assert user # nosec + user = await db.get_user({"id": request[RQT_USERID_KEY]}) + assert user # nosec _, _, body = await extract_and_validate(request) @@ -281,15 +294,17 @@ async def change_password(request: web.Request): new_password = body.new confirm = body.confirm - if not check_password(cur_password, user['password_hash']): - raise web.HTTPUnprocessableEntity(reason=cfg.MSG_WRONG_PASSWORD, - content_type='application/json') # 422 + if not check_password(cur_password, user["password_hash"]): + raise web.HTTPUnprocessableEntity( + reason=cfg.MSG_WRONG_PASSWORD, content_type="application/json" + ) # 422 if new_password != confirm: - raise web.HTTPConflict(reason=cfg.MSG_PASSWORD_MISMATCH, - content_type='application/json') # 409 + raise web.HTTPConflict( + reason=cfg.MSG_PASSWORD_MISMATCH, content_type="application/json" + ) # 409 - await db.update_user(user, {'password_hash': encrypt_password(new_password)}) + await db.update_user(user, {"password_hash": encrypt_password(new_password)}) response = flash_response(cfg.MSG_PASSWORD_CHANGED) return response @@ -313,30 +328,30 @@ async def email_confirmation(request: web.Request): params, _, _ = await extract_and_validate(request) db = get_storage(request.app) - code 
= params['code'] + code = params["code"] confirmation = await validate_confirmation_code(code, db) if confirmation: - action = confirmation['action'] - redirect_url = URL(request.app[APP_LOGIN_CONFIG]['LOGIN_REDIRECT']) + action = confirmation["action"] + redirect_url = URL(request.app[APP_LOGIN_CONFIG]["LOGIN_REDIRECT"]) if action == REGISTRATION: - user = await db.get_user({'id': confirmation['user_id']}) - await db.update_user(user, {'status': ACTIVE}) + user = await db.get_user({"id": confirmation["user_id"]}) + await db.update_user(user, {"status": ACTIVE}) await db.delete_confirmation(confirmation) log.debug("User %s registered", user) redirect_url = redirect_url.with_fragment("?registered=true") elif action == CHANGE_EMAIL: - user = await db.get_user({'id': confirmation['user_id']}) - await db.update_user(user, {'email': confirmation['data']}) + user = await db.get_user({"id": confirmation["user_id"]}) + await db.update_user(user, {"email": confirmation["data"]}) await db.delete_confirmation(confirmation) log.debug("User %s changed email", user) elif action == RESET_PASSWORD: # NOTE: By using fragments (instead of queries or path parameters), the browser does NOT reloads page - redirect_url = redirect_url.with_fragment("reset-password?code=%s" % code ) + redirect_url = redirect_url.with_fragment("reset-password?code=%s" % code) log.debug("Reset password requested %s", confirmation) raise web.HTTPFound(location=redirect_url) @@ -349,27 +364,28 @@ async def reset_password_allowed(request: web.Request): params, _, body = await extract_and_validate(request) db = get_storage(request.app) - code = params['code'] + code = params["code"] password = body.password confirm = body.confirm if password != confirm: - raise web.HTTPConflict(reason=cfg.MSG_PASSWORD_MISMATCH, - content_type='application/json') # 409 + raise web.HTTPConflict( + reason=cfg.MSG_PASSWORD_MISMATCH, content_type="application/json" + ) # 409 confirmation = await validate_confirmation_code(code, db) if confirmation: - user = await db.get_user({'id': confirmation['user_id']}) - assert user # nosec + user = await db.get_user({"id": confirmation["user_id"]}) + assert user # nosec - await db.update_user(user, { - 'password_hash': encrypt_password(password) - }) + await db.update_user(user, {"password_hash": encrypt_password(password)}) await db.delete_confirmation(confirmation) response = flash_response(cfg.MSG_PASSWORD_CHANGED) return response - raise web.HTTPUnauthorized(reason="Cannot reset password. Invalid token or user", - content_type='application/json') # 401 + raise web.HTTPUnauthorized( + reason="Cannot reset password. 
Invalid token or user", + content_type="application/json", + ) # 401 diff --git a/services/web/server/src/simcore_service_webserver/login/registration.py b/services/web/server/src/simcore_service_webserver/login/registration.py index 28be2601e31..3c0558e5c8c 100644 --- a/services/web/server/src/simcore_service_webserver/login/registration.py +++ b/services/web/server/src/simcore_service_webserver/login/registration.py @@ -13,38 +13,45 @@ from ..db_models import UserStatus from .cfg import cfg -from .confirmation import (ConfirmationAction, get_expiration_date, - is_confirmation_expired, validate_confirmation_code) +from .confirmation import ( + ConfirmationAction, + get_expiration_date, + is_confirmation_expired, + validate_confirmation_code, +) from .storage import AsyncpgStorage log = logging.getLogger(__name__) -async def check_registration(email: str, password: str, confirm: str, db: AsyncpgStorage): +async def check_registration( + email: str, password: str, confirm: str, db: AsyncpgStorage +): # email : required & formats # password: required & secure[min length, ...] # If the email field is missing, return a 400 - HTTPBadRequest if email is None or password is None: - raise web.HTTPBadRequest(reason="Email and password required", - content_type='application/json') + raise web.HTTPBadRequest( + reason="Email and password required", content_type="application/json" + ) if confirm and password != confirm: - raise web.HTTPConflict(reason=cfg.MSG_PASSWORD_MISMATCH, - content_type='application/json') + raise web.HTTPConflict( + reason=cfg.MSG_PASSWORD_MISMATCH, content_type="application/json" + ) # TODO: If the email field isn’t a valid email, return a 422 - HTTPUnprocessableEntity # TODO: If the password field is too short, return a 422 - HTTPUnprocessableEntity # TODO: use passwordmeter to enforce good passwords, but first create helper in front-end - user = await db.get_user({'email': email}) + user = await db.get_user({"email": email}) if user: # Resets pending confirmation if re-registers? 
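
Several user-facing messages in this series are built by adjacent string literal concatenation (see MSG_ACTIVATION_REQUIRED in cfg.py above, or the HTTPForbidden reason in check_invitation just below). The literals are simply glued together, so each piece must carry its own separating space, which is easy to get wrong:

    reason = (
        "Invalid invitation code. "  # note the explicit trailing space
        "Your invitation was already used or might have expired."
    )
    # Without that space the sentences run together: "...code.Your invitation..."
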
-        if user['status'] == UserStatus.CONFIRMATION_PENDING.value:
-            _confirmation = await db.get_confirmation({
-                'user': user,
-                'action': ConfirmationAction.REGISTRATION.value
-            })
+        if user["status"] == UserStatus.CONFIRMATION_PENDING.value:
+            _confirmation = await db.get_confirmation(
+                {"user": user, "action": ConfirmationAction.REGISTRATION.value}
+            )

            if is_confirmation_expired(_confirmation):
                await db.delete_confirmation(_confirmation)
@@ -52,12 +59,14 @@ async def check_registration(email: str, password: str, confirm: str, db: Asyncp
                return

        # If the email is already taken, return a 409 - HTTPConflict
-        raise web.HTTPConflict(reason=cfg.MSG_EMAIL_EXISTS,
-                               content_type='application/json')
+        raise web.HTTPConflict(
+            reason=cfg.MSG_EMAIL_EXISTS, content_type="application/json"
+        )

    log.debug("Registration data validated")

-async def create_invitation(host:Dict, guest:str, db:AsyncpgStorage):
+
+async def create_invitation(host: Dict, guest: str, db: AsyncpgStorage):
    """ Creates an invitation token for a guest to register in the platform

    Creates and injects an invitation token in the confirmation table associated
@@ -70,46 +79,53 @@ async def create_invitation(host:Dict, guest:str, db:AsyncpgStorage):
    confirmation = await db.create_confirmation(
        user=host,
        action=ConfirmationAction.INVITATION.name,
-        data= json.dumps({
-            "created_by": host['email'],
-            "guest": guest
-        })
+        data=json.dumps({"created_by": host["email"], "guest": guest}),
    )
    return confirmation

-async def check_invitation(invitation:str, db:AsyncpgStorage):
+
+async def check_invitation(invitation: str, db: AsyncpgStorage):
    confirmation = None
    if invitation:
        confirmation = await validate_confirmation_code(invitation, db)

    if confirmation:
-        #FIXME: check if action=invitation??
-        log.info("Invitation code used. Deleting %s", pformat(get_confirmation_info(confirmation)))
+        # FIXME: check if action=invitation??
+        log.info(
+            "Invitation code used. Deleting %s",
+            pformat(get_confirmation_info(confirmation)),
+        )
        await db.delete_confirmation(confirmation)
    else:
-        raise web.HTTPForbidden(reason=("Invalid invitation code."
-                                        "Your invitation was already used or might have expired."
-                                        "Please contact our support team to get a new one.") )
+        raise web.HTTPForbidden(
+            reason=(
+                "Invalid invitation code. "
+                "Your invitation was already used or might have expired. "
+                "Please contact our support team to get a new one."
+            )
+        )
+

def get_confirmation_info(confirmation):
    info = dict(confirmation)
    # data column is a string
    try:
-        info['data'] = json.loads(confirmation['data'])
+        info["data"] = json.loads(confirmation["data"])
    except json.decoder.JSONDecodeError:
        log.warning("Failed to load data from confirmation.
Skipping 'data' field.")

    # extra
    info["expires"] = get_expiration_date(confirmation)

-    if confirmation['action']==ConfirmationAction.INVITATION.name:
+    if confirmation["action"] == ConfirmationAction.INVITATION.name:
        info["url"] = get_invitation_url(confirmation)

    return info

-def get_invitation_url(confirmation, origin: URL=None) -> URL:
-    code = confirmation['code']
-    is_invitation = confirmation['action'] == ConfirmationAction.INVITATION.name
+
+def get_invitation_url(confirmation, origin: URL = None) -> URL:
+    code = confirmation["code"]
+    is_invitation = confirmation["action"] == ConfirmationAction.INVITATION.name

    if origin is None or not is_invitation:
        origin = URL()
diff --git a/services/web/server/src/simcore_service_webserver/login/routes.py b/services/web/server/src/simcore_service_webserver/login/routes.py
index 0e84d44d94e..61b78959bd7 100644
--- a/services/web/server/src/simcore_service_webserver/login/routes.py
+++ b/services/web/server/src/simcore_service_webserver/login/routes.py
@@ -10,15 +10,13 @@
from aiohttp import web

from servicelib import openapi
-from servicelib.rest_routing import (iter_path_operations,
-                                     map_handlers_with_operations)
+from servicelib.rest_routing import iter_path_operations, map_handlers_with_operations

from . import handlers as login_handlers

log = logging.getLogger(__name__)

-
def create(specs: openapi.Spec) -> List[web.RouteDef]:
    """ Creates routes mapping operation_ids with handler functions

@@ -36,23 +34,21 @@ def include_path(tuple_object):
        return path.startswith(base_path + "/auth/")

    handlers_map = {
-        'auth_register': login_handlers.register,
-        'auth_login': login_handlers.login,
-        'auth_logout': login_handlers.logout,
-        'auth_reset_password': login_handlers.reset_password,
-        'auth_reset_password_allowed': login_handlers.reset_password_allowed,
-        'auth_change_email': login_handlers.change_email,
-        'auth_change_password': login_handlers.change_password,
-        'auth_confirmation': login_handlers.email_confirmation,
+        "auth_register": login_handlers.register,
+        "auth_login": login_handlers.login,
+        "auth_logout": login_handlers.logout,
+        "auth_reset_password": login_handlers.reset_password,
+        "auth_reset_password_allowed": login_handlers.reset_password_allowed,
+        "auth_change_email": login_handlers.change_email,
+        "auth_change_password": login_handlers.change_password,
+        "auth_confirmation": login_handlers.email_confirmation,
    }

    routes = map_handlers_with_operations(
-        handlers_map,
-        filter(include_path, iter_path_operations(specs)),
-        strict=True
+        handlers_map, filter(include_path, iter_path_operations(specs)), strict=True
    )
-    log.debug("Mapped auth routes: %s", "\n".join( [pformat(r) for r in routes]) )
+    log.debug("Mapped auth routes: %s", "\n".join([pformat(r) for r in routes]))

    return routes

@@ -60,6 +56,4 @@ def include_path(tuple_object):
# alias
create_routes = create

-__all__ = (
-    'create_routes'
-)
+__all__ = "create_routes"
diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py
index 115a9dd7e0c..cd092905e45 100644
--- a/services/web/server/src/simcore_service_webserver/login/settings.py
+++ b/services/web/server/src/simcore_service_webserver/login/settings.py
@@ -2,7 +2,7 @@

APP_LOGIN_CONFIG = __name__ + ".config"

-CFG_LOGIN_STORAGE = "STORAGE" # Needs to match login.cfg!!!
+CFG_LOGIN_STORAGE = "STORAGE"  # Needs to match login.cfg!!!
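
create() above pairs each OpenAPI operation_id with the login handler of the same name. The servicelib helpers are used as a black box here, but the idea can be illustrated with a small self-contained sketch (the names and the tuple shape are assumptions for illustration, not the servicelib API):

    from typing import Callable, Dict, Iterable, List, Tuple

    # Assume each spec operation looks like (method, path, operation_id).
    Operation = Tuple[str, str, str]

    def map_ops(
        handlers: Dict[str, Callable],
        ops: Iterable[Operation],
        strict: bool = True,
    ) -> List[Tuple[str, str, Callable]]:
        routes = []
        for method, path, op_id in ops:
            handler = handlers.get(op_id)
            if handler is None:
                if strict:
                    raise ValueError(f"No handler found for operation {op_id}")
                continue
            routes.append((method, path, handler))
        return routes

With strict=True, a spec operation without a matching handler fails fast at setup instead of surfacing as a missing route at runtime.
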
def get_storage(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/login/sql.py b/services/web/server/src/simcore_service_webserver/login/sql.py index 2cf03b1918f..5876bfa6dd4 100644 --- a/services/web/server/src/simcore_service_webserver/login/sql.py +++ b/services/web/server/src/simcore_service_webserver/login/sql.py @@ -2,7 +2,7 @@ log = getLogger(__name__) -LOG_TPL = '%s <--%s' +LOG_TPL = "%s <--%s" def find_one(conn, table, filter_, fields=None): @@ -12,27 +12,27 @@ def find_one(conn, table, filter_, fields=None): def find_one_sql(table, filter_, fields=None): - ''' + """ >>> find_one_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) >>> find_one_sql('tbl', {'id': 10}, fields=['foo', 'bar']) ('SELECT foo, bar FROM tbl WHERE id=$1', [10]) - ''' + """ keys, values = _split_dict(filter_) - fields = ', '.join(fields) if fields else '*' + fields = ", ".join(fields) if fields else "*" where = _pairs(keys) - sql = 'SELECT {} FROM {} WHERE {}'.format(fields, table, where) + sql = "SELECT {} FROM {} WHERE {}".format(fields, table, where) return sql, values -def insert(conn, table, data, returning='id'): +def insert(conn, table, data, returning="id"): sql, values = insert_sql(table, data, returning) log.debug(LOG_TPL, sql, values) return conn.fetchval(sql, *values) -def insert_sql(table, data, returning='id'): - ''' +def insert_sql(table, data, returning="id"): + """ >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}) ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING id', ['bar', 1]) @@ -41,13 +41,14 @@ def insert_sql(table, data, returning='id'): >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning='pk') ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1]) - ''' + """ keys, values = _split_dict(data) - sql = 'INSERT INTO {} ({}) VALUES ({}){}'.format( + sql = "INSERT INTO {} ({}) VALUES ({}){}".format( table, - ', '.join(keys), - ', '.join(_placeholders(data)), - ' RETURNING {}'.format(returning) if returning else '') + ", ".join(keys), + ", ".join(_placeholders(data)), + " RETURNING {}".format(returning) if returning else "", + ) return sql, values @@ -58,16 +59,15 @@ def update(conn, table, filter_, updates): def update_sql(table, filter_, updates): - ''' + """ >>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'}) ('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a']) - ''' + """ where_keys, where_vals = _split_dict(filter_) up_keys, up_vals = _split_dict(updates) - changes = _pairs(up_keys, sep=', ') + changes = _pairs(up_keys, sep=", ") where = _pairs(where_keys, start=len(up_keys) + 1) - sql = 'UPDATE {} SET {} WHERE {}'.format( - table, changes, where) + sql = "UPDATE {} SET {} WHERE {}".format(table, changes, where) return sql, up_vals + where_vals @@ -78,41 +78,41 @@ def delete(conn, table, filter_): def delete_sql(table, filter_): - ''' + """ >>> delete_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('DELETE FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) - ''' + """ keys, values = _split_dict(filter_) where = _pairs(keys) - sql = 'DELETE FROM {} WHERE {}'.format(table, where) + sql = "DELETE FROM {} WHERE {}".format(table, where) return sql, values -def _pairs(keys, *, start=1, sep=' AND '): - ''' +def _pairs(keys, *, start=1, sep=" AND "): + """ >>> _pairs(['foo', 'bar', 'baz'], sep=', ') 'foo=$1, bar=$2, baz=$3' >>> _pairs(['foo', 'bar', 'baz'], start=2) 'foo=$2 AND bar=$3 AND baz=$4' - ''' - return sep.join('{}=${}'.format(k, i) for i, k in 
enumerate(keys, start)) + """ + return sep.join("{}=${}".format(k, i) for i, k in enumerate(keys, start)) def _placeholders(variables): - '''Returns placeholders by number of variables + """Returns placeholders by number of variables >>> _placeholders(['foo', 'bar', 1]) ['$1', '$2', '$3'] - ''' - return ['${}'.format(i) for i, _ in enumerate(variables, 1)] + """ + return ["${}".format(i) for i, _ in enumerate(variables, 1)] def _split_dict(dic): - '''Split dict into sorted keys and values + """Split dict into sorted keys and values >>> _split_dict({'b': 2, 'a': 1}) (['a', 'b'], [1, 2]) - ''' + """ keys = sorted(dic.keys()) return keys, [dic[k] for k in keys] @@ -120,6 +120,4 @@ def _split_dict(dic): if __name__ == "__main__": import doctest - print(doctest.testmod( - optionflags=doctest.REPORT_ONLY_FIRST_FAILURE - )) + print(doctest.testmod(optionflags=doctest.REPORT_ONLY_FIRST_FAILURE)) diff --git a/services/web/server/src/simcore_service_webserver/login/storage.py b/services/web/server/src/simcore_service_webserver/login/storage.py index 58a680dcd3e..ca908ae205b 100644 --- a/services/web/server/src/simcore_service_webserver/login/storage.py +++ b/services/web/server/src/simcore_service_webserver/login/storage.py @@ -11,10 +11,11 @@ log = getLogger(__name__) + class AsyncpgStorage: - def __init__(self, pool, *, - user_table_name='users', - confirmation_table_name='confirmations'): + def __init__( + self, pool, *, user_table_name="users", confirmation_table_name="confirmations" + ): self.pool = pool self.user_tbl = user_table_name self.confirm_tbl = confirmation_table_name @@ -22,54 +23,49 @@ def __init__(self, pool, *, async def get_user(self, with_data) -> asyncpg.Record: # FIXME: these can throw!!!! async with self.pool.acquire() as conn: - data = await sql.find_one(conn, self.user_tbl, with_data) + data = await sql.find_one(conn, self.user_tbl, with_data) return data async def create_user(self, data) -> asyncpg.Record: - data.setdefault('created_at', datetime.utcnow()) + data.setdefault("created_at", datetime.utcnow()) async with self.pool.acquire() as conn: - data['id'] = await sql.insert(conn, self.user_tbl, data) + data["id"] = await sql.insert(conn, self.user_tbl, data) return data async def update_user(self, user, updates) -> asyncpg.Record: async with self.pool.acquire() as conn: - await sql.update(conn, self.user_tbl, {'id': user['id']}, updates) + await sql.update(conn, self.user_tbl, {"id": user["id"]}, updates) async def delete_user(self, user): async with self.pool.acquire() as conn: - await sql.delete(conn, self.user_tbl, {'id': user['id']}) + await sql.delete(conn, self.user_tbl, {"id": user["id"]}) async def create_confirmation(self, user, action, data=None) -> asyncpg.Record: async with self.pool.acquire() as conn: while True: code = get_random_string(30) - if not await sql.find_one(conn, self.confirm_tbl, - {'code': code}): + if not await sql.find_one(conn, self.confirm_tbl, {"code": code}): break confirmation = { - 'code': code, - 'user_id': user['id'], - 'action': action, - 'data': data, - 'created_at': datetime.utcnow(), + "code": code, + "user_id": user["id"], + "action": action, + "data": data, + "created_at": datetime.utcnow(), } await sql.insert(conn, self.confirm_tbl, confirmation, None) return confirmation async def get_confirmation(self, filter_dict) -> asyncpg.Record: - if 'user' in filter_dict: - filter_dict['user_id'] = filter_dict.pop('user')['id'] + if "user" in filter_dict: + filter_dict["user_id"] = filter_dict.pop("user")["id"] async with 
self.pool.acquire() as conn: confirmation = await sql.find_one(conn, self.confirm_tbl, filter_dict) return confirmation async def delete_confirmation(self, confirmation): async with self.pool.acquire() as conn: - await sql.delete(conn, self.confirm_tbl, - {'code': confirmation['code']}) - - - + await sql.delete(conn, self.confirm_tbl, {"code": confirmation["code"]}) # helpers ---------------------------- @@ -79,16 +75,19 @@ def _to_enum(data): # TODO: ensure columns names and types! User tables for that # See https://docs.sqlalchemy.org/en/latest/core/metadata.html if data: - for key, enumtype in ( ('status', UserStatus), - ('role', UserRole), - ('action', ConfirmationAction) ): + for key, enumtype in ( + ("status", UserStatus), + ("role", UserRole), + ("action", ConfirmationAction), + ): if key in data: data[key] = getattr(enumtype, data[key]) return data + def _to_name(data): if data: - for key in ('status', 'role', 'action'): + for key in ("status", "role", "action"): if key in data: if isinstance(data[key], enum.Enum): data[key] = data[key].name diff --git a/services/web/server/src/simcore_service_webserver/login/utils.py b/services/web/server/src/simcore_service_webserver/login/utils.py index 577f30198cb..cd6437f0c4d 100644 --- a/services/web/server/src/simcore_service_webserver/login/utils.py +++ b/services/web/server/src/simcore_service_webserver/login/utils.py @@ -1,51 +1,51 @@ import random -import string from email.mime.text import MIMEText from logging import getLogger from os.path import join from pprint import pformat +from typing import Mapping, Optional import aiosmtplib import attr import passlib.hash +from aiohttp import web from aiohttp_jinja2 import render_string +from passlib import pwd -from aiohttp import web from servicelib.rest_models import LogMessageType from ..resources import resources from .cfg import cfg # TODO: remove this singleton!!! 
-CHARS = string.ascii_uppercase + string.ascii_lowercase + string.digits log = getLogger(__name__) -def encrypt_password(password): - #TODO: add settings sha256_crypt.using(**settings).hash(secret) +def encrypt_password(password: str) -> str: + # TODO: add settings sha256_crypt.using(**settings).hash(secret) # see https://passlib.readthedocs.io/en/stable/lib/passlib.hash.sha256_crypt.html # return passlib.hash.sha256_crypt.using(rounds=1000).hash(password) -def check_password(password, password_hash): +def check_password(password: str, password_hash: str) -> bool: return passlib.hash.sha256_crypt.verify(password, password_hash) -def get_random_string(min_len, max_len=None): +def get_random_string(min_len: int, max_len: Optional[int] = None) -> str: max_len = max_len or min_len size = random.randint(min_len, max_len) - return ''.join(random.choice(CHARS) for x in range(size)) + return pwd.genword(entropy=52, length=size) -def get_client_ip(request): +def get_client_ip(request: web.Request) -> str: try: - ips = request.headers['X-Forwarded-For'] + ips = request.headers["X-Forwarded-For"] except KeyError: - ips = request.transport.get_extra_info('peername')[0] - return ips.split(',')[0] + ips = request.transport.get_extra_info("peername")[0] + return ips.split(",")[0] -async def send_mail(recipient, subject, body): +async def send_mail(recipient: str, subject: str, body: str) -> None: # TODO: move to email submodule smtp_args = dict( loop=cfg.APP.loop, @@ -55,10 +55,10 @@ async def send_mail(recipient, subject, body): ) log.debug("Sending email with smtp configuration: %s", pformat(smtp_args)) - msg = MIMEText(body, 'html') - msg['Subject'] = subject - msg['From'] = cfg.SMTP_SENDER - msg['To'] = recipient + msg = MIMEText(body, "html") + msg["Subject"] = subject + msg["From"] = cfg.SMTP_SENDER + msg["To"] = recipient if cfg.SMTP_PORT == 587: # NOTE: aiosmtplib does not handle port 587 correctly @@ -81,21 +81,25 @@ async def send_mail(recipient, subject, body): await smtp.login(cfg.SMTP_USERNAME, cfg.SMTP_PASSWORD) await smtp.send_message(msg) -async def render_and_send_mail(request, to, template, context=None): + +async def render_and_send_mail( + request: web.Request, to: str, template: str, context: Mapping +): page = render_string(str(template), request, context) - subject, body = page.split('\n', 1) + subject, body = page.split("\n", 1) await send_mail(to, subject.strip(), body) def themed(template): return resources.get_path(join(cfg.THEME, template)) + def common_themed(template): return resources.get_path(join(cfg.COMMON_THEME, template)) -def flash_response(msg: str, level: str="INFO"): - response = web.json_response(data={ - 'data': attr.asdict(LogMessageType(msg, level)), - 'error': None - }) + +def flash_response(msg: str, level: str = "INFO") -> web.Response: + response = web.json_response( + data={"data": attr.asdict(LogMessageType(msg, level)), "error": None} + ) return response diff --git a/services/web/server/src/simcore_service_webserver/projects/__init__.py b/services/web/server/src/simcore_service_webserver/projects/__init__.py index 9e8535add87..66cd4423026 100644 --- a/services/web/server/src/simcore_service_webserver/projects/__init__.py +++ b/services/web/server/src/simcore_service_webserver/projects/__init__.py @@ -12,14 +12,15 @@ from aiohttp import ClientSession, web from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed -from servicelib.application_keys import (APP_CONFIG_KEY, - APP_JSONSCHEMA_SPECS_KEY) +from servicelib.application_keys import 
APP_CONFIG_KEY, APP_JSONSCHEMA_SPECS_KEY from servicelib.application_setup import ModuleCategory, app_module_setup from servicelib.client_session import get_client_session from servicelib.jsonschema_specs import create_jsonschema_specs -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from ..resources import resources from ..rest_config import APP_OPENAPI_SPECS_KEY @@ -41,12 +42,12 @@ def _create_routes(tag, handlers_module, specs, *, disable_login=False): # TODO: Remove 'disable_login' and use instead a mock.patch on the decorator! handlers = get_handlers_from_namespace(handlers_module) if disable_login: - handlers = { name: hnds.__wrapped__ for name, hnds in handlers.items() } + handlers = {name: hnds.__wrapped__ for name, hnds in handlers.items()} routes = map_handlers_with_operations( - handlers, - filter(lambda o: tag in o[3], iter_path_operations(specs)), - strict=True + handlers, + filter(lambda o: tag in o[3], iter_path_operations(specs)), + strict=True, ) if disable_login: @@ -55,10 +56,12 @@ def _create_routes(tag, handlers_module, specs, *, disable_login=False): return routes - -@app_module_setup(module_name, ModuleCategory.ADDON, - depends=[f'simcore_service_webserver.{mod}' for mod in ('rest', 'db') ], - logger=logger) +@app_module_setup( + module_name, + ModuleCategory.ADDON, + depends=[f"simcore_service_webserver.{mod}" for mod in ("rest", "db")], + logger=logger, +) def setup(app: web.Application, *, enable_fake_data=False) -> bool: """ @@ -105,6 +108,4 @@ def setup(app: web.Application, *, enable_fake_data=False) -> bool: # alias setup_projects = setup -__all__ = ( - 'setup_projects' -) +__all__ = "setup_projects" diff --git a/services/web/server/src/simcore_service_webserver/projects/config.py b/services/web/server/src/simcore_service_webserver/projects/config.py index c1f2a2bfd29..eb3e7d6cbad 100644 --- a/services/web/server/src/simcore_service_webserver/projects/config.py +++ b/services/web/server/src/simcore_service_webserver/projects/config.py @@ -7,6 +7,4 @@ CONFIG_SECTION_NAME = "projects" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool() -}) +schema = T.Dict({T.Key("enabled", default=True, optional=True): T.Bool()}) diff --git a/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py index 194408536b8..32409184359 100644 --- a/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py @@ -13,6 +13,7 @@ log = logging.getLogger(__name__) + @login_required async def get_node_output_ui(request: web.Request): """ Returns a json description of the ui for presenting the output within the mainUi @@ -20,9 +21,7 @@ async def get_node_output_ui(request: web.Request): json payloads and responses for the api calls available at this endpoint """ - log.debug(request.match_info["nodeInstanceUUID"], - request.match_info["outputKey"] - ) + log.debug(request.match_info["nodeInstanceUUID"], request.match_info["outputKey"]) raise NotImplementedError() @@ -33,14 +32,16 @@ async def send_to_node_output_api(request: web.Request): protocol depends on the definition """ body = await request.body - log.debug(request.match_info["nodeInstanceUUID"], - 
request.match_info["outputKey"], - request.match_info["apiCall"], - body + log.debug( + request.match_info["nodeInstanceUUID"], + request.match_info["outputKey"], + request.match_info["apiCall"], + body, ) raise NotImplementedError() + @login_required async def get_node_output_iframe(request: web.Request): """ entry point for iframe interaction with the node. diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_access.py b/services/web/server/src/simcore_service_webserver/projects/projects_access.py index d9fc5ca09a1..0a71e902d8a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_access.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_access.py @@ -1,4 +1,3 @@ - import jsondiff from aiohttp import web @@ -10,10 +9,10 @@ async def can_update_node_inputs(context): Returns True if user has permission to update inputs """ - db = context['dbapi'] - project_uuid = context['project_id'] - user_id = context['user_id'] - updated_project = context['new_data'] + db = context["dbapi"] + project_uuid = context["project_id"] + user_id = context["user_id"] + updated_project = context["new_data"] if project_uuid is None or user_id is None: return False @@ -28,8 +27,8 @@ async def can_update_node_inputs(context): try: for node in diffs["workbench"]: # can ONLY modify `inputs` fields set as ReadAndWrite - access = current_project['workbench'][node]["inputAccess"] - inputs = diffs["workbench"][node]['inputs'] + access = current_project["workbench"][node]["inputAccess"] + inputs = diffs["workbench"][node]["inputs"] for key in inputs: if access.get(key) != "ReadAndWrite": return False @@ -38,8 +37,7 @@ async def can_update_node_inputs(context): pass return False - return len(diffs)==0 # no changes - + return len(diffs) == 0 # no changes def setup_projects_access(app: web.Application): @@ -49,4 +47,6 @@ def setup_projects_access(app: web.Application): hrba = get_access_model(app) # TODO: add here also named permissions, i.e. 
all project.* operations
-    hrba.roles[UserRole.GUEST].check["project.workbench.node.inputs.update"] = can_update_node_inputs
+    hrba.roles[UserRole.GUEST].check[
+        "project.workbench.node.inputs.update"
+    ] = can_update_node_inputs
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
index dd1e3c5ecf1..1f4db02e8f6 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
@@ -6,9 +6,10 @@
 - return data and successful HTTP responses (or raise them)
 - upon failure raise errors that can also be HTTP responses
 """
+# pylint: disable=too-many-arguments
+
 import logging
-from asyncio import ensure_future, gather
-from pprint import pprint
+from pprint import pformat
 from typing import Dict, Optional
 from uuid import uuid4
 
@@ -17,6 +18,7 @@
 from servicelib.application_keys import APP_JSONSCHEMA_SPECS_KEY
 from servicelib.jsonschema_validation import validate_instance
 from servicelib.observer import observe
+from servicelib.utils import fire_and_forget_task, logged_gather
 
 from ..computation_api import delete_pipeline_db
 from ..director import director_api
@@ -26,20 +28,28 @@
                            delete_data_folders_of_project_node)
 from .config import CONFIG_SECTION_NAME
 from .projects_db import APP_PROJECT_DBAPI
-from .projects_exceptions import NodeNotFoundError, ProjectNotFoundError
+from .projects_exceptions import NodeNotFoundError
 from .projects_utils import clone_project_document
 
 log = logging.getLogger(__name__)
 
+
 def _is_node_dynamic(node_key: str) -> bool:
     return "/dynamic/" in node_key
 
+
 def validate_project(app: web.Application, project: Dict):
     project_schema = app[APP_JSONSCHEMA_SPECS_KEY][CONFIG_SECTION_NAME]
-    validate_instance(project, project_schema) # TODO: handl
+    validate_instance(project, project_schema)  # TODO: handle
 
-async def get_project_for_user(app: web.Application, project_uuid, user_id, *, include_templates=False) -> Dict:
+
+async def get_project_for_user(
+    app: web.Application,
+    project_uuid: str,
+    user_id: int,
+    *,
+    include_templates: bool = False
+) -> Dict:
     """ Returns a project accessible to user
 
-    :raises web.HTTPNotFound: if no match found
+    :raises ProjectNotFoundError: if no match found
     :rtype: Dict
     """
-    try:
-        db = app[APP_PROJECT_DBAPI]
-
-        project = None
-        if include_templates:
-            project = await db.get_template_project(project_uuid)
+    db = app[APP_PROJECT_DBAPI]
 
-        if not project:
-            project = await db.get_user_project(user_id, project_uuid)
+    project = None
+    if include_templates:
+        project = await db.get_template_project(project_uuid)
 
-        # TODO: how to handle when database has an invalid project schema???
-        # Notice that db model does not include a check on project schema.
-        validate_project(app, project)
-        return project
+    if not project:
+        project = await db.get_user_project(user_id, project_uuid)
 
-    except ProjectNotFoundError:
-        raise web.HTTPNotFound(reason="Project not found")
+    # TODO: how to handle when database has an invalid project schema???
+    # Notice that db model does not include a check on project schema.
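
As an aside, the validate_project() call that follows delegates to servicelib's validate_instance, which is essentially a plain jsonschema check. A minimal standalone sketch, assuming a hypothetical trimmed-down subset of project-v0.0.1.json:

    import jsonschema

    # Hypothetical, trimmed-down subset of project-v0.0.1.json, for illustration only
    PROJECT_SCHEMA = {
        "type": "object",
        "required": ["uuid", "name", "workbench"],
        "properties": {
            "uuid": {"type": "string"},
            "name": {"type": "string"},
            "workbench": {"type": "object"},
        },
    }

    def validate_project_document(project: dict) -> None:
        # Raises jsonschema.exceptions.ValidationError on an invalid document
        jsonschema.validate(instance=project, schema=PROJECT_SCHEMA)
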
+ validate_project(app, project) + return project -async def clone_project(request: web.Request, project: Dict, user_id, forced_copy_project_id: str ="") -> Dict: +async def clone_project( + request: web.Request, project: Dict, user_id: int, forced_copy_project_id: str = "" +) -> Dict: """Clones both document and data folders of a project - document @@ -87,117 +95,190 @@ async def clone_project(request: web.Request, project: Dict, user_id, forced_cop """ cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) - updated_project = await copy_data_folders_from_project(request.app, - project, cloned_project, nodes_map, user_id) + updated_project = await copy_data_folders_from_project( + request.app, project, cloned_project, nodes_map, user_id + ) return updated_project -async def start_project_interactive_services(request: web.Request, project: Dict, user_id: str) -> None: + +async def start_project_interactive_services( + request: web.Request, project: Dict, user_id: str +) -> None: # first get the services if they already exist - log.debug("getting running interactive services of project %s for user %s", project["uuid"], user_id) - running_services = await director_api.get_running_interactive_services(request.app, user_id, project["uuid"]) + log.debug( + "getting running interactive services of project %s for user %s", + project["uuid"], + user_id, + ) + running_services = await director_api.get_running_interactive_services( + request.app, user_id, project["uuid"] + ) running_service_uuids = [x["service_uuid"] for x in running_services] # now start them if needed - project_needed_services = {service_uuid:service for service_uuid, service in project["workbench"].items() \ - if _is_node_dynamic(service["key"]) and \ - service_uuid not in running_service_uuids} - - start_service_tasks = [director_api.start_service(request.app, - user_id=user_id, - project_id=project["uuid"], - service_key=service["key"], - service_version=service["version"], - service_uuid=service_uuid) for service_uuid, service in project_needed_services.items()] - await gather(*start_service_tasks) - + project_needed_services = { + service_uuid: service + for service_uuid, service in project["workbench"].items() + if _is_node_dynamic(service["key"]) + and service_uuid not in running_service_uuids + } -async def delete_project(request: web.Request, project_uuid: str, user_id: str) -> None: + start_service_tasks = [ + director_api.start_service( + request.app, + user_id=user_id, + project_id=project["uuid"], + service_key=service["key"], + service_version=service["version"], + service_uuid=service_uuid, + ) + for service_uuid, service in project_needed_services.items() + ] + await logged_gather(*start_service_tasks, reraise=True) + + +async def delete_project(request: web.Request, project_uuid: str, user_id: int) -> None: await delete_project_from_db(request, project_uuid, user_id) + async def remove_services_and_data(): await remove_project_interactive_services(user_id, project_uuid, request.app) await delete_project_data(request, project_uuid, user_id) - ensure_future(remove_services_and_data()) + + fire_and_forget_task(remove_services_and_data()) + @observe(event="SIGNAL_PROJECT_CLOSE") -async def remove_project_interactive_services(user_id: Optional[str], project_uuid: Optional[str], app: web.Application) -> None: +async def remove_project_interactive_services( + user_id: Optional[int], project_uuid: Optional[str], app: web.Application +) -> None: if not user_id and not project_uuid: raise 
ValueError("Expected either user or project") - list_of_services = await director_api.get_running_interactive_services(app, - project_id=project_uuid, - user_id=user_id) - stop_tasks = [director_api.stop_service(app, service["service_uuid"]) for service in list_of_services] + list_of_services = await director_api.get_running_interactive_services( + app, project_id=project_uuid, user_id=user_id + ) + stop_tasks = [ + director_api.stop_service(app, service["service_uuid"]) + for service in list_of_services + ] if stop_tasks: - await gather(*stop_tasks) + await logged_gather(*stop_tasks, reraise=False) + -async def delete_project_data(request: web.Request, project_uuid: str, user_id: str) -> None: +async def delete_project_data( + request: web.Request, project_uuid: str, user_id: int +) -> None: # requests storage to delete all project's stored data await delete_data_folders_of_project(request.app, project_uuid, user_id) -async def delete_project_from_db(request: web.Request, project_uuid: str, user_id: str) -> None: - db = request.config_dict[APP_PROJECT_DBAPI] - try: - await delete_pipeline_db(request.app, project_uuid) - await db.delete_user_project(user_id, project_uuid) - except ProjectNotFoundError: - # TODO: add flag in query to determine whether to respond if error? - raise web.HTTPNotFound +async def delete_project_from_db( + request: web.Request, project_uuid: str, user_id: int +) -> None: + db = request.config_dict[APP_PROJECT_DBAPI] + await delete_pipeline_db(request.app, project_uuid) + await db.delete_user_project(user_id, project_uuid) # requests storage to delete all project's stored data await delete_data_folders_of_project(request.app, project_uuid, user_id) -async def add_project_node(request: web.Request, project_uuid: str, user_id: str, service_key: str, service_version: str, service_id: Optional[str]) -> str: # pylint: disable=too-many-arguments - log.debug("starting node %s:%s in project %s for user %s", service_key, service_version, project_uuid, user_id) + +async def add_project_node( + request: web.Request, + project_uuid: str, + user_id: int, + service_key: str, + service_version: str, + service_id: Optional[str], +) -> str: + log.debug( + "starting node %s:%s in project %s for user %s", + service_key, + service_version, + project_uuid, + user_id, + ) node_uuid = service_id if service_id else str(uuid4()) if _is_node_dynamic(service_key): - await director_api.start_service(request.app, user_id, project_uuid, service_key, service_version, node_uuid) + await director_api.start_service( + request.app, user_id, project_uuid, service_key, service_version, node_uuid + ) return node_uuid -async def get_project_node(request: web.Request, project_uuid: str, user_id:str, node_id: str): - log.debug("getting node %s in project %s for user %s", node_id, project_uuid, user_id) - list_of_interactive_services = await director_api.get_running_interactive_services(request.app, - project_id=project_uuid, - user_id=user_id) +async def get_project_node( + request: web.Request, project_uuid: str, user_id: int, node_id: str +): + log.debug( + "getting node %s in project %s for user %s", node_id, project_uuid, user_id + ) + + list_of_interactive_services = await director_api.get_running_interactive_services( + request.app, project_id=project_uuid, user_id=user_id + ) # get the project if it is running for service in list_of_interactive_services: if service["service_uuid"] == node_id: return service # the service is not running, it's a computational service maybe # TODO: find out if 
computational service is running if not throw a 404 since it's not around - return { - "service_uuid": node_id, - "service_state": "idle" - } + return {"service_uuid": node_id, "service_state": "idle"} + -async def delete_project_node(request: web.Request, project_uuid: str, user_id: str, node_uuid: str) -> None: - log.debug("deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id) +async def delete_project_node( + request: web.Request, project_uuid: str, user_id: int, node_uuid: str +) -> None: + log.debug( + "deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id + ) - list_of_services = await director_api.get_running_interactive_services(request.app, - project_id=project_uuid, - user_id=user_id) + list_of_services = await director_api.get_running_interactive_services( + request.app, project_id=project_uuid, user_id=user_id + ) # stop the service if it is running for service in list_of_services: if service["service_uuid"] == node_uuid: await director_api.stop_service(request.app, node_uuid) break # remove its data if any - await delete_data_folders_of_project_node(request.app, project_uuid, node_uuid, user_id) - - -async def update_project_node_progress(app: web.Application, user_id: str, project_id: str, node_id: str, progress: float) -> Optional[Dict]: - log.debug("updating node %s progress in project %s for user %s with %s", node_id, project_id, user_id, progress) + await delete_data_folders_of_project_node( + request.app, project_uuid, node_uuid, user_id + ) + + +async def update_project_node_progress( + app: web.Application, user_id: int, project_id: str, node_id: str, progress: float +) -> Optional[Dict]: + log.debug( + "updating node %s progress in project %s for user %s with %s", + node_id, + project_id, + user_id, + progress, + ) project = await get_project_for_user(app, project_id, user_id) if not node_id in project["workbench"]: raise NodeNotFoundError(project_id, node_id) - project["workbench"][node_id]["progress"] = int(100.0 * float(progress) + .5) + project["workbench"][node_id]["progress"] = int(100.0 * float(progress) + 0.5) db = app[APP_PROJECT_DBAPI] await db.update_user_project(project, user_id, project_id) return project["workbench"][node_id] -async def update_project_node_outputs(app: web.Application, user_id: str, project_id: str, node_id: str, data: Optional[Dict]) -> Optional[Dict]: - log.debug("updating node %s outputs in project %s for user %s with %s", node_id, project_id, user_id, pprint(data)) + +async def update_project_node_outputs( + app: web.Application, + user_id: int, + project_id: str, + node_id: str, + data: Optional[Dict], +) -> Optional[Dict]: + log.debug( + "updating node %s outputs in project %s for user %s with %s", + node_id, + project_id, + user_id, + pformat(data), + ) project = await get_project_for_user(app, project_id, user_id) if not node_id in project["workbench"]: raise NodeNotFoundError(project_id, node_id) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_db.py b/services/web/server/src/simcore_service_webserver/projects/projects_db.py index dc5baaf743a..a51a9aa745a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_db.py @@ -238,7 +238,7 @@ async def load_template_projects(self, *, only_published=False) -> List[Dict]: async def __load_projects(self, conn: SAConnection, query) -> List[Dict]: api_projects: List[Dict] = [] # API model-compatible projects 
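
Note on logged_gather, which the hunks above swap in for bare asyncio.gather: the assumed semantics, sketched here as a simplified standalone helper, are to run all awaitables concurrently, log every failure, and only optionally re-raise the first exception once everything has finished:

    import asyncio
    import logging

    log = logging.getLogger(__name__)

    async def logged_gather_sketch(*awaitables, reraise: bool = True) -> list:
        # return_exceptions=True keeps one failing awaitable from cancelling the rest
        results = await asyncio.gather(*awaitables, return_exceptions=True)
        for result in results:
            if isinstance(result, BaseException):
                log.error("gathered task raised: %s", result)
        if reraise:
            for result in results:
                if isinstance(result, BaseException):
                    raise result
        return results
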
-        db_projects: List[Dict] = [] # DB model-compatible projects
+        db_projects: List[Dict] = []  # DB model-compatible projects
         async for row in conn.execute(query):
             prj = dict(row.items())
             log.debug("found project: %s", prj)
@@ -405,7 +405,6 @@ async def update_user_project(
             )
             await conn.execute(query)
 
-
     async def delete_user_project(self, user_id: int, project_uuid: str):
         log.info("Deleting project %s for user %s", project_uuid, user_id)
         async with self.engine.acquire() as conn:
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py
index 3c6fb28718c..15a9eb40fa0 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py
@@ -1,29 +1,39 @@
 """Defines the different exceptions that may arise in the projects subpackage"""
 
+
 class ProjectsException(Exception):
     """Basic exception for errors raised in projects"""
+
     def __init__(self, msg=None):
         if msg is None:
             msg = "Unexpected error occurred in projects subpackage"
         super(ProjectsException, self).__init__(msg)
 
+
 class ProjectInvalidRightsError(ProjectsException):
     """Invalid rights to access project"""
+
     def __init__(self, user_id, project_uuid):
-        msg = "User {} has no rights to access project with uuid {}".format(user_id, project_uuid)
+        msg = "User {} has no rights to access project with uuid {}".format(
+            user_id, project_uuid
+        )
         super(ProjectInvalidRightsError, self).__init__(msg)
         self.user_id = user_id
         self.project_uuid = project_uuid
 
+
 class ProjectNotFoundError(ProjectsException):
     """Project was not found in DB"""
+
     def __init__(self, project_uuid):
         msg = "Project with uuid {} not found".format(project_uuid)
         super(ProjectNotFoundError, self).__init__(msg)
         self.project_uuid = project_uuid
 
+
 class NodeNotFoundError(ProjectsException):
     """Node was not found in project"""
+
     def __init__(self, project_uuid: str, node_uuid: str):
         msg = f"Node {node_uuid} not found in project {project_uuid}"
         super(NodeNotFoundError, self).__init__(msg)
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py b/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py
index c3da640a63a..7583e0bb3be 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py
@@ -10,20 +10,21 @@
 log = logging.getLogger(__name__)
 
+
 class Fake:
     """ Holds fake database of projects and its association to users for testing purposes
 
     Keeps also generated data
     """
+
     # TODO: auto generate data from specs and faker tool. Use http://json-schema-faker.js.org
     ProjectItem = namedtuple("ProjectItem", "id template data".split())
 
     # fake databases
-    projects = {} # project_id -> ProjectItem
-    user_to_projects_map = defaultdict(list) # user_id -> [project_id, ...]
+    projects = {}  # project_id -> ProjectItem
+    user_to_projects_map = defaultdict(list)  # user_id -> [project_id, ...]
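
A hypothetical test snippet showing how the Fake registry above is meant to be driven (class and method names are taken from this diff; the project payload is made up):

    from simcore_service_webserver.projects.projects_fakes import Fake

    # A project registered with a user_id is a user project, not a template
    Fake.add_projects([{"uuid": "p-123", "name": "demo", "workbench": {}}], user_id=1)

    assert Fake.projects["p-123"].data["name"] == "demo"
    assert "p-123" in Fake.user_to_projects_map[1]

    Fake.reset()  # wipe the fake database between tests
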
@classmethod def add_projects(cls, projects, user_id=None): @@ -31,8 +32,10 @@ def add_projects(cls, projects, user_id=None): """ for prj in projects: - pid = prj['uuid'] - cls.projects[pid] = cls.ProjectItem(id=pid, template=user_id is None, data=deepcopy(prj)) + pid = prj["uuid"] + cls.projects[pid] = cls.ProjectItem( + id=pid, template=user_id is None, data=deepcopy(prj) + ) if user_id is not None: cls.user_to_projects_map[user_id].append(pid) @@ -49,8 +52,8 @@ def load_template_projects(cls): projects = projects + json.load(f) for prj in projects: - pid = prj['uuid'] - cls.projects[pid] = cls.ProjectItem(id=pid, template=True, data=prj) + pid = prj["uuid"] + cls.projects[pid] = cls.ProjectItem(id=pid, template=True, data=prj) @classmethod def reset(cls): diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 916bb99a050..0ae3f78a3ef 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -1,14 +1,14 @@ - """ Handlers for CRUD operations on /projects/ """ -import asyncio import json import logging from aiohttp import web from jsonschema import ValidationError +from servicelib.utils import fire_and_forget_task + from ..computation_api import update_pipeline_db from ..login.decorators import RQT_USERID_KEY, login_required from ..resource_manager.websocket_manager import managed_resource @@ -18,7 +18,7 @@ from .projects_exceptions import (ProjectInvalidRightsError, ProjectNotFoundError) -OVERRIDABLE_DOCUMENT_KEYS = ['name', 'description', 'thumbnail', 'prjOwner'] +OVERRIDABLE_DOCUMENT_KEYS = ["name", "description", "thumbnail", "prjOwner"] # TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json log = logging.getLogger(__name__) @@ -26,40 +26,45 @@ @login_required async def create_projects(request: web.Request): - from .projects_api import clone_project # TODO: keep here since is async and parser thinks it is a handler + from .projects_api import ( + clone_project, + ) # TODO: keep here since is async and parser thinks it is a handler # pylint: disable=too-many-branches await check_permission(request, "project.create") - await check_permission(request, "services.pipeline.*") # due to update_pipeline_db + await check_permission(request, "services.pipeline.*") # due to update_pipeline_db user_id = request[RQT_USERID_KEY] db = request.config_dict[APP_PROJECT_DBAPI] - template_uuid = request.query.get('from_template') - as_template = request.query.get('as_template') + template_uuid = request.query.get("from_template") + as_template = request.query.get("as_template") try: project = {} - if as_template: # create template from + if as_template: # create template from await check_permission(request, "project.template.create") # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
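
clone_project(), used in both branches below, delegates the document copy to clone_project_document(), which derives a fresh id for every workbench node deterministically from the new project uuid via uuid5. A rough sketch of just that remapping step; the real helper also rewrites references to the old node ids inside each node's payload:

    import uuid

    def remap_workbench_nodes(workbench: dict, new_project_uuid: uuid.UUID) -> dict:
        # uuid5 is deterministic: cloning the same source into the same target
        # project uuid always yields the same node ids
        nodes_map = {
            old_id: str(uuid.uuid5(new_project_uuid, str(old_id))) for old_id in workbench
        }
        return {nodes_map[old_id]: node for old_id, node in workbench.items()}
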
from .projects_api import get_project_for_user - source_project = await get_project_for_user(request.app, + source_project = await get_project_for_user( + request.app, project_uuid=as_template, user_id=user_id, - include_templates=False + include_templates=False, ) project = await clone_project(request, source_project, user_id) - elif template_uuid: # create from template + elif template_uuid: # create from template template_prj = await db.get_template_project(template_uuid) if not template_prj: - raise web.HTTPNotFound(reason="Invalid template uuid {}".format(template_uuid)) + raise web.HTTPNotFound( + reason="Invalid template uuid {}".format(template_uuid) + ) project = await clone_project(request, template_prj, user_id) - #FIXME: parameterized inputs should get defaults provided by service + # FIXME: parameterized inputs should get defaults provided by service # overrides with body if request.has_body: @@ -78,20 +83,22 @@ async def create_projects(request: web.Request): projects_api.validate_project(request.app, project) # update metadata (uuid, timestamps, ownership) and save - await db.add_project(project, user_id, force_as_template=as_template is not None) + await db.add_project( + project, user_id, force_as_template=as_template is not None + ) # This is a new project and every new graph needs to be reflected in the pipeline db await update_pipeline_db(request.app, project["uuid"], project["workbench"]) except ValidationError: raise web.HTTPBadRequest(reason="Invalid project data") - + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project not found") except ProjectInvalidRightsError: raise web.HTTPUnauthorized else: - raise web.HTTPCreated(text=json.dumps(project), - content_type='application/json') + raise web.HTTPCreated(text=json.dumps(project), content_type="application/json") @login_required @@ -101,7 +108,7 @@ async def list_projects(request: web.Request): # TODO: implement all query parameters as # in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html user_id = request[RQT_USERID_KEY] - ptype = request.query.get('type', 'all') # TODO: get default for oaspecs + ptype = request.query.get("type", "all") # TODO: get default for oaspecs db = request.config_dict[APP_PROJECT_DBAPI] # TODO: improve dbapi to list project @@ -109,13 +116,15 @@ async def list_projects(request: web.Request): if ptype in ("template", "all"): projects_list += await db.load_template_projects() - if ptype in ("user", "all"): # standard only (notice that templates will only) - projects_list += await db.load_user_projects(user_id=user_id, exclude_templates=True) + if ptype in ("user", "all"): # standard only (notice that templates will only) + projects_list += await db.load_user_projects( + user_id=user_id, exclude_templates=True + ) - start = int(request.query.get('start', 0)) - count = int(request.query.get('count',len(projects_list))) + start = int(request.query.get("start", 0)) + count = int(request.query.get("count", len(projects_list))) - stop = min(start+count, len(projects_list)) + stop = min(start + count, len(projects_list)) projects_list = projects_list[start:stop] # validate response @@ -128,7 +137,7 @@ async def list_projects(request: web.Request): log.exception("Skipping invalid project from list") continue - return {'data': validated_projects} + return {"data": validated_projects} @login_required @@ -141,16 +150,18 @@ async def get_project(request: web.Request): from .projects_api import 
get_project_for_user project_uuid = request.match_info.get("project_id") + try: + project = await get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=request[RQT_USERID_KEY], + include_templates=True, + ) - project = await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=request[RQT_USERID_KEY], - include_templates=True - ) - - return { - 'data': project - } + return {"data": project} + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + @login_required @@ -169,30 +180,34 @@ async def replace_project(request: web.Request): :raises web.HTTPNotFound: cannot find project id in repository """ - await check_permission(request, "services.pipeline.*") # due to update_pipeline_db + await check_permission(request, "services.pipeline.*") # due to update_pipeline_db user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") - replace_pipeline = request.query.get("run", False) # FIXME: Actually was never called. CHECK if logic still applies (issue #1176) + replace_pipeline = request.query.get( + "run", False + ) # FIXME: Actually was never called. CHECK if logic still applies (issue #1176) new_project = await request.json() - db = request.config_dict[APP_PROJECT_DBAPI] - await check_permission(request, "project.update | project.workbench.node.inputs.update", - context={ - 'dbapi': db, - 'project_id': project_uuid, - 'user_id': user_id, - 'new_data': new_project - }) + await check_permission( + request, + "project.update | project.workbench.node.inputs.update", + context={ + "dbapi": db, + "project_id": project_uuid, + "user_id": user_id, + "new_data": new_project, + }, + ) try: projects_api.validate_project(request.app, new_project) await db.update_user_project(new_project, user_id, project_uuid) - await update_pipeline_db(request.app, - project_uuid, new_project["workbench"], - replace_pipeline) + await update_pipeline_db( + request.app, project_uuid, new_project["workbench"], replace_pipeline + ) except ValidationError: raise web.HTTPBadRequest @@ -200,7 +215,8 @@ async def replace_project(request: web.Request): except ProjectNotFoundError: raise web.HTTPNotFound - return {'data': new_project} + return {"data": new_project} + @login_required async def delete_project(request: web.Request): @@ -210,51 +226,57 @@ async def delete_project(request: web.Request): # first check if the project exists user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") - project = await projects_api.get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) - with managed_resource(user_id, None, request.app) as rt: - other_users = await rt.find_users_of_resource("project_id", project_uuid) - if other_users: - message = "Project is opened by another user. It cannot be deleted." - if user_id in other_users: - message = "Project is still open. It cannot be deleted until it is closed." - # we cannot delete that project - raise web.HTTPForbidden(reason=message) + try: + project = await projects_api.get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True + ) + with managed_resource(user_id, None, request.app) as rt: + other_users = await rt.find_users_of_resource("project_id", project_uuid) + if other_users: + message = "Project is opened by another user. It cannot be deleted." + if user_id in other_users: + message = ( + "Project is still open. 
It cannot be deleted until it is closed." + ) + # we cannot delete that project + raise web.HTTPForbidden(reason=message) + + + await projects_api.delete_project(request, project_uuid, user_id) + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + - await projects_api.delete_project(request, project_uuid, user_id) + raise web.HTTPNoContent(content_type="application/json") - raise web.HTTPNoContent(content_type='application/json') @login_required async def open_project(request: web.Request) -> web.Response: # TODO: replace by decorator since it checks again authentication await check_permission(request, "project.open") - - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - from .projects_api import get_project_for_user - user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") client_session_id = await request.json() + try: + with managed_resource(user_id, client_session_id, request.app) as rt: + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! + from .projects_api import get_project_for_user + project = await get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=user_id, + include_templates=True, + ) + await rt.add("project_id", project_uuid) - with managed_resource(user_id, client_session_id, request.app) as rt: - project = await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) - await rt.add("project_id", project_uuid) + # user id opened project uuid + await projects_api.start_project_interactive_services(request, project, user_id) - # user id opened project uuid - await projects_api.start_project_interactive_services(request, project, user_id) + return {"data": project} + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") - return { - 'data': project - } @login_required async def close_project(request: web.Request) -> web.Response: @@ -265,46 +287,60 @@ async def close_project(request: web.Request) -> web.Response: project_uuid = request.match_info.get("project_id") client_session_id = await request.json() - # ensure the project exists - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - from .projects_api import get_project_for_user - - with managed_resource(user_id, client_session_id, request.app) as rt: - project = await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) - await rt.remove("project_id") - other_users = await rt.find_users_of_resource("project_id", project_uuid) - if not other_users: - # only remove the services if no one else is using them now - asyncio.ensure_future(projects_api.remove_project_interactive_services(user_id, project_uuid, request.app)) - + try: + # ensure the project exists + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
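
delete_project above and close_project below lean on the same resource-registry question: does any client session still hold this project open? Pulled out as a hypothetical helper (the names mirror this diff; it is not part of the changeset):

    from aiohttp import web

    from simcore_service_webserver.resource_manager.websocket_manager import managed_resource

    async def project_is_free(app: web.Application, user_id: str, project_uuid: str) -> bool:
        # True when no client session has the project registered as open in Redis
        with managed_resource(user_id, None, app) as rt:
            other_users = await rt.find_users_of_resource("project_id", project_uuid)
            return not other_users
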
+ from .projects_api import get_project_for_user + + with managed_resource(user_id, client_session_id, request.app) as rt: + project = await get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=user_id, + include_templates=True, + ) + await rt.remove("project_id") + other_users = await rt.find_users_of_resource("project_id", project_uuid) + if not other_users: + # only remove the services if no one else is using them now + fire_and_forget_task( + projects_api.remove_project_interactive_services( + user_id, project_uuid, request.app + ) + ) + + raise web.HTTPNoContent(content_type="application/json") + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + - raise web.HTTPNoContent(content_type='application/json') @login_required async def get_active_project(request: web.Request) -> web.Response: await check_permission(request, "project.read") user_id = request[RQT_USERID_KEY] client_session_id = request.query["client_session_id"] - project = None - with managed_resource(user_id, client_session_id, request.app) as rt: - # get user's projects - list_project_ids = await rt.find("project_id") - if list_project_ids: - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - from .projects_api import get_project_for_user - project = await get_project_for_user(request.app, - project_uuid=list_project_ids[0], - user_id=user_id, - include_templates=True - ) - return { - 'data': project - } + try: + project = None + with managed_resource(user_id, client_session_id, request.app) as rt: + # get user's projects + list_project_ids = await rt.find("project_id") + if list_project_ids: + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! + from .projects_api import get_project_for_user + + project = await get_project_for_user( + request.app, + project_uuid=list_project_ids[0], + user_id=user_id, + include_templates=True, + ) + + return {"data": project} + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project not found") + @login_required async def create_node(request: web.Request) -> web.Response: @@ -314,25 +350,29 @@ async def create_node(request: web.Request) -> web.Response: project_uuid = request.match_info.get("project_id") body = await request.json() - # ensure the project exists - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - from .projects_api import get_project_for_user - await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) - data = { - "node_id": await projects_api.add_project_node( - request, - project_uuid, - user_id, - body["service_key"], - body["service_version"], - body["service_id"] if "service_id" in body else None + try: + # ensure the project exists + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
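
close_project above now hands the teardown of interactive services to servicelib's fire_and_forget_task instead of a bare asyncio.ensure_future. A simplified sketch of the assumed semantics: schedule the coroutine without awaiting it, but make sure a failure still lands in the logs instead of vanishing:

    import asyncio
    import logging

    log = logging.getLogger(__name__)

    def fire_and_forget_sketch(coro) -> asyncio.Task:
        task = asyncio.ensure_future(coro)

        def _on_done(fut: asyncio.Future) -> None:
            # Surface exceptions from the detached task in the logs
            if not fut.cancelled() and fut.exception() is not None:
                log.error("fire-and-forget task failed", exc_info=fut.exception())

        task.add_done_callback(_on_done)
        return task
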
+ from .projects_api import get_project_for_user + + await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True ) - } - return web.json_response({'data': data}, status=web.HTTPCreated.status_code) + data = { + "node_id": await projects_api.add_project_node( + request, + project_uuid, + user_id, + body["service_key"], + body["service_version"], + body["service_id"] if "service_id" in body else None, + ) + } + return web.json_response({"data": data}, status=web.HTTPCreated.status_code) + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + + @login_required async def get_node(request: web.Request) -> web.Response: @@ -341,19 +381,23 @@ async def get_node(request: web.Request) -> web.Response: user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") node_uuid = request.match_info.get("node_id") - # ensure the project exists - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - from .projects_api import get_project_for_user - await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True + try: + # ensure the project exists + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! + from .projects_api import get_project_for_user + + await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True ) - node_details = await projects_api.get_project_node(request, project_uuid, user_id, node_uuid) - return { - 'data': node_details - } + node_details = await projects_api.get_project_node( + request, project_uuid, user_id, node_uuid + ) + return {"data": node_details} + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + + @login_required async def delete_node(request: web.Request) -> web.Response: @@ -362,39 +406,39 @@ async def delete_node(request: web.Request) -> web.Response: user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") node_uuid = request.match_info.get("node_id") - # ensure the project exists - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - from .projects_api import get_project_for_user - await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True + try: + # ensure the project exists + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
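
Nearly every handler in this patch now wraps its body in the same try/except that maps the domain-level ProjectNotFoundError onto an HTTP 404. If the repetition ever becomes a burden, it could be factored into a decorator; a hypothetical sketch, not part of this changeset:

    from functools import wraps

    from aiohttp import web

    from .projects_exceptions import ProjectNotFoundError

    def handle_project_not_found(handler):
        @wraps(handler)
        async def wrapper(request: web.Request):
            try:
                return await handler(request)
            except ProjectNotFoundError as err:
                raise web.HTTPNotFound(reason=str(err))
        return wrapper
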
+ from .projects_api import get_project_for_user + + await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True ) - await projects_api.delete_project_node(request, project_uuid, user_id, node_uuid) + await projects_api.delete_project_node(request, project_uuid, user_id, node_uuid) - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") + except ProjectNotFoundError: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") @login_required async def add_tag(request: web.Request): - await check_permission(request, 'project.tag.*') + await check_permission(request, "project.tag.*") uid, db = request[RQT_USERID_KEY], request.config_dict[APP_PROJECT_DBAPI] - tag_id, study_uuid = request.match_info.get('tag_id'), request.match_info.get('study_uuid') - return await db.add_tag( - project_uuid=study_uuid, - user_id=uid, - tag_id=int(tag_id) + tag_id, study_uuid = ( + request.match_info.get("tag_id"), + request.match_info.get("study_uuid"), ) + return await db.add_tag(project_uuid=study_uuid, user_id=uid, tag_id=int(tag_id)) @login_required async def remove_tag(request: web.Request): - await check_permission(request, 'project.tag.*') + await check_permission(request, "project.tag.*") uid, db = request[RQT_USERID_KEY], request.config_dict[APP_PROJECT_DBAPI] - tag_id, study_uuid = request.match_info.get('tag_id'), request.match_info.get('study_uuid') - return await db.remove_tag( - project_uuid=study_uuid, - user_id=uid, - tag_id=int(tag_id) + tag_id, study_uuid = ( + request.match_info.get("tag_id"), + request.match_info.get("study_uuid"), ) + return await db.remove_tag(project_uuid=study_uuid, user_id=uid, tag_id=int(tag_id)) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_models.py b/services/web/server/src/simcore_service_webserver/projects/projects_models.py index 7513b4fd5af..82d7e2e6191 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_models.py @@ -2,10 +2,14 @@ Facade """ -from simcore_postgres_database.webserver_models import (ProjectType, projects, - user_to_projects) +from simcore_postgres_database.webserver_models import ( + ProjectType, + projects, + user_to_projects, +) __all__ = [ - "projects", "ProjectType", + "projects", + "ProjectType", "user_to_projects", ] diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_utils.py b/services/web/server/src/simcore_service_webserver/projects/projects_utils.py index 6b709265563..c8a59316806 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_utils.py @@ -2,14 +2,17 @@ import re import uuid as uuidlib from copy import deepcopy -from typing import Dict, Tuple +from typing import AnyStr, Dict, Match, Optional, Tuple from servicelib.decorators import safe_return log = logging.getLogger(__name__) variable_pattern = re.compile(r"^{{\W*(\w+)\W*}}$") -def clone_project_document(project: Dict, forced_copy_project_id: str ="") -> Tuple[Dict, Dict]: + +def clone_project_document( + project: Dict, forced_copy_project_id: str = "" +) -> Tuple[Dict, Dict]: project_copy = deepcopy(project) # Update project id @@ -17,16 +20,16 @@ def clone_project_document(project: Dict, forced_copy_project_id: str ="") -> Tu if forced_copy_project_id: project_copy_uuid = 
uuidlib.UUID(forced_copy_project_id) else: - project_copy_uuid = uuidlib.uuid1() # random project id + project_copy_uuid = uuidlib.uuid1() # random project id - project_copy['uuid'] = str(project_copy_uuid) + project_copy["uuid"] = str(project_copy_uuid) # Workbench nodes shall be unique within the project context def _create_new_node_uuid(old_uuid): - return str( uuidlib.uuid5(project_copy_uuid, str(old_uuid)) ) + return str(uuidlib.uuid5(project_copy_uuid, str(old_uuid))) nodes_map = {} - for node_uuid in project.get('workbench', {}).keys(): + for node_uuid in project.get("workbench", {}).keys(): nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) def _replace_uuids(node): @@ -44,12 +47,13 @@ def _replace_uuids(node): node[key] = _replace_uuids(value) return node - project_copy['workbench'] = _replace_uuids(project_copy.get('workbench', {})) + project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) return project_copy, nodes_map - @safe_return(if_fails_return=False, logger=log) -def substitute_parameterized_inputs(parameterized_project: Dict, parameters: Dict) -> Dict: +def substitute_parameterized_inputs( + parameterized_project: Dict, parameters: Dict +) -> Dict: """ Substitutes parameterized r/w inputs NOTE: project is is changed @@ -69,20 +73,30 @@ def _normalize_value(s): except ValueError: return s - for node in project['workbench'].values(): - inputs = node.get('inputs', {}) - access = node.get('inputAccess', {}) + def _get_param_input_match(name, value, access) -> Optional[Match[AnyStr]]: + if isinstance(value, str) and access.get(name, "ReadAndWrite") == "ReadAndWrite": + match = variable_pattern.match(value) + return match + return None + + for node in project["workbench"].values(): + inputs = node.get("inputs", {}) + access = node.get("inputAccess", {}) new_inputs = {} + for name, value in inputs.items(): - if isinstance(value, str) and access.get(name, "ReadAndWrite") == "ReadAndWrite": + match = _get_param_input_match(name, value, access) + if match: # TODO: use jinja2 to interpolate expressions? - m = variable_pattern.match(value) - if m: - value = m.group(1) - if value in parameters: - new_inputs[name] = _normalize_value(parameters[value]) - else: - log.warning("Could not resolve parameter %s. No value provided in %s", value, parameters) + value = match.group(1) + if value in parameters: + new_inputs[name] = _normalize_value(parameters[value]) + else: + log.warning( + "Could not resolve parameter %s. 
No value provided in %s", + value, + parameters, + ) inputs.update(new_inputs) return project @@ -100,11 +114,15 @@ def is_graph_equal(lhs_workbench: Dict, rhs_workbench: Dict) -> bool: for node_id, node in rhs_workbench.items(): # same nodes - if not all(node.get(k) == lhs_workbench[node_id].get(k) for k in ['key', 'version'] ): + if not all( + node.get(k) == lhs_workbench[node_id].get(k) for k in ["key", "version"] + ): raise ValueError() # same connectivity (edges) - if not set(node.get('inputNodes')) == set(lhs_workbench[node_id].get('inputNodes')): + if not set(node.get("inputNodes")) == set( + lhs_workbench[node_id].get("inputNodes") + ): raise ValueError() # same input values diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py b/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py index 0f7dc8cae1b..8f0c6fb8754 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py @@ -13,8 +13,11 @@ from servicelib.application_keys import APP_CONFIG_KEY from servicelib.application_setup import ModuleCategory, app_module_setup -from .config import (APP_CLIENT_SOCKET_REGISTRY_KEY, - APP_RESOURCE_MANAGER_TASKS_KEY, CONFIG_SECTION_NAME) +from .config import ( + APP_CLIENT_SOCKET_REGISTRY_KEY, + APP_RESOURCE_MANAGER_TASKS_KEY, + CONFIG_SECTION_NAME, +) from .garbage_collector import setup as setup_garbage_collector from .redis import setup_redis_client from .registry import RedisResourceRegistry @@ -24,6 +27,7 @@ MODULE_NAME = __name__.split(".")[-1] module_name = module_name = __name__.replace(".__init__", "") + @app_module_setup(module_name, ModuleCategory.SYSTEM, logger=logger) def setup(app: web.Application) -> bool: """Sets up resource manager subsystem in the application @@ -32,8 +36,9 @@ def setup(app: web.Application) -> bool: cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] app[APP_RESOURCE_MANAGER_TASKS_KEY] = [] setup_redis_client(app) - app[APP_CLIENT_SOCKET_REGISTRY_KEY] = RedisResourceRegistry(app) if cfg["redis"]["enabled"] \ - else None + app[APP_CLIENT_SOCKET_REGISTRY_KEY] = ( + RedisResourceRegistry(app) if cfg["redis"]["enabled"] else None + ) setup_garbage_collector(app) return True @@ -41,6 +46,4 @@ def setup(app: web.Application) -> bool: # alias setup_resource_manager = setup -__all__ = ( - 'setup_resource_manager' -) +__all__ = "setup_resource_manager" diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/config.py b/services/web/server/src/simcore_service_webserver/resource_manager/config.py index a0f52e9a197..a086d96bfcd 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/config.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/config.py @@ -8,26 +8,35 @@ from servicelib.application_keys import APP_CONFIG_KEY -CONFIG_SECTION_NAME = 'resource_manager' +CONFIG_SECTION_NAME = "resource_manager" APP_CLIENT_REDIS_CLIENT_KEY = __name__ + ".resource_manager.redis_client" APP_CLIENT_SOCKET_REGISTRY_KEY = __name__ + ".resource_manager.registry" APP_RESOURCE_MANAGER_TASKS_KEY = __name__ + ".resource_manager.tasks.key" APP_GARBAGE_COLLECTOR_KEY = __name__ + ".resource_manager.garbage_collector_key" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()), - T.Key("resource_deletion_timeout_seconds", default=900, optional=True): T.Int(), - T.Key("garbage_collection_interval_seconds", 
default=30, optional=True): T.Int(), - T.Key("redis", optional=False): T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("host", default="redis", optional=True): T.String(), - T.Key("port", default=6793, optional=True): T.Int(), - }), -}) +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()), + T.Key("resource_deletion_timeout_seconds", default=900, optional=True): T.Int(), + T.Key( + "garbage_collection_interval_seconds", default=30, optional=True + ): T.Int(), + T.Key("redis", optional=False): T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Bool(), + T.Key("host", default="redis", optional=True): T.String(), + T.Key("port", default=6793, optional=True): T.Int(), + } + ), + } +) def get_service_deletion_timeout(app: web.Application) -> int: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]["resource_deletion_timeout_seconds"] + def get_garbage_collector_interval(app: web.Application) -> int: - return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]["garbage_collection_interval_seconds"] + return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME][ + "garbage_collection_interval_seconds" + ] diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py index 914177a4768..3fe25762d70 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py @@ -12,6 +12,7 @@ from aiohttp import web from servicelib.observer import emit +from servicelib.utils import logged_gather from .config import APP_GARBAGE_COLLECTOR_KEY, get_garbage_collector_interval from .registry import RedisResourceRegistry, get_registry @@ -19,7 +20,7 @@ logger = logging.getLogger(__name__) -async def collect_garbage(registry: RedisResourceRegistry, app: web.Application): +async def collect_garbage(registry: RedisResourceRegistry, app: web.Application): logger.info("collecting garbage...") alive_keys, dead_keys = await registry.get_all_resource_keys() logger.debug("potential dead keys: %s", dead_keys) @@ -34,24 +35,39 @@ async def collect_garbage(registry: RedisResourceRegistry, app: web.Application logger.debug("found the following resources: %s", resources) # find if there are alive entries using these resources for resource_name, resource_value in resources.items(): - other_keys = [x for x in await registry.find_keys((resource_name, resource_value)) if x != key] + other_keys = [ + x + for x in await registry.find_keys((resource_name, resource_value)) + if x != key + ] # the resource ref can be closed anyway - logger.debug( - "removing resource entry: %s: %s", key, resources) + logger.debug("removing resource entry: %s: %s", key, resources) await registry.remove_resource(key, resource_name) # check if the resource is still in use in the alive keys if not any(elem in alive_keys for elem in other_keys): # remove the resource from the other keys as well - remove_tasks = [registry.remove_resource( - x, resource_name) for x in other_keys] + remove_tasks = [ + registry.remove_resource(x, resource_name) for x in other_keys + ] if remove_tasks: logger.debug( - "removing resource entry: %s: %s", other_keys, resources) - await asyncio.gather(*remove_tasks) + "removing resource entry: %s: %s", other_keys, resources + ) + await logged_gather(*remove_tasks, reraise=False) + logger.debug( - "the resources %s:%s of %s may be 
now safely closed", resource_name, resource_value, key) - await emit(event="SIGNAL_PROJECT_CLOSE", user_id=None, project_uuid=resource_value, app=app) + "the resources %s:%s of %s may be now safely closed", + resource_name, + resource_value, + key, + ) + await emit( + event="SIGNAL_PROJECT_CLOSE", + user_id=None, + project_uuid=resource_value, + app=app, + ) async def garbage_collector_task(app: web.Application): @@ -70,7 +86,9 @@ async def garbage_collector_task(app: web.Application): async def setup_garbage_collector_task(app: web.Application): - app[APP_GARBAGE_COLLECTOR_KEY] = asyncio.get_event_loop().create_task(garbage_collector_task(app)) + app[APP_GARBAGE_COLLECTOR_KEY] = asyncio.get_event_loop().create_task( + garbage_collector_task(app) + ) yield task = app[APP_GARBAGE_COLLECTOR_KEY] task.cancel() diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/redis.py b/services/web/server/src/simcore_service_webserver/resource_manager/redis.py index 3a06a499d62..fef9636e658 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/redis.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/redis.py @@ -9,15 +9,16 @@ log = logging.getLogger(__name__) -THIS_SERVICE_NAME = 'redis' +THIS_SERVICE_NAME = "redis" DSN = "redis://{host}:{port}" retry_upon_init_policy = dict( stop=stop_after_attempt(3), wait=wait_random(min=1, max=2), - before=before_log(log, logging.WARNING) + before=before_log(log, logging.WARNING), ) + async def redis_client(app: web.Application): cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] url = DSN.format(**cfg["redis"]) @@ -26,7 +27,7 @@ async def redis_client(app: web.Application): with attempt: client = await aioredis.create_redis_pool(url, encoding="utf-8") - assert client # nosec + assert client # nosec app[APP_CLIENT_REDIS_CLIENT_KEY] = client yield @@ -37,6 +38,7 @@ async def redis_client(app: web.Application): client.close() await client.wait_closed() + def setup_redis_client(app: web.Application): app[APP_CLIENT_REDIS_CLIENT_KEY] = None @@ -50,5 +52,6 @@ def setup_redis_client(app: web.Application): app.cleanup_ctx.append(redis_client) + def get_redis_client(app: web.Application) -> aioredis.Redis: return app[APP_CLIENT_REDIS_CLIENT_KEY] diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py index 82dfe2c1ff3..4127d68206e 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py @@ -27,6 +27,7 @@ RESOURCE_SUFFIX = "resources" ALIVE_SUFFIX = "alive" + @attr.s(auto_attribs=True) class RedisResourceRegistry: """ Keeps a record of connected sockets per user @@ -34,6 +35,7 @@ class RedisResourceRegistry: redis structure is following Redis Hash: key=user_id:client_session_id values={server_id socket_id project_id} """ + app: web.Application @classmethod @@ -43,11 +45,17 @@ def _hash_key(cls, key: Dict[str, str]) -> str: @classmethod def _decode_hash_key(cls, hash_key: str) -> Dict[str, str]: - tmp_key = hash_key[:-len(f":{RESOURCE_SUFFIX}")] if hash_key.endswith(f":{RESOURCE_SUFFIX}") else hash_key[:-len(f":{ALIVE_SUFFIX}")] + tmp_key = ( + hash_key[: -len(f":{RESOURCE_SUFFIX}")] + if hash_key.endswith(f":{RESOURCE_SUFFIX}") + else hash_key[: -len(f":{ALIVE_SUFFIX}")] + ) key = dict(x.split("=") for x in tmp_key.split(":")) return key - async def set_resource(self, 
key: Dict[str, str], resource: Tuple[str, str]) -> None: + async def set_resource( + self, key: Dict[str, str], resource: Tuple[str, str] + ) -> None: client = get_redis_client(self.app) hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" await client.hmset_dict(hash_key, **{resource[0]: resource[1]}) @@ -62,7 +70,9 @@ async def remove_resource(self, key: Dict[str, str], resource_name: str) -> None hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" await client.hdel(hash_key, resource_name) - async def find_resources(self, key: Dict[str, str], resource_name: str) -> List[str]: + async def find_resources( + self, key: Dict[str, str], resource_name: str + ) -> List[str]: client = get_redis_client(self.app) resources = [] # the key might only be partialy complete @@ -82,13 +92,13 @@ async def find_keys(self, resource: Tuple[str, str]) -> List[Dict[str, str]]: keys.append(self._decode_hash_key(hash_key)) return keys - async def set_key_alive(self, key: Dict[str, str], alive: bool, timeout: int =0) -> None: + async def set_key_alive( + self, key: Dict[str, str], alive: bool, timeout: int = 0 + ) -> None: client = get_redis_client(self.app) hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" - await client.set(hash_key, - 1, - expire=0 if alive else timeout - ) + await client.set(hash_key, 1, expire=0 if alive else timeout) + async def is_key_alive(self, key: Dict[str, str]) -> bool: client = get_redis_client(self.app) hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" @@ -96,13 +106,24 @@ async def is_key_alive(self, key: Dict[str, str]) -> bool: async def remove_key(self, key: Dict[str, str]) -> None: client = get_redis_client(self.app) - await client.delete(f"{self._hash_key(key)}:{RESOURCE_SUFFIX}", - f"{self._hash_key(key)}:{ALIVE_SUFFIX}") - - async def get_all_resource_keys(self) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + await client.delete( + f"{self._hash_key(key)}:{RESOURCE_SUFFIX}", + f"{self._hash_key(key)}:{ALIVE_SUFFIX}", + ) + + async def get_all_resource_keys( + self, + ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: client = get_redis_client(self.app) - alive_keys = [self._decode_hash_key(hash_key) async for hash_key in client.iscan(match=f"*:{ALIVE_SUFFIX}")] - dead_keys = [self._decode_hash_key(hash_key) async for hash_key in client.iscan(match=f"*:{RESOURCE_SUFFIX}") if self._decode_hash_key(hash_key) not in alive_keys] + alive_keys = [ + self._decode_hash_key(hash_key) + async for hash_key in client.iscan(match=f"*:{ALIVE_SUFFIX}") + ] + dead_keys = [ + self._decode_hash_key(hash_key) + async for hash_key in client.iscan(match=f"*:{RESOURCE_SUFFIX}") + if self._decode_hash_key(hash_key) not in alive_keys + ] return (alive_keys, dead_keys) diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py b/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py index 7551bc28380..1b83047d4d2 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py @@ -28,66 +28,124 @@ SOCKET_ID_KEY = "socket_id" + @attr.s(auto_attribs=True) class WebsocketRegistry: user_id: str client_session_id: str app: web.Application - def _resource_key(self) -> Dict[str,str]: + def _resource_key(self) -> Dict[str, str]: return { "user_id": self.user_id, - "client_session_id": self.client_session_id if self.client_session_id else "*" - } + "client_session_id": 
self.client_session_id + if self.client_session_id + else "*", + } async def set_socket_id(self, socket_id: str) -> None: - log.debug("user %s/tab %s adding socket %s in registry...", self.user_id, self.client_session_id, socket_id) + log.debug( + "user %s/tab %s adding socket %s in registry...", + self.user_id, + self.client_session_id, + socket_id, + ) registry = get_registry(self.app) await registry.set_resource(self._resource_key(), (SOCKET_ID_KEY, socket_id)) await registry.set_key_alive(self._resource_key(), True) async def get_socket_id(self) -> str: - log.debug("user %s/tab %s removing socket from registry...", self.user_id, self.client_session_id) + log.debug( + "user %s/tab %s removing socket from registry...", + self.user_id, + self.client_session_id, + ) registry = get_registry(self.app) resources = await registry.get_resources(self._resource_key()) return resources.get(SOCKET_ID_KEY, None) async def remove_socket_id(self) -> None: - log.debug("user %s/tab %s removing socket from registry...", self.user_id, self.client_session_id) + log.debug( + "user %s/tab %s removing socket from registry...", + self.user_id, + self.client_session_id, + ) registry = get_registry(self.app) await registry.remove_resource(self._resource_key(), SOCKET_ID_KEY) - await registry.set_key_alive(self._resource_key(), False, get_service_deletion_timeout(self.app)) + await registry.set_key_alive( + self._resource_key(), False, get_service_deletion_timeout(self.app) + ) async def find_socket_ids(self) -> List[str]: - log.debug("user %s/tab %s finding %s from registry...", self.user_id, self.client_session_id, SOCKET_ID_KEY) + log.debug( + "user %s/tab %s finding %s from registry...", + self.user_id, + self.client_session_id, + SOCKET_ID_KEY, + ) registry = get_registry(self.app) - user_sockets = await registry.find_resources({"user_id": self.user_id, "client_session_id": "*"}, SOCKET_ID_KEY) + user_sockets = await registry.find_resources( + {"user_id": self.user_id, "client_session_id": "*"}, SOCKET_ID_KEY + ) return user_sockets async def find(self, key: str) -> List[str]: - log.debug("user %s/tab %s finding %s from registry...", self.user_id, self.client_session_id, key) + log.debug( + "user %s/tab %s finding %s from registry...", + self.user_id, + self.client_session_id, + key, + ) registry = get_registry(self.app) user_resources = await registry.find_resources(self._resource_key(), key) return user_resources async def add(self, key: str, value: str) -> None: - log.debug("user %s/tab %s adding %s:%s in registry...", self.user_id, self.client_session_id, key, value) + log.debug( + "user %s/tab %s adding %s:%s in registry...", + self.user_id, + self.client_session_id, + key, + value, + ) registry = get_registry(self.app) - await registry.set_resource(self._resource_key(), (key,value)) + await registry.set_resource(self._resource_key(), (key, value)) async def remove(self, key: str) -> None: - log.debug("user %s/tab %s removing %s from registry...", self.user_id, self.client_session_id, key) + log.debug( + "user %s/tab %s removing %s from registry...", + self.user_id, + self.client_session_id, + key, + ) registry = get_registry(self.app) await registry.remove_resource(self._resource_key(), key) async def find_users_of_resource(self, key: str, value: str) -> List[str]: - log.debug("user %s/tab %s finding %s:%s in registry..." 
,self.user_id, self.client_session_id, key, value) + log.debug( + "user %s/tab %s finding %s:%s in registry...", + self.user_id, + self.client_session_id, + key, + value, + ) registry = get_registry(self.app) registry_keys = await registry.find_keys((key, value)) users = list({x["user_id"] for x in registry_keys}) return users + @contextmanager -def managed_resource(user_id: str, client_session_id: str, app: web.Application) -> WebsocketRegistry: +def managed_resource( + user_id: str, client_session_id: str, app: web.Application +) -> WebsocketRegistry: registry = WebsocketRegistry(user_id, client_session_id, app) - yield registry + try: + yield registry + except Exception: + log.exception( + "Error in web-socket for user:%s, session:%s", user_id, client_session_id + ) + raise + + # TODO: PC->SAN?? exception handling? e.g. remove resource from registry? diff --git a/services/web/server/src/simcore_service_webserver/resources.py b/services/web/server/src/simcore_service_webserver/resources.py index d4b9255a3ee..f009ea9add7 100644 --- a/services/web/server/src/simcore_service_webserver/resources.py +++ b/services/web/server/src/simcore_service_webserver/resources.py @@ -7,10 +7,8 @@ resources = ResourcesFacade( package_name=__name__, distribution_name="simcore-service-webserver", - config_folder='config', + config_folder="config", ) -__all__ = ( - 'resources', -) +__all__ = ("resources",) diff --git a/services/web/server/src/simcore_service_webserver/rest.py b/services/web/server/src/simcore_service_webserver/rest.py index 67d4faea436..9b40c565a1b 100644 --- a/services/web/server/src/simcore_service_webserver/rest.py +++ b/services/web/server/src/simcore_service_webserver/rest.py @@ -27,14 +27,14 @@ log = logging.getLogger(__name__) -def get_openapi_specs_path(api_version_dir: Optional[str]=None) -> Path: +def get_openapi_specs_path(api_version_dir: Optional[str] = None) -> Path: if api_version_dir is None: api_version_dir = api_version_prefix - return resources.get_path(f'api/{api_version_dir}/openapi.yaml') + return resources.get_path(f"api/{api_version_dir}/openapi.yaml") -def load_openapi_specs(spec_path: Optional[Path]=None) -> OpenApiSpecs: +def load_openapi_specs(spec_path: Optional[Path] = None) -> OpenApiSpecs: if spec_path is None: spec_path = get_openapi_specs_path() @@ -45,9 +45,12 @@ def load_openapi_specs(spec_path: Optional[Path]=None) -> OpenApiSpecs: return specs -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=['simcore_service_webserver.security'], - logger=log) +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.security"], + logger=log, +) def setup(app: web.Application): cfg = get_rest_config(app) api_version_dir = cfg["version"] @@ -61,7 +64,9 @@ def setup(app: web.Application): major, *_ = specs.info.version if f"/v{major}" != base_path: - raise ValueError(f"Basepath naming {base_path} does not fit API version {specs.info.version}") + raise ValueError( + f"Basepath naming {base_path} does not fit API version {specs.info.version}" + ) # diagnostics routes routes = rest_routes.create(specs) @@ -72,15 +77,10 @@ def setup(app: web.Application): # rest API doc at /api/doc log.debug("OAS loaded from %s ", spec_path) - setup_swagger(app, - swagger_from_file=str(spec_path), - ui_version=3) - + setup_swagger(app, swagger_from_file=str(spec_path), ui_version=3) # alias setup_rest = setup -__all__ = ( - 'setup_rest' -) +__all__ = "setup_rest" diff --git a/services/web/server/src/simcore_service_webserver/rest_config.py 
b/services/web/server/src/simcore_service_webserver/rest_config.py index 9a58db47e3a..21117c9142f 100644 --- a/services/web/server/src/simcore_service_webserver/rest_config.py +++ b/services/web/server/src/simcore_service_webserver/rest_config.py @@ -10,17 +10,15 @@ from servicelib.application_keys import APP_CONFIG_KEY, APP_OPENAPI_SPECS_KEY -CONFIG_SECTION_NAME = 'rest' +CONFIG_SECTION_NAME = "rest" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - "version": T.Enum("v0"), -}) +schema = T.Dict( + {T.Key("enabled", default=True, optional=True): T.Bool(), "version": T.Enum("v0"),} +) def get_rest_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] -__all__ =[ - 'APP_OPENAPI_SPECS_KEY' -] + +__all__ = ["APP_OPENAPI_SPECS_KEY"] diff --git a/services/web/server/src/simcore_service_webserver/rest_models.py b/services/web/server/src/simcore_service_webserver/rest_models.py index e3dab197066..ae28204555e 100644 --- a/services/web/server/src/simcore_service_webserver/rest_models.py +++ b/services/web/server/src/simcore_service_webserver/rest_models.py @@ -9,6 +9,7 @@ # NOTE: using these, optional and required fields are always transmitted! # NOTE: make some attrs nullable by default!? + @attr.s(auto_attribs=True) class RegistrationType: email: str @@ -16,7 +17,7 @@ class RegistrationType: confirm: str @classmethod - def from_body(cls, data): # struct-like unmarshalled data produced by + def from_body(cls, data): # struct-like unmarshalled data produced by # TODO: simplify return cls(email=data.email, password=data.password, confirm=data.confirm) @@ -24,8 +25,8 @@ def from_body(cls, data): # struct-like unmarshalled data produced by @attr.s(auto_attribs=True) class LogMessageType: message: str - level: str = 'INFO' - logger: str = 'user' + level: str = "INFO" + logger: str = "user" @attr.s(auto_attribs=True) @@ -37,10 +38,8 @@ class ErrorItemType: @classmethod def from_error(cls, err: BaseException): - item = cls( code = err.__class__.__name__, - message=str(err), - resource=None, - field=None + item = cls( + code=err.__class__.__name__, message=str(err), resource=None, field=None ) return item diff --git a/services/web/server/src/simcore_service_webserver/rest_routes.py b/services/web/server/src/simcore_service_webserver/rest_routes.py index f5ce8c35d12..8b6146f33ff 100644 --- a/services/web/server/src/simcore_service_webserver/rest_routes.py +++ b/services/web/server/src/simcore_service_webserver/rest_routes.py @@ -25,21 +25,21 @@ def create(specs: openapi.Spec) -> List[web.RouteDef]: # TODO: routing will be done automatically using operation_id/tags, etc... 
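The registration below pairs each handler with the route name taken from the spec's operation_id, so the router and the OpenAPI document stay in sync. A minimal, self-contained sketch of that pattern (the spec dict and handler here are illustrative stand-ins, not the project's openapi objects):

    from aiohttp import web

    # path -> HTTP method -> operation_id, a plain dict standing in for the spec
    SPEC_PATHS = {"/": {"get": "check_health"}}

    async def check_health(request: web.Request) -> web.Response:
        return web.json_response({"status": "ok"})

    def create_routes(base_path: str) -> list:
        routes = []
        for path, operations in SPEC_PATHS.items():
            for method, operation_id in operations.items():
                # naming the route after the operation_id is what makes reverse
                # URL lookups by operation name possible later on
                routes.append(
                    web.route(method.upper(), base_path + path, check_health, name=operation_id)
                )
        return routes

    app = web.Application()
    app.router.add_routes(create_routes("/v0"))
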
# diagnostics -- - path, handle = '/', rest_handlers.check_health - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(base_path+path, handle, name=operation_id) ) + path, handle = "/", rest_handlers.check_health + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(base_path + path, handle, name=operation_id)) - path, handle = '/check/{action}', rest_handlers.check_action - operation_id = specs.paths[path].operations['post'].operation_id - routes.append( web.post(base_path+path, handle, name=operation_id) ) + path, handle = "/check/{action}", rest_handlers.check_action + operation_id = specs.paths[path].operations["post"].operation_id + routes.append(web.post(base_path + path, handle, name=operation_id)) - path, handle = '/config', rest_handlers.get_config - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(base_path+path, handle, name=operation_id) ) + path, handle = "/config", rest_handlers.get_config + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(base_path + path, handle, name=operation_id)) # NOTE: Internal. Not shown in api/docs - path, handle = '/diagnostics', rest_handlers.get_diagnostics - operation_id = 'get_diagnotics' # specs.paths[path].operations['get'].operation_id - routes.append( web.get(base_path+path, handle, name=operation_id) ) + path, handle = "/diagnostics", rest_handlers.get_diagnostics + operation_id = "get_diagnotics" # specs.paths[path].operations['get'].operation_id + routes.append(web.get(base_path + path, handle, name=operation_id)) return routes diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py index eb9c6351771..da4eb1fa916 100644 --- a/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py +++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py @@ -26,12 +26,13 @@ ROUTE_NAME = MODULE_NAME module_name = module_name = __name__.replace(".__init__", "") + async def _on_shutdown(app: web.Application): for ws in app[APP_SOCKETS_KEY]: await ws.close() -@app_module_setup(module_name, ModuleCategory.ADDON, - logger=logger) + +@app_module_setup(module_name, ModuleCategory.ADDON, logger=logger) def setup(app: web.Application, service_resolver: ServiceResolutionPolicy): """Sets up reverse-proxy subsystem in the application (a la aiohttp) @@ -40,21 +41,19 @@ def setup(app: web.Application, service_resolver: ServiceResolutionPolicy): # Registers reverse proxy handlers customized for specific service types for name in jupyter.SUPPORTED_IMAGE_NAME: - chooser.register_handler(jupyter.handler, - image_name=name) - + chooser.register_handler(jupyter.handler, image_name=name) for name in paraview.SUPPORTED_IMAGE_NAME: chooser.register_handler(paraview.handler, image_name=name) # /x/{serviceId}/{proxyPath:.*} - app.router.add_route(method='*', path=URL_PATH, - handler=chooser.do_route, name=ROUTE_NAME) + app.router.add_route( + method="*", path=URL_PATH, handler=chooser.do_route, name=ROUTE_NAME + ) # chooser has same lifetime as the application app[__name__] = {"chooser": chooser} - # cleans up all sockets created by the proxy app[APP_SOCKETS_KEY] = list() app.on_shutdown.append(_on_shutdown) @@ -63,6 +62,4 @@ def setup(app: web.Application, service_resolver: ServiceResolutionPolicy): # alias setup_reverse_proxy = setup -__all__ = ( - 'setup_reverse_proxy' -) 
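The setup() above keeps the dispatch table in a ReverseChooser: one handler per supported image name, plus a single catch-all route. A toy sketch of that dispatch-by-image-name idea, with an invented image name and simplified handlers (the real chooser also resolves the service's location before delegating):

    import asyncio
    from typing import Awaitable, Callable, Dict

    Handler = Callable[[str], Awaitable[str]]

    handlers: Dict[str, Handler] = {}

    async def default_handler(proxy_path: str) -> str:
        return f"generic reverse-proxy handling of {proxy_path}"

    async def jupyter_handler(proxy_path: str) -> str:
        return f"jupyter-specific handling of {proxy_path}"

    def register_handler(handler: Handler, *, image_name: str) -> None:
        handlers[image_name] = handler

    register_handler(jupyter_handler, image_name="simcore/services/dynamic/jupyter-base-notebook")

    async def do_route(image_name: str, proxy_path: str) -> str:
        # unknown images fall back to the generic handler
        return await handlers.get(image_name, default_handler)(proxy_path)

    print(asyncio.get_event_loop().run_until_complete(
        do_route("simcore/services/dynamic/jupyter-base-notebook", "/x/someId/lab")
    ))
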
+__all__ = "setup_reverse_proxy" diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py index b365532ea5e..7e62bc94966 100644 --- a/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py +++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py @@ -1,4 +1,3 @@ - import abc from yarl import URL @@ -10,6 +9,7 @@ class ServiceResolutionPolicy(metaclass=abc.ABCMeta): """ Implements an interface to identify and resolve the location of a dynamic backend service """ + base_mountpoint = PROXY_MOUNTPOINT @abc.abstractmethod diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py index 4539bf91b52..87f6fe328ff 100644 --- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py +++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py @@ -1,7 +1,6 @@ - -#pylint: disable=unused-wildcard-import -#pylint: disable=wildcard-import -#pylint: disable=unused-import, redefined-outer-name, protected-access +# pylint: disable=unused-wildcard-import +# pylint: disable=wildcard-import +# pylint: disable=unused-import, redefined-outer-name, protected-access # TODO: should be fixed in #710. TEMPORARY SOLUTION @@ -14,39 +13,76 @@ import traceback import warnings from types import SimpleNamespace, TracebackType -from typing import (Any, Coroutine, Generator, Generic, Iterable, List, # noqa - Mapping, Optional, Set, Tuple, Type, TypeVar, Union) +from typing import ( + Any, + Coroutine, + Generator, + Generic, + Iterable, + List, # noqa + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) import attr from aiohttp import hdrs, http, payload from aiohttp.abc import AbstractCookieJar from aiohttp.client import * from aiohttp.client import _SessionRequestContextManager -from aiohttp.client_exceptions import (ClientConnectionError, - ClientConnectorCertificateError, - ClientConnectorError, - ClientConnectorSSLError, ClientError, - ClientHttpProxyError, ClientOSError, - ClientPayloadError, - ClientProxyConnectionError, - ClientResponseError, ClientSSLError, - ContentTypeError, InvalidURL, - ServerConnectionError, - ServerDisconnectedError, - ServerFingerprintMismatch, - ServerTimeoutError, TooManyRedirects, - WSServerHandshakeError) -from aiohttp.client_reqrep import (ClientRequest, ClientResponse, Fingerprint, - RequestInfo, _merge_ssl_params) +from aiohttp.client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponseError, + ClientSSLError, + ContentTypeError, + InvalidURL, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + TooManyRedirects, + WSServerHandshakeError, +) +from aiohttp.client_reqrep import ( + ClientRequest, + ClientResponse, + Fingerprint, + RequestInfo, + _merge_ssl_params, +) from aiohttp.client_ws import ClientWebSocketResponse from aiohttp.connector import BaseConnector, TCPConnector, UnixConnector from aiohttp.cookiejar import CookieJar -from aiohttp.helpers import (DEBUG, PY_36, BasicAuth, CeilTimeout, - TimeoutHandle, get_running_loop, 
proxies_from_env, - sentinel, strip_auth_from_url) +from aiohttp.helpers import ( + DEBUG, + PY_36, + BasicAuth, + CeilTimeout, + TimeoutHandle, + get_running_loop, + proxies_from_env, + sentinel, + strip_auth_from_url, +) from aiohttp.http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter -from aiohttp.http_websocket import (WSHandshakeError, WSMessage, # noqa - ws_ext_gen, ws_ext_parse) +from aiohttp.http_websocket import ( + WSHandshakeError, + WSMessage, # noqa + ws_ext_gen, + ws_ext_parse, +) from aiohttp.streams import FlowControlDataQueue from aiohttp.tracing import Trace, TraceConfig from aiohttp.typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL @@ -55,28 +91,29 @@ def client_request( - method: str, - url: StrOrURL, *, - params: Optional[Mapping[str, str]]=None, - data: Any=None, - json: Any=None, - headers: LooseHeaders=None, - skip_auto_headers: Optional[Iterable[str]]=None, - auth: Optional[BasicAuth]=None, - allow_redirects: bool=True, - max_redirects: int=10, - compress: Optional[str]=None, - chunked: Optional[bool]=None, - expect100: bool=False, - raise_for_status: Optional[bool]=None, - read_until_eof: bool=True, - proxy: Optional[StrOrURL]=None, - proxy_auth: Optional[BasicAuth]=None, - timeout: Union[ClientTimeout, object]=sentinel, - cookies: Optional[LooseCookies]=None, - version: HttpVersion=http.HttpVersion11, - connector: Optional[BaseConnector]=None, - loop: Optional[asyncio.AbstractEventLoop]=None + method: str, + url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + headers: LooseHeaders = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + cookies: Optional[LooseCookies] = None, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None ) -> _SessionRequestContextManager: """ as aiohttp.client.request using a client session that does not decompress, i.e auto_decompress=False @@ -87,25 +124,34 @@ def client_request( connector = TCPConnector(loop=loop, force_close=True) session = ClientSession( - loop=loop, cookies=cookies, version=version, timeout=timeout, - connector=connector, connector_owner=connector_owner, - auto_decompress=False) + loop=loop, + cookies=cookies, + version=version, + timeout=timeout, + connector=connector, + connector_owner=connector_owner, + auto_decompress=False, + ) return _SessionRequestContextManager( - session._request(method, url, - params=params, - data=data, - json=json, - headers=headers, - skip_auto_headers=skip_auto_headers, - auth=auth, - allow_redirects=allow_redirects, - max_redirects=max_redirects, - compress=compress, - chunked=chunked, - expect100=expect100, - raise_for_status=raise_for_status, - read_until_eof=read_until_eof, - proxy=proxy, - proxy_auth=proxy_auth,), - session) + session._request( + method, + url, + params=params, + data=data, + json=json, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + compress=compress, + chunked=chunked, + expect100=expect100, + 
raise_for_status=raise_for_status, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth, + ), + session, + ) diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py index 9446435854a..a8f435d6c4d 100644 --- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py +++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py @@ -24,9 +24,12 @@ def check_ws_in_headers(request): - return request.headers.get('connection', '').lower() == 'upgrade' and \ - request.headers.get('upgrade', '').lower() == 'websocket' and \ - request.method == 'GET' + return ( + request.headers.get("connection", "").lower() == "upgrade" + and request.headers.get("upgrade", "").lower() == "websocket" + and request.method == "GET" + ) + async def handle_websocket_requests(ws_server, request, target_url): client_session = aiohttp.ClientSession(cookies=request.cookies) @@ -46,34 +49,36 @@ async def _ws_forward(ws_from, ws_to): elif ws_to.closed: await ws_to.close(code=ws_to.close_code, message=msg.extra) else: - raise ValueError( - 'unexpected message type: %s' % pprint.pformat(msg)) + raise ValueError("unexpected message type: %s" % pprint.pformat(msg)) async with client_session.ws_connect(target_url) as ws_client: - await asyncio.wait([_ws_forward(ws_server, ws_client), - _ws_forward(ws_client, ws_server)], - return_when=asyncio.FIRST_COMPLETED) + await asyncio.wait( + [_ws_forward(ws_server, ws_client), _ws_forward(ws_client, ws_server)], + return_when=asyncio.FIRST_COMPLETED, + ) return ws_server + async def handle_web_request(request, target_url): async with client.request( - request.method, target_url, + request.method, + target_url, headers=request.headers.copy(), allow_redirects=False, - data=await request.read() + data=await request.read(), ) as res: body = await res.read() response = web.Response( - headers=res.headers.copy(), - status=res.status, - body=body + headers=res.headers.copy(), status=res.status, body=body ) return response async def handler(request: web.Request, service_url: str, **_kargs): - target_url = URL(service_url).origin().with_path(request.path).with_query(request.query) + target_url = ( + URL(service_url).origin().with_path(request.path).with_query(request.query) + ) ws_available = False if check_ws_in_headers(request): ws = web.WebSocketResponse() @@ -89,26 +94,26 @@ async def handler(request: web.Request, service_url: str, **_kargs): request.app[APP_SOCKETS_KEY].remove(ws) if not ws_available: - return ( await handle_web_request(request, target_url) ) - + return await handle_web_request(request, target_url) # OTHER IMPLEMENTATIONS ------------------------------------------------------ + async def handler_impl_2(request: web.Request, target_url: str): # FIXME: Taken tmp from https://github.com/weargoggles/aioproxy/blob/master/aioproxy.py start = time.time() async with aiohttp.client.request( - request.method, target_url, + request.method, + target_url, headers=request.headers, chunked=CHUNK, # response_class=ReverseProxyResponse, ) as r: - logger.debug('opened backend request in %d ms', ((time.time() - start) * 1000)) + logger.debug("opened backend request in %d ms", ((time.time() - start) * 1000)) - response = aiohttp.web.StreamResponse(status=r.status, - headers=r.headers) + response = aiohttp.web.StreamResponse(status=r.status, headers=r.headers) await response.prepare(request) content 
= r.content while True: @@ -117,7 +122,7 @@ async def handler_impl_2(request: web.Request, target_url: str): break await response.write(chunk) - logger.debug('finished sending content in %d ms', ((time.time() - start) * 1000,)) + logger.debug("finished sending content in %d ms", ((time.time() - start) * 1000,)) await response.write_eof() return response diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py index 196b43c9670..b0fa8673d5f 100644 --- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py +++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py @@ -19,16 +19,22 @@ from ..settings import APP_SOCKETS_KEY -SUPPORTED_IMAGE_NAME = ["simcore/services/dynamic/3d-viewer", - "simcore/services/dynamic/3d-viewer-gpu"] +SUPPORTED_IMAGE_NAME = [ + "simcore/services/dynamic/3d-viewer", + "simcore/services/dynamic/3d-viewer-gpu", +] SUPPORTED_IMAGE_TAG = "==1.0.5" logger = logging.getLogger(__name__) + def check_ws_in_headers(request): - return request.headers.get('connection', '').lower() == 'upgrade' and \ - request.headers.get('upgrade', '').lower() == 'websocket' and \ - request.method == 'GET' + return ( + request.headers.get("connection", "").lower() == "upgrade" + and request.headers.get("upgrade", "").lower() == "websocket" + and request.method == "GET" + ) + async def handle_websocket_requests(ws_server, request: web.Request, target_url: URL): async def _ws_forward(ws_from, ws_to): @@ -46,53 +52,61 @@ async def _ws_forward(ws_from, ws_to): elif ws_to.closed: await ws_to.close(code=ws_to.close_code, message=msg.extra) else: - raise ValueError( - 'unexpected message type: %s' % pprint.pformat(msg)) + raise ValueError("unexpected message type: %s" % pprint.pformat(msg)) async with aiohttp.ClientSession(cookies=request.cookies) as session: # websocket connection with backend services async with session.ws_connect(target_url) as ws_client: - await asyncio.wait([_ws_forward(ws_server, ws_client), - _ws_forward(ws_client, ws_server)], - return_when=asyncio.FIRST_COMPLETED) + await asyncio.wait( + [_ws_forward(ws_server, ws_client), _ws_forward(ws_client, ws_server)], + return_when=asyncio.FIRST_COMPLETED, + ) return ws_server -async def handle_web_request(request: web.Request, target_url: URL, mount_point:str, proxy_path: str): + +async def handle_web_request( + request: web.Request, target_url: URL, mount_point: str, proxy_path: str +): async with client.request( - request.method, target_url, + request.method, + target_url, headers=request.headers.copy(), allow_redirects=False, - data=await request.read() + data=await request.read(), ) as res: # special handling for paraview headers = res.headers.copy() - del headers['content-length'] + del headers["content-length"] body = await res.read() - if proxy_path == 'Visualizer.js': - body = body.replace(b'"https"===window.location.protocol', b'window.location.protocol.startsWith("https")') - body = body.replace(b'"/ws"', b'"%s/ws"' % - mount_point.encode(), 1) + if proxy_path == "Visualizer.js": body = body.replace( - b'"/paraview/"', b'"%s/paraview/"' % mount_point.encode(), 1) + b'"https"===window.location.protocol', + b'window.location.protocol.startsWith("https")', + ) + body = body.replace(b'"/ws"', b'"%s/ws"' % mount_point.encode(), 1) + body = body.replace( + b'"/paraview/"', b'"%s/paraview/"' % mount_point.encode(), 1 + ) logger.info("fixed 
Visualizer.js paths on the fly") - response = web.Response( - headers=headers, - status=res.status, - body=body - ) + response = web.Response(headers=headers, status=res.status, body=body) return response -async def handler(request: web.Request, service_url: str, mount_point: str, proxy_path: str, **_kargs): + +async def handler( + request: web.Request, service_url: str, mount_point: str, proxy_path: str, **_kargs +): logger.debug("handling request %s, using service url %s", request, service_url) - target_url = URL(service_url).origin().with_path(request.path).with_query(request.query) + target_url = ( + URL(service_url).origin().with_path(request.path).with_query(request.query) + ) ws_available = False if check_ws_in_headers(request): ws = web.WebSocketResponse() ws_available = ws.can_prepare(request) if ws_available: await ws.prepare(request) - logger.info('##### WS_SERVER %s', pprint.pformat(ws)) + logger.info("##### WS_SERVER %s", pprint.pformat(ws)) try: request.app[APP_SOCKETS_KEY].append(ws) # paraview special handling, it is somehow fixed at the root endpoint @@ -102,18 +116,18 @@ async def handler(request: web.Request, service_url: str, mount_point: str, prox finally: request.app[APP_SOCKETS_KEY].remove(ws) if not ws_available: - return (await handle_web_request(request, target_url, mount_point, proxy_path)) + return await handle_web_request(request, target_url, mount_point, proxy_path) + if __name__ == "__main__": # dummies for manual testing - BASE_URL = 'http://0.0.0.0:8080' - MOUNT_POINT = '/x/fakeUuid' + BASE_URL = "http://0.0.0.0:8080" + MOUNT_POINT = "/x/fakeUuid" def adapter(req: web.Request): - proxy_path = req.match_info.get('proxyPath', - 'no proxyPath placeholder defined') + proxy_path = req.match_info.get("proxyPath", "no proxyPath placeholder defined") return handler(req, BASE_URL, MOUNT_POINT, proxy_path) app = web.Application() - app.router.add_route('*', MOUNT_POINT + '{proxyPath:.*}', adapter) + app.router.add_route("*", MOUNT_POINT + "{proxyPath:.*}", adapter) web.run_app(app, port=3985) diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py index 92eb7abf9c9..15c27838091 100644 --- a/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py +++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py @@ -15,7 +15,9 @@ from aiohttp import web from .abc import ServiceResolutionPolicy -from .handlers.jupyter import handler as default_handler #TODO: jupyter shall be the generic one +from .handlers.jupyter import ( + handler as default_handler, +) # TODO: jupyter shall be the generic one from .settings import PROXY_PATH_KEY, SERVICE_ID_KEY logger = logging.getLogger(__name__) @@ -29,6 +31,7 @@ class Wrapper: NOTE: wrapper.cache.clear() """ + encapsulated: ServiceResolutionPolicy cache: Dict[str, Tuple[str, str]] = OrderedDict() MAXSIZE = 128 @@ -53,8 +56,7 @@ async def resolve(self, service_identifier: str) -> Tuple[str, str]: except Exception: logger.debug("Failed to resolve service", exc_info=True) # TODO: translate exception into HTTPStatus - raise web.HTTPServiceUnavailable( - reason="Cannot resolve service") + raise web.HTTPServiceUnavailable(reason="Cannot resolve service") @attr.s(auto_attribs=True) @@ -62,9 +64,9 @@ class ReverseChooser: resolver: Wrapper = attr.ib(converter=Wrapper) handlers: Dict = dict() - def register_handler(self, - handler: Callable[..., web.StreamResponse], *, - image_name: str): + def 
register_handler( + self, handler: Callable[..., web.StreamResponse], *, image_name: str + ): self.handlers[image_name] = handler async def do_route(self, request: web.Request) -> web.Response: @@ -75,9 +77,11 @@ async def do_route(self, request: web.Request) -> web.Response: service_identifier = request.match_info.get(SERVICE_ID_KEY) proxy_path = request.match_info.get(PROXY_PATH_KEY) - mountpoint = request.path[:-len(proxy_path)].rstrip("/") + mountpoint = request.path[: -len(proxy_path)].rstrip("/") - image_name, service_url = await cli.resolve(service_identifier) # pylint: disable=E1101 + image_name, service_url = await cli.resolve( + service_identifier + ) # pylint: disable=E1101 # TODO: reset cache for given service_identifier when it is shutdown or reused # To clear cache, use cli.cache.clear() @@ -89,8 +93,7 @@ async def do_route(self, request: web.Request) -> web.Response: # FIXME: add version as well handler = self.handlers.get(image_name, default_handler) - - response = await handler(request, service_url, - mount_point=mountpoint, - proxy_path=proxy_path) + response = await handler( + request, service_url, mount_point=mountpoint, proxy_path=proxy_path + ) return response diff --git a/services/web/server/src/simcore_service_webserver/security.py b/services/web/server/src/simcore_service_webserver/security.py index 85e9223ab9a..9c0e8c0e05e 100644 --- a/services/web/server/src/simcore_service_webserver/security.py +++ b/services/web/server/src/simcore_service_webserver/security.py @@ -35,8 +35,7 @@ def setup(app: web.Application): authorization_policy = AuthorizationPolicy(app, access_model) aiohttp_security.setup(app, identity_policy, authorization_policy) + setup_security = setup -__all__ = ( - 'setup_security' -) +__all__ = "setup_security" diff --git a/services/web/server/src/simcore_service_webserver/security_access_model.py b/services/web/server/src/simcore_service_webserver/security_access_model.py index aaaeacd271b..07bac80cc07 100644 --- a/services/web/server/src/simcore_service_webserver/security_access_model.py +++ b/services/web/server/src/simcore_service_webserver/security_access_model.py @@ -20,24 +20,28 @@ @attr.s(auto_attribs=True) class RolePermissions: role: UserRole - allowed: List[str]=attr.Factory(list) # named permissions allowed - check: Dict[str, Callable[[],bool]]=attr.Factory(dict) # checked permissions: permissions with conditions - inherits: List[str]=attr.Factory(list) + allowed: List[str] = attr.Factory(list) # named permissions allowed + check: Dict[str, Callable[[], bool]] = attr.Factory( + dict + ) # checked permissions: permissions with conditions + inherits: List[str] = attr.Factory(list) @classmethod - def from_rawdata(cls, role, value:Dict): + def from_rawdata(cls, role, value: Dict): if isinstance(role, str): name = role role = UserRole[name] - role = RolePermissions(role=role, allowed=[], check=[], inherits=value.get('inherits', [])) + role = RolePermissions( + role=role, allowed=[], check=[], inherits=value.get("inherits", []) + ) allowed = set() check = dict() - for item in value.get('can', list()): + for item in value.get("can", list()): if isinstance(item, Dict): - check[item['name']] = item['check'] + check[item["name"]] = item["check"] elif isinstance(item, str): allowed.add(item) else: @@ -57,14 +61,15 @@ class RoleBasedAccessModel: - For checks with operation expressions (e.g. can operation A & operation B?) 
see check_access free function below """ + def __init__(self, roles: List[RolePermissions]): - self.roles = {r.role:r for r in roles} + self.roles = {r.role: r for r in roles} # TODO: all operations allowed for a given role # TODO: build a tree out of the list of allowed operations # TODO: operations to ADD/REMOVE/EDIT permissions in a role - async def can(self, role: UserRole, operation: str, context: Dict=None) -> bool: + async def can(self, role: UserRole, operation: str, context: Dict = None) -> bool: # pylint: disable=too-many-return-statements # undefined operation TODO: check if such a name is defined?? @@ -88,10 +93,12 @@ async def can(self, role: UserRole, operation: str, context: Dict=None) -> bool: check = role_access.check[operation] try: if inspect.iscoroutinefunction(check): - return (await check(context)) + return await check(context) return check(context) - except Exception: #pylint: disable=broad-except - log.exception("Check operation '%s', shall not raise [%s]", operation, check) + except Exception: # pylint: disable=broad-except + log.exception( + "Check operation '%s', shall not raise [%s]", operation, check + ) return False # check if any parents @@ -103,37 +110,40 @@ async def can(self, role: UserRole, operation: str, context: Dict=None) -> bool: return True return False - async def who_can(self, operation: str, context: Dict=None): + async def who_can(self, operation: str, context: Dict = None): allowed = [] for role in self.roles: if await self.can(role, operation, context): allowed.append(role) return allowed - @classmethod def from_rawdata(cls, raw: Dict): - roles = [RolePermissions.from_rawdata(role, value) for role, value in raw.items()] + roles = [ + RolePermissions.from_rawdata(role, value) for role, value in raw.items() + ] return RoleBasedAccessModel(roles) # TODO: print table?? 
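A self-contained toy illustrating the lookup order that RoleBasedAccessModel.can above implements: explicitly allowed names first, then conditional check callables, and finally recursion into inherited roles. The role table is invented for the example:

    import asyncio

    ROLES = {
        "GUEST": {"allowed": {"project.read"}, "check": {}, "inherits": []},
        "USER": {
            "allowed": {"project.create"},
            "check": {"project.delete": lambda ctx: ctx.get("is_owner", False)},
            "inherits": ["GUEST"],
        },
    }

    async def can(role: str, operation: str, context: dict = None) -> bool:
        spec = ROLES[role]
        if operation in spec["allowed"]:
            return True
        if operation in spec["check"]:
            return spec["check"][operation](context or {})
        for parent in spec["inherits"]:
            if await can(parent, operation, context):
                return True
        return False

    loop = asyncio.get_event_loop()
    assert loop.run_until_complete(can("USER", "project.read"))  # via inheritance
    assert loop.run_until_complete(can("USER", "project.delete", {"is_owner": True}))
    assert not loop.run_until_complete(can("GUEST", "project.create"))
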
# TODO: implement expression parser: reg = re.compile(r'(&|\||\bAND\b|\bOR\b|\(|\))') -operators_pattern = re.compile(r'(&|\||\bAND\b|\bOR\b)') +operators_pattern = re.compile(r"(&|\||\bAND\b|\bOR\b)") -async def check_access(model: RoleBasedAccessModel, role:UserRole, operations: str, context: Dict=None) -> bool: +async def check_access( + model: RoleBasedAccessModel, role: UserRole, operations: str, context: Dict = None +) -> bool: """ Extends `RoleBasedAccessModel.can` to check access to boolean expressions of operations Returns True if a user with a role has permission on a given context """ tokens = operators_pattern.split(operations) - if len(tokens)==1: + if len(tokens) == 1: return await model.can(role, tokens[0], context) - if len(tokens)==3: - tokens = [t.strip() for t in tokens if t.strip() != ''] + if len(tokens) == 3: + tokens = [t.strip() for t in tokens if t.strip() != ""] lhs, op, rhs = tokens can_lhs = await model.can(role, lhs, context) if op in ["AND", "&"]: diff --git a/services/web/server/src/simcore_service_webserver/security_api.py b/services/web/server/src/simcore_service_webserver/security_api.py index 55d428b24d2..ac76c16af1c 100644 --- a/services/web/server/src/simcore_service_webserver/security_api.py +++ b/services/web/server/src/simcore_service_webserver/security_api.py @@ -7,9 +7,14 @@ import passlib.hash import sqlalchemy as sa from aiohttp import web -from aiohttp_security.api import (AUTZ_KEY, authorized_userid, - check_permission, forget, is_anonymous, - remember) +from aiohttp_security.api import ( + AUTZ_KEY, + authorized_userid, + check_permission, + forget, + is_anonymous, + remember, +) from aiopg.sa import Engine from .db_models import UserStatus, users @@ -21,27 +26,36 @@ async def check_credentials(engine: Engine, email: str, password: str) -> bool: async with engine.acquire() as conn: query = users.select().where( - sa.and_(users.c.email == email, - users.c.status != UserStatus.BANNED) + sa.and_(users.c.email == email, users.c.status != UserStatus.BANNED) ) ret = await conn.execute(query) user = await ret.fetchone() if user is not None: - return check_password(password, user['password_hash'] ) + return check_password(password, user["password_hash"]) return False + def encrypt_password(password): return passlib.hash.sha256_crypt.encrypt(password, rounds=1000) + def check_password(password, password_hash): return passlib.hash.sha256_crypt.verify(password, password_hash) + def get_access_model(app: web.Application): autz_policy = app[AUTZ_KEY] return autz_policy.access_model + __all__ = ( - 'encrypt_password', 'check_credentials', - 'authorized_userid', 'forget', 'remember', 'is_anonymous', 'check_permission', - 'get_access_model', 'UserRole' + "encrypt_password", + "check_credentials", + "authorized_userid", + "forget", + "remember", + "is_anonymous", + "check_permission", + "get_access_model", + "UserRole", ) diff --git a/services/web/server/src/simcore_service_webserver/security_authorization.py b/services/web/server/src/simcore_service_webserver/security_authorization.py index bc13812e03a..c02c4d61b9d 100644 --- a/services/web/server/src/simcore_service_webserver/security_authorization.py +++ b/services/web/server/src/simcore_service_webserver/security_authorization.py @@ -23,7 +23,9 @@ class AuthorizationPolicy(AbstractAuthorizationPolicy): app: web.Application access_model: RoleBasedAccessModel - timed_cache: ExpiringDict = attr.ib(init=False, default=ExpiringDict(max_len=100, max_age_seconds=10)) + timed_cache: ExpiringDict = attr.ib( + 
init=False, default=ExpiringDict(max_len=100, max_age_seconds=10)
+    )
 
     @property
     def engine(self) -> Engine:
@@ -31,8 +33,8 @@ def engine(self) -> Engine:
 
         :return: database's engine
         """
-       # TODO: what if db is not available?
-        #return self.app.config_dict[APP_DB_ENGINE_KEY]
+        # TODO: what if db is not available?
+        # return self.app.config_dict[APP_DB_ENGINE_KEY]
         return self.app[APP_DB_ENGINE_KEY]
 
     @retry(**PostgresRetryPolicyUponOperation(log).kwargs)
@@ -41,8 +43,7 @@ async def _pg_query_user(self, identity: str) -> RowProxy:
         row = self.timed_cache.get(identity)
         if not row:
             query = users.select().where(
-                sa.and_(users.c.email == identity,
-                        users.c.status != UserStatus.BANNED)
+                sa.and_(users.c.email == identity, users.c.status != UserStatus.BANNED)
             )
             async with self.engine.acquire() as conn:
                 # NOTE: sometimes it raises psycopg2.DatabaseError in #880 and #1160
@@ -61,7 +62,12 @@ async def authorized_userid(self, identity: str) -> Optional[str]:
         user = await self._pg_query_user(identity)
         return user["id"] if user else None
 
-    async def permits(self, identity: str, permission: Union[str,Tuple], context: Optional[Dict]=None) -> bool:
+    async def permits(
+        self,
+        identity: str,
+        permission: Union[str, Tuple],
+        context: Optional[Dict] = None,
+    ) -> bool:
         """ Determines whether an identified user has permission
 
         :param identity: session identified corresponds to the user's email as defined in login.handlers.registration
@@ -70,12 +76,16 @@ async def permits(self, identity: str, permission: Union[str,Tuple], context: Op
         :return: True if user has permission to execute this operation within the given context
         """
         if identity is None or permission is None:
-            log.debug("Invalid indentity [%s] of permission [%s]. Denying access.", identity, permission)
+            log.debug(
+                "Invalid identity [%s] of permission [%s]. Denying access.",
+                identity,
+                permission,
+            )
             return False
 
         user = await self._pg_query_user(identity)
         if user:
-            role = user.get('role')
+            role = user.get("role")
             return await check_access(self.access_model, role, permission, context)
 
         return False
diff --git a/services/web/server/src/simcore_service_webserver/security_permissions.py b/services/web/server/src/simcore_service_webserver/security_permissions.py
index 9411025ca4c..c9ca981aaa3 100644
--- a/services/web/server/src/simcore_service_webserver/security_permissions.py
+++ b/services/web/server/src/simcore_service_webserver/security_permissions.py
@@ -19,7 +19,7 @@ def named_permissions() -> List[str]:
     return permissions
 
 
-def split_permission_name(permission:str) -> Tuple[str, str]:
+def split_permission_name(permission: str) -> Tuple[str, str]:
     parts = permission.split(".")
     resource, action = ".".join(parts[:-1]), parts[-1]
     return (resource, action)
diff --git a/services/web/server/src/simcore_service_webserver/security_roles.py b/services/web/server/src/simcore_service_webserver/security_roles.py
index b2b388f1570..dcd69710af2 100644
--- a/services/web/server/src/simcore_service_webserver/security_roles.py
+++ b/services/web/server/src/simcore_service_webserver/security_roles.py
@@ -18,61 +18,54 @@
 # If only needed to discrimiate a resource use `resource.sub_resource.*`
 #
 ROLES_PERMISSIONS = {
-    UserRole.ANONYMOUS: {
-        "can": [] # Add only permissions here to handles that do not require login.
- # Anonymous user can only access - }, - UserRole.GUEST: { - "can": [ - # Anonymous users need access to the filesystem because files are being transferred - "project.update", - "storage.locations.*", # "storage.datcore.read" - "storage.files.*", - - "project.open", - "project.read", # "studies.user.read", - # "studies.templates.read" - "project.node.read", - # NOTE: All services* are not necessary since it only requires login - # and there is no distinction among logged in users. - # TODO: kept temporarily as a way to denote resources - "services.pipeline.*", # "study.update", - # "study.start", - # "study.stop", - "services.interactive.*",# "study.node.start" - "services.catalog.*", - ] - }, - UserRole.USER: { - "can": [ - "project.create", # "studies.user.create", - "project.close", - "project.delete", # "study.node.create", - # "study.node.delete", - # "study.node.rename", - # "study.edge.create", - # "study.edge.delete" - "project.node.create", - "project.node.delete", - "project.tag.*", # "study.tag" - "user.profile.update", # "preferences.user.update", - # "preferences.role.update" - "user.tokens.*", # "preferences.token.create", - # "preferences.token.delete" - "tag.crud.*" # "preferences.tag" - - # NOTE: All services* are not necessary since it only requires login - # and there is no distinction among logged in users. - # TODO: kept temporarily as a way to denote resources - ], - "inherits": [UserRole.GUEST, UserRole.ANONYMOUS] - }, - UserRole.TESTER: { - "can": [ - "project.template.create", - ], - "inherits": [UserRole.USER] - } + UserRole.ANONYMOUS: { + "can": [] # Add only permissions here to handles that do not require login. + # Anonymous user can only access + }, + UserRole.GUEST: { + "can": [ + # Anonymous users need access to the filesystem because files are being transferred + "project.update", + "storage.locations.*", # "storage.datcore.read" + "storage.files.*", + "project.open", + "project.read", # "studies.user.read", + # "studies.templates.read" + "project.node.read", + # NOTE: All services* are not necessary since it only requires login + # and there is no distinction among logged in users. + # TODO: kept temporarily as a way to denote resources + "services.pipeline.*", # "study.update", + # "study.start", + # "study.stop", + "services.interactive.*", # "study.node.start" + "services.catalog.*", + ] + }, + UserRole.USER: { + "can": [ + "project.create", # "studies.user.create", + "project.close", + "project.delete", # "study.node.create", + # "study.node.delete", + # "study.node.rename", + # "study.edge.create", + # "study.edge.delete" + "project.node.create", + "project.node.delete", + "project.tag.*", # "study.tag" + "user.profile.update", # "preferences.user.update", + # "preferences.role.update" + "user.tokens.*", # "preferences.token.create", + # "preferences.token.delete" + "tag.crud.*" # "preferences.tag" + # NOTE: All services* are not necessary since it only requires login + # and there is no distinction among logged in users. + # TODO: kept temporarily as a way to denote resources + ], + "inherits": [UserRole.GUEST, UserRole.ANONYMOUS], + }, + UserRole.TESTER: {"can": ["project.template.create",], "inherits": [UserRole.USER]}, } # @@ -100,8 +93,8 @@ ### "study.node.start", # "study.node.data.push", <----------??? # "study.node.data.delete", <----------??? -#XX "study.edge.create", -#XX "study.edge.delete" +# XX "study.edge.create", +# XX "study.edge.delete" # ], # "tester": [ # "services.all.read", <----------??? 
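The permission names in ROLES_PERMISSIONS follow a dotted resource.action scheme in which a trailing "*" grants every action on a resource (e.g. "storage.files.*"). Reusing split_permission_name from security_permissions.py, a hedged sketch of how such a wildcard grant can be matched against a concrete permission (the actual access model may expand wildcards differently):

    from typing import Tuple

    def split_permission_name(permission: str) -> Tuple[str, str]:
        parts = permission.split(".")
        return ".".join(parts[:-1]), parts[-1]

    def grant_matches(granted: str, requested: str) -> bool:
        granted_resource, granted_action = split_permission_name(granted)
        resource, action = split_permission_name(requested)
        return granted_resource == resource and granted_action in ("*", action)

    assert grant_matches("storage.files.*", "storage.files.read")
    assert not grant_matches("storage.files.*", "storage.locations.read")
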
diff --git a/services/web/server/src/simcore_service_webserver/session.py b/services/web/server/src/simcore_service_webserver/session.py index 13d366c014f..f5d42ecf222 100644 --- a/services/web/server/src/simcore_service_webserver/session.py +++ b/services/web/server/src/simcore_service_webserver/session.py @@ -31,6 +31,7 @@ async def my_handler(request) logger = logging.getLogger(__file__) + def generate_key(): # secret_key must be 32 url-safe base64-encoded bytes fernet_key = fernet.Fernet.generate_key() @@ -46,17 +47,17 @@ def setup_session(app: web.Application): cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] # secret key needed by EncryptedCookieStorage: is *bytes* key with length of *32* - secret_key_bytes = cfg["secret_key"].encode('utf-8') - if len(secret_key_bytes)==0: + secret_key_bytes = cfg["secret_key"].encode("utf-8") + if len(secret_key_bytes) == 0: raise ValueError("Empty %s.secret_key in config. Expected at least length 32") - while len(secret_key_bytes)<32: + while len(secret_key_bytes) < 32: secret_key_bytes += secret_key_bytes # EncryptedCookieStorage urlsafe_b64decode inside if passes bytes storage = EncryptedCookieStorage( - secret_key=secret_key_bytes[:32], - cookie_name="API_SESSION") + secret_key=secret_key_bytes[:32], cookie_name="API_SESSION" + ) aiohttp_session.setup(app, storage) @@ -65,7 +66,4 @@ def setup_session(app: web.Application): get_session = aiohttp_session.get_session -__all__ = ( - 'setup_session', - 'get_session' -) +__all__ = ("setup_session", "get_session") diff --git a/services/web/server/src/simcore_service_webserver/session_config.py b/services/web/server/src/simcore_service_webserver/session_config.py index b5128d9ca2b..620ed3f99ee 100644 --- a/services/web/server/src/simcore_service_webserver/session_config.py +++ b/services/web/server/src/simcore_service_webserver/session_config.py @@ -5,8 +5,6 @@ """ import trafaret as T -CONFIG_SECTION_NAME = 'session' +CONFIG_SECTION_NAME = "session" -schema = T.Dict({ - "secret_key": T.String -}) +schema = T.Dict({"secret_key": T.String}) diff --git a/services/web/server/src/simcore_service_webserver/socketio/__init__.py b/services/web/server/src/simcore_service_webserver/socketio/__init__.py index e3e8622b94e..0b526bc75c6 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/__init__.py +++ b/services/web/server/src/simcore_service_webserver/socketio/__init__.py @@ -15,16 +15,16 @@ log = logging.getLogger(__name__) + @app_module_setup(__name__, ModuleCategory.SYSTEM, logger=log) def setup(app: web.Application): mgr = None sio = AsyncServer(async_mode="aiohttp", client_manager=mgr, logging=log) sio.attach(app) - app[APP_CLIENT_SOCKET_SERVER_KEY] = sio + app[APP_CLIENT_SOCKET_SERVER_KEY] = sio handlers_utils.register_handlers(app, handlers) + # alias setup_sockets = setup -__all__ = ( - "setup_sockets" -) +__all__ = "setup_sockets" diff --git a/services/web/server/src/simcore_service_webserver/socketio/config.py b/services/web/server/src/simcore_service_webserver/socketio/config.py index ac73cd57777..4bfa8b308e1 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/config.py +++ b/services/web/server/src/simcore_service_webserver/socketio/config.py @@ -11,16 +11,18 @@ from servicelib.application_keys import APP_CONFIG_KEY from socketio import AsyncServer -CONFIG_SECTION_NAME = 'socketio' +CONFIG_SECTION_NAME = "socketio" APP_CLIENT_SOCKET_SERVER_KEY = __name__ + ".socketio_socketio" APP_CLIENT_SOCKET_DECORATED_HANDLERS_KEY = __name__ + ".socketio_handlers" -schema = 
T.Dict({ - T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()), -}) +schema = T.Dict( + {T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()),} +) + def get_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] + def get_socket_server(app: web.Application) -> AsyncServer: return app[APP_CLIENT_SOCKET_SERVER_KEY] diff --git a/services/web/server/src/simcore_service_webserver/socketio/events.py b/services/web/server/src/simcore_service_webserver/socketio/events.py index f37f07eb6e5..d7477aca481 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/events.py +++ b/services/web/server/src/simcore_service_webserver/socketio/events.py @@ -1,22 +1,30 @@ """ -This module takes care of sending events to the connected webclient through the socket.io interface. +This module takes care of sending events to the connected webclient through the socket.io interface """ -import asyncio import json -from typing import Any, Dict +import logging +from typing import Any, Dict, List -from aiohttp import web +from aiohttp.web import Application + +from servicelib.utils import fire_and_forget_task from ..resource_manager.websocket_manager import managed_resource -from .config import get_socket_server +from .config import AsyncServer, get_socket_server + +log = logging.getLogger(__name__) + +async def post_messages( + app: Application, user_id: str, messages: Dict[str, Any] +) -> None: + sio: AsyncServer = get_socket_server(app) -async def post_messages(app: web.Application, user_id: str, messages: Dict[str, Any]) -> None: - sio = get_socket_server(app) - with managed_resource(user_id, None, app) as rt: - socket_ids = await rt.find_socket_ids() + with managed_resource(user_id, None, app) as registry: + socket_ids: List[str] = await registry.find_socket_ids() for sid in socket_ids: - # we only send the data to the right sockets (there might be several tabs open) - tasks = [sio.emit(event, json.dumps(data), room=sid) for event, data in messages.items()] - asyncio.ensure_future(asyncio.gather(*tasks)) + # We only send the data to the right sockets + # Notice that there might be several tabs open + for event_name, data in messages.items(): + fire_and_forget_task(sio.emit(event_name, json.dumps(data), room=sid)) diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers.py b/services/web/server/src/simcore_service_webserver/socketio/handlers.py index 57aadcfbdbb..63e782429f7 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/handlers.py @@ -11,19 +11,23 @@ from typing import Dict, List, Optional from aiohttp import web + from servicelib.observer import observe -from socketio.exceptions import \ - ConnectionRefusedError as socket_io_connection_error +from servicelib.utils import fire_and_forget_task, logged_gather +from socketio.exceptions import ConnectionRefusedError as SocketIOConnectionError from ..login.decorators import RQT_USERID_KEY, login_required from ..resource_manager.websocket_manager import managed_resource from .config import get_socket_server +from .handlers_utils import register_socketio_handler ANONYMOUS_USER_ID = -1 _SOCKET_IO_AIOHTTP_REQUEST_KEY = "aiohttp.request" log = logging.getLogger(__file__) + +@register_socketio_handler async def connect(sid: str, environ: Dict, app: web.Application) -> bool: """socketio reserved handler for when the fontend connects through socket.io @@ -40,12 +44,17 @@ 
async def connect(sid: str, environ: Dict, app: web.Application) -> bool:
     try:
         await authenticate_user(sid, app, request)
     except web.HTTPUnauthorized:
-        raise socket_io_connection_error("authentification failed")
+        raise SocketIOConnectionError("authentication failed")
+    except Exception as exc:  # pylint: disable=broad-except
+        raise SocketIOConnectionError(f"Unexpected error: {exc}")
 
     return True
 
+
 @login_required
-async def authenticate_user(sid: str, app: web.Application, request: web.Request) -> None:
+async def authenticate_user(
+    sid: str, app: web.Application, request: web.Request
+) -> None:
     """throws web.HTTPUnauthorized when the user is not recognized. Keeps the original request.
     """
     user_id = request.get(RQT_USERID_KEY, ANONYMOUS_USER_ID)
@@ -65,19 +74,27 @@ async def authenticate_user(sid: str, app: web.Application, request: web.Request
     log.info("socketio connection from user %s", user_id)
     await rt.set_socket_id(sid)
 
+
 async def disconnect_other_sockets(sio, sockets: List[str]) -> None:
     log.debug("disconnecting sockets %s", sockets)
-    logout_tasks = [sio.emit("logout", to=sid, data={"reason": "user logged out"}) for sid in sockets]
-    await asyncio.gather(*logout_tasks, return_exceptions=True)
+    logout_tasks = [
+        sio.emit("logout", to=sid, data={"reason": "user logged out"})
+        for sid in sockets
+    ]
+    await logged_gather(*logout_tasks, reraise=False)
+
     # let the client react
     await asyncio.sleep(3)
     # ensure disconnection is effective
     disconnect_tasks = [sio.disconnect(sid=sid) for sid in sockets]
-    await asyncio.gather(*disconnect_tasks, return_exceptions=True)
+    await logged_gather(*disconnect_tasks)
+
 
 @observe(event="SIGNAL_USER_LOGOUT")
-async def user_logged_out(user_id: str, client_session_id: Optional[str], app: web.Application) -> None:
-    log.debug("user %s must be disconnected", user_id)
+async def user_logged_out(
+    user_id: str, client_session_id: Optional[str], app: web.Application
+) -> None:
+    log.debug("user %s must be disconnected", user_id)
     # find the sockets related to the user
     sio = get_socket_server(app)
     with managed_resource(user_id, client_session_id, app) as rt:
@@ -91,10 +108,10 @@ async def user_logged_out(user_id: str, client_session_id: Optional[str], app: w
         sockets = await rt.find_socket_ids()
         if sockets:
             # let's do it as a task so it does not block us here
-            asyncio.ensure_future(disconnect_other_sockets(sio, sockets))
+            fire_and_forget_task(disconnect_other_sockets(sio, sockets))
 
-
+@register_socketio_handler
 async def disconnect(sid: str, app: web.Application) -> None:
     """socketio reserved handler for when the socket.io connection is disconnected.
 
@@ -105,7 +122,7 @@ async def disconnect(sid: str, app: web.Application) -> None:
     log.debug("client in room %s disconnecting", sid)
     sio = get_socket_server(app)
     async with sio.session(sid) as socketio_session:
-        if "user_id" in socketio_session:
+        if "user_id" in socketio_session:
             user_id = socketio_session["user_id"]
             client_session_id = socketio_session["client_session_id"]
             with managed_resource(user_id, client_session_id, app) as rt:
@@ -113,4 +130,8 @@ async def disconnect(sid: str, app: web.Application) -> None:
         else:
             # this should not happen!!
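user_logged_out above now schedules disconnect_other_sockets through fire_and_forget_task from servicelib.utils rather than a bare asyncio.ensure_future. Its exact implementation is not shown in this patch; a minimal equivalent, assuming the helper's job is to schedule the coroutine and surface otherwise-swallowed exceptions:

    import asyncio
    import logging

    log = logging.getLogger(__name__)

    def fire_and_forget_task(coro) -> asyncio.Task:
        task = asyncio.ensure_future(coro)

        def _report(fut: asyncio.Future) -> None:
            # without a done-callback, exceptions in detached tasks are only
            # reported when the task object is garbage-collected
            if not fut.cancelled() and fut.exception() is not None:
                log.error("fire-and-forget task failed", exc_info=fut.exception())

        task.add_done_callback(_report)
        return task
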
-            log.error("Unknown client diconnected sid: %s, session %s", sid, str(socketio_session))
+            log.error(
+                "Unknown client disconnected sid: %s, session %s",
+                sid,
+                str(socketio_session),
+            )
diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py b/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py
index 1cbdc1cbd98..0faba48f7a5 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py
@@ -7,32 +7,64 @@
 from .config import APP_CLIENT_SOCKET_DECORATED_HANDLERS_KEY, get_socket_server
 
+socketio_handlers_registry = []
+
+
 def socket_io_handler(app: web.Application):
     """this decorator allows passing additional paramters to python-socketio compatible
     handlers. I.e. python-socketio handler expect functions of type `async def function(sid, *args, **kwargs)`
     This allows to create a function of type `async def function(sid, *args, **kwargs, app: web.Application)
     """
+
     def decorator(func):
         @wraps(func)
         async def wrapped(*args, **kwargs):
             return await func(*args, **kwargs, app=app)
+
         return wrapped
+
     return decorator
 
+
 def has_socket_io_handler_signature(fun) -> bool:
     # last parameter is web.Application
-    return list(inspect.signature(fun).parameters.values())[-1].annotation == web.Application
+    return (
+        list(inspect.signature(fun).parameters.values())[-1].annotation
+        == web.Application
+    )
+
 
 def register_handlers(app: web.Application, module: ModuleType):
     sio = get_socket_server(app)
-    predicate = lambda obj: inspect.isfunction(obj) and \
-        has_socket_io_handler_signature(obj) and \
-        inspect.iscoroutinefunction(obj) and \
-        inspect.getmodule(obj) == module
-    member_fcts = inspect.getmembers(module, predicate)
+    member_fcts = [
+        fct for fct in socketio_handlers_registry if inspect.getmodule(fct) == module
+    ]
     # convert handler
-    partial_fcts = [socket_io_handler(app)(func_handler) for _, func_handler in member_fcts]
+    partial_fcts = [
+        socket_io_handler(app)(func_handler) for func_handler in member_fcts
+    ]
     app[APP_CLIENT_SOCKET_DECORATED_HANDLERS_KEY] = partial_fcts
     # register the fcts
     for func in partial_fcts:
         sio.on(func.__name__, handler=func)
+
+
+def register_socketio_handler(func: callable) -> callable:
+    """this decorator appends handlers to a registry if they fit certain rules
+
+    :param func: the function to call
+    :type func: callable
+    :return: the function to call
+    :rtype: callable
+    """
+    is_handler = (
+        inspect.isfunction(func)
+        and has_socket_io_handler_signature(func)
+        and inspect.iscoroutinefunction(func)
+    )
+    if is_handler:
+        socketio_handlers_registry.append(func)
+    else:
+        raise SyntaxError("the function shall be of type fct(*args, app: web.Application)")
+    return func
diff --git a/services/web/server/src/simcore_service_webserver/statics.py b/services/web/server/src/simcore_service_webserver/statics.py
index 8dcce0abb04..85dfc53383c 100644
--- a/services/web/server/src/simcore_service_webserver/statics.py
+++ b/services/web/server/src/simcore_service_webserver/statics.py
@@ -26,7 +26,7 @@ def get_client_outdir(app: web.Application) -> Path:
 
     # pylint 2.3.0 produces 'E1101: Instance of 'Path' has no 'expanduser' member (no-member)' ONLY
     # with the installed code and not with the development code!
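Usage sketch for the registry pattern introduced in handlers_utils.py: a handlers module decorates its coroutines with register_socketio_handler, and register_handlers later wires everything into the socket.io server. The event handler below is purely illustrative:

    from aiohttp import web

    from simcore_service_webserver.socketio.handlers_utils import register_socketio_handler

    @register_socketio_handler
    async def client_heartbeat(sid: str, data: dict, app: web.Application) -> None:
        # the trailing `app: web.Application` parameter is what
        # has_socket_io_handler_signature checks for; socket_io_handler()
        # injects the application at call time, so python-socketio itself
        # only ever passes sid and the event payload
        print(f"heartbeat from {sid}: {data}")
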
- client_dir = Path(cfg["client_outdir"]).expanduser() #pylint: disable=E1101 + client_dir = Path(cfg["client_outdir"]).expanduser() # pylint: disable=E1101 if not client_dir.exists(): txt = reason = "Front-end application is not available" if cfg["testing"]: @@ -34,6 +34,7 @@ def get_client_outdir(app: web.Application) -> Path: raise web.HTTPServiceUnavailable(reason=reason, text=txt) return client_dir + async def index(request: web.Request): """ Serves boot application under index @@ -47,9 +48,9 @@ async def index(request: web.Request): def write_statics_file(directory): statics = {} - statics['stackName'] = os.environ.get('SWARM_STACK_NAME') - statics['buildDate'] = os.environ.get('BUILD_DATE') - with open(directory / 'statics.json', 'w') as statics_file: + statics["stackName"] = os.environ.get("SWARM_STACK_NAME") + statics["buildDate"] = os.environ.get("BUILD_DATE") + with open(directory / "statics.json", "w") as statics_file: json.dump(statics, statics_file) @@ -61,16 +62,17 @@ def setup_statics(app: web.Application): outdir = get_client_outdir(app) # Checks integrity of RIA source before serving - EXPECTED_FOLDERS = ('osparc', 'resource', 'transpiled') + EXPECTED_FOLDERS = ("osparc", "resource", "transpiled") folders = [x for x in outdir.iterdir() if x.is_dir()] for name in EXPECTED_FOLDERS: folder_names = [path.name for path in folders] if name not in folder_names: raise web.HTTPServiceUnavailable( - reason="Invalid front-end source-output folders" \ - " Expected %s, got %s in %s" %(EXPECTED_FOLDERS, folder_names, outdir), - text ="Front-end application is not available" + reason="Invalid front-end source-output folders" + " Expected %s, got %s in %s" + % (EXPECTED_FOLDERS, folder_names, outdir), + text="Front-end application is not available", ) # TODO: map ui to /ui or create an alias!? @@ -79,10 +81,10 @@ def setup_statics(app: web.Application): # NOTE: source-output and build-output have both the same subfolder structure # TODO: check whether this can be done at oncen for path in folders: - app.router.add_static('/' + path.name, path) + app.router.add_static("/" + path.name, path) # Create statics file - write_statics_file(outdir / 'resource') + write_statics_file(outdir / "resource") except web.HTTPServiceUnavailable as ex: log.exception(ex.text) diff --git a/services/web/server/src/simcore_service_webserver/storage.py b/services/web/server/src/simcore_service_webserver/storage.py index 12a72dca3f5..734283adb61 100644 --- a/services/web/server/src/simcore_service_webserver/storage.py +++ b/services/web/server/src/simcore_service_webserver/storage.py @@ -10,14 +10,14 @@ from . 
import storage_routes from .storage_config import get_config -from servicelib.application_setup import app_module_setup,ModuleCategory +from servicelib.application_setup import app_module_setup, ModuleCategory log = logging.getLogger(__name__) @app_module_setup(__name__, ModuleCategory.ADDON, logger=log) def setup(app: web.Application): - specs = app[APP_OPENAPI_SPECS_KEY] # validated openapi specs + specs = app[APP_OPENAPI_SPECS_KEY] # validated openapi specs routes = storage_routes.create(specs) app.router.add_routes(routes) @@ -28,7 +28,4 @@ def setup(app: web.Application): get_storage_config = get_config -__all__ = ( - 'setup_storage', - 'get_storage_config' -) +__all__ = ("setup_storage", "get_storage_config") diff --git a/services/web/server/src/simcore_service_webserver/storage_api.py b/services/web/server/src/simcore_service_webserver/storage_api.py index c24e1195bec..cac23f492cb 100644 --- a/services/web/server/src/simcore_service_webserver/storage_api.py +++ b/services/web/server/src/simcore_service_webserver/storage_api.py @@ -17,25 +17,31 @@ def _get_storage_client(app: web.Application): cfg = get_config(app) # storage service API endpoint - endpoint = URL.build(scheme='http', - host=cfg['host'], - port=cfg['port']).with_path(cfg["version"]) + endpoint = URL.build(scheme="http", host=cfg["host"], port=cfg["port"]).with_path( + cfg["version"] + ) session = get_client_session(app) return session, endpoint -async def copy_data_folders_from_project(app, source_project, destination_project, nodes_map, user_id): +async def copy_data_folders_from_project( + app, source_project, destination_project, nodes_map, user_id +): # TODO: optimize if project has actualy data or not before doing the call client, api_endpoint = _get_storage_client(app) # /simcore-s3/folders: url = (api_endpoint / "simcore-s3/folders").with_query(user_id=user_id) - async with client.post( url , json={ - 'source':source_project, - 'destination': destination_project, - 'nodes_map': nodes_map - }, ssl=False) as resp: + async with client.post( + url, + json={ + "source": source_project, + "destination": destination_project, + "nodes_map": nodes_map, + }, + ssl=False, + ) as resp: payload = await resp.json() updated_project, error = unwrap_envelope(payload) if error: @@ -49,23 +55,30 @@ async def copy_data_folders_from_project(app, source_project, destination_projec async def _delete(session, target_url): async with session.delete(target_url, ssl=False) as resp: - log.info("delete_data_folders_of_project request responded with status %s", resp.status ) + log.info( + "delete_data_folders_of_project request responded with status %s", + resp.status, + ) # NOTE: context will automatically close connection + async def delete_data_folders_of_project(app, project_id, user_id): # SEE api/specs/storage/v0/openapi.yaml session, api_endpoint = _get_storage_client(app) - url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query(user_id=user_id) - + url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query( + user_id=user_id + ) await _delete(session, url) - #asyncio.ensure_future(_delete(session, url)) - #loop = asyncio.get_event_loop() - #loop.run_until_complete(_delete(session, url)) -async def delete_data_folders_of_project_node(app, project_id: str, node_id: str, user_id: str): + +async def delete_data_folders_of_project_node( + app, project_id: str, node_id: str, user_id: str +): # SEE api/specs/storage/v0/openapi.yaml session, api_endpoint = _get_storage_client(app) - url = (api_endpoint / 
f"simcore-s3/folders/{project_id}").with_query(user_id=user_id, node_id=node_id) + url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query( + user_id=user_id, node_id=node_id + ) await _delete(session, url) diff --git a/services/web/server/src/simcore_service_webserver/storage_config.py b/services/web/server/src/simcore_service_webserver/storage_config.py index 758c3a8c9a2..f6fc8a8da29 100644 --- a/services/web/server/src/simcore_service_webserver/storage_config.py +++ b/services/web/server/src/simcore_service_webserver/storage_config.py @@ -9,17 +9,23 @@ from aiohttp import ClientSession, web from servicelib.application_keys import APP_CLIENT_SESSION_KEY, APP_CONFIG_KEY -CONFIG_SECTION_NAME = 'storage' +CONFIG_SECTION_NAME = "storage" + +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Bool(), + T.Key("host", default="storage"): T.String(), + T.Key("port", default=11111): T.Int(), + T.Key("version", default="v0"): T.Regexp( + regexp=r"^v\d+" + ), # storage API version basepath + } +) -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("host", default="storage"): T.String(), - T.Key("port", default=11111): T.Int(), - T.Key("version", default="v0"): T.Regexp(regexp=r'^v\d+') # storage API version basepath -}) def get_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] + def get_client_session(app: web.Application) -> ClientSession: return app[APP_CLIENT_SESSION_KEY] diff --git a/services/web/server/src/simcore_service_webserver/storage_handlers.py b/services/web/server/src/simcore_service_webserver/storage_handlers.py index f92c7f0b116..700829442db 100644 --- a/services/web/server/src/simcore_service_webserver/storage_handlers.py +++ b/services/web/server/src/simcore_service_webserver/storage_handlers.py @@ -21,20 +21,21 @@ def _resolve_storage_url(request: web.Request) -> URL: cfg = get_config(request.app) # storage service API endpoint - endpoint = URL.build(scheme='http', - host=cfg['host'], - port=cfg['port']).with_path(cfg["version"]) + endpoint = URL.build(scheme="http", host=cfg["host"], port=cfg["port"]).with_path( + cfg["version"] + ) BASEPATH_INDEX = 3 # strip basepath from webserver API path (i.e. webserver api version) # >>> URL('http://storage:1234/v5/storage/asdf/').raw_parts[3:] # ('asdf', '') - suffix = "/".join( request.url.raw_parts[BASEPATH_INDEX:] ) + suffix = "/".join(request.url.raw_parts[BASEPATH_INDEX:]) # TODO: check request.query to storage! unsafe!? 
url = (endpoint / suffix).with_query(request.query).update_query(user_id=userid) return url + async def _request_storage(request: web.Request, method: str): await extract_and_validate(request) @@ -51,36 +52,41 @@ async def _request_storage(request: web.Request, method: str): return payload -#--------------------------------------------------------------------- +# --------------------------------------------------------------------- + @login_required async def get_storage_locations(request: web.Request): await check_permission(request, "storage.locations.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_datasets_metadata(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_files_metadata(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_files_metadata_dataset(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_file_metadata(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload @@ -95,19 +101,19 @@ async def update_file_meta_data(request: web.Request): @login_required async def download_file(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload @login_required async def upload_file(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'PUT') + payload = await _request_storage(request, "PUT") return payload @login_required async def delete_file(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'DELETE') + payload = await _request_storage(request, "DELETE") return payload diff --git a/services/web/server/src/simcore_service_webserver/storage_routes.py b/services/web/server/src/simcore_service_webserver/storage_routes.py index b682604d03b..a1f31ae18d8 100644 --- a/services/web/server/src/simcore_service_webserver/storage_routes.py +++ b/services/web/server/src/simcore_service_webserver/storage_routes.py @@ -17,7 +17,7 @@ def create(specs: openapi.Spec) -> List[web.RouteDef]: # TODO: consider the case in which server creates routes for both v0 and v1!!! # TODO: should this be taken from servers instead? - BASEPATH = '/v' + specs.info.version.split('.')[0] + BASEPATH = "/v" + specs.info.version.split(".")[0] log.debug("creating %s ", __name__) routes = [] @@ -25,42 +25,62 @@ def create(specs: openapi.Spec) -> List[web.RouteDef]: # TODO: routing will be done automatically using operation_id/tags, etc... 
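[editor's note] The BASEPATH_INDEX arithmetic in `_resolve_storage_url` above is easiest to see with a concrete URL. A standalone sketch using yarl (host names and the user id are made up):

from yarl import URL

# storage service endpoint, as built from the config section
endpoint = URL.build(scheme="http", host="storage", port=11111).with_path("v0")

# incoming webserver request, e.g. GET /v0/storage/locations
request_url = URL("http://webserver:8080/v0/storage/locations")

# raw_parts = ('/', 'v0', 'storage', 'locations'); slicing at index 3
# drops the leading '/', the webserver's API version and 'storage'
suffix = "/".join(request_url.raw_parts[3:])

url = (endpoint / suffix).update_query(user_id="123")
assert str(url) == "http://storage:11111/v0/locations?user_id=123"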
# storage -- - path, handler = '/storage/locations', storage_handlers.get_storage_locations - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handler, name=operation_id)) - - path, handler = '/storage/locations/{location_id}/datasets', storage_handlers.get_datasets_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handler, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/metadata', storage_handlers.get_files_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/datasets/{dataset_id}/metadata', storage_handlers.get_files_metadata_dataset - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/{fileId}/metadata', storage_handlers.get_file_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) + path, handler = "/storage/locations", storage_handlers.get_storage_locations + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handler, name=operation_id)) + + path, handler = ( + "/storage/locations/{location_id}/datasets", + storage_handlers.get_datasets_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handler, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/metadata", + storage_handlers.get_files_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/datasets/{dataset_id}/metadata", + storage_handlers.get_files_metadata_dataset, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}/metadata", + storage_handlers.get_file_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) # TODO: Implements update # path, handle = '/{location_id}/files/{fileId}/metadata', handlers.update_file_metadata # operation_id = specs.paths[path].operations['patch'].operation_id # routes.append( web.patch(BASEPATH+path, handle, name=operation_id) ) - path, handle = '/storage/locations/{location_id}/files/{fileId}', storage_handlers.download_file - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/{fileId}', storage_handlers.delete_file - operation_id = specs.paths[path].operations['delete'].operation_id - routes.append(web.delete(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/{fileId}', storage_handlers.upload_file - operation_id = specs.paths[path].operations['put'].operation_id - routes.append(web.put(BASEPATH+path, handle, name=operation_id)) - + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}", + storage_handlers.download_file, + ) + operation_id = 
specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}", + storage_handlers.delete_file, + ) + operation_id = specs.paths[path].operations["delete"].operation_id + routes.append(web.delete(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}", + storage_handlers.upload_file, + ) + operation_id = specs.paths[path].operations["put"].operation_id + routes.append(web.put(BASEPATH + path, handle, name=operation_id)) return routes diff --git a/services/web/server/src/simcore_service_webserver/studies_access.py b/services/web/server/src/simcore_service_webserver/studies_access.py index 68bb360049b..41f0947f3ad 100644 --- a/services/web/server/src/simcore_service_webserver/studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_access.py @@ -28,6 +28,7 @@ BASE_UUID = uuid.UUID("71e0eb5e-0797-4469-89ba-00a0df4d338a") + @lru_cache() def compose_uuid(template_uuid, user_id, query="") -> str: """ Creates a new uuid composing a project's and user ids such that @@ -35,7 +36,9 @@ def compose_uuid(template_uuid, user_id, query="") -> str: Enforces a constraint: a user CANNOT have multiple copies of the same template """ - new_uuid = str( uuid.uuid5(BASE_UUID, str(template_uuid) + str(user_id) + str(query)) ) + new_uuid = str( + uuid.uuid5(BASE_UUID, str(template_uuid) + str(user_id) + str(query)) + ) return new_uuid @@ -50,6 +53,7 @@ async def get_public_project(app: web.Application, project_uuid: str): prj = await db.get_template_project(project_uuid, only_published=True) return prj + # TODO: from .users import create_temporary_user async def create_temporary_user(request: web.Request): """ @@ -59,6 +63,7 @@ async def create_temporary_user(request: web.Request): from .login.handlers import ACTIVE, GUEST from .login.utils import get_client_ip, get_random_string from .security_api import encrypt_password + # from .utils import generate_passphrase # from .utils import generate_password @@ -68,20 +73,22 @@ async def create_temporary_user(request: web.Request): # FIXME: # username = generate_passphrase(number_of_words=2).replace(" ", "_").replace("'", "") username = get_random_string(min_len=5) email = username + "@guest-at-osparc.io" - # TODO: temporarily while developing, a fixed password - password = "guest" #generate_password() - - user = await db.create_user({ - 'name': username, - 'email': email, - 'password_hash': encrypt_password(password), - 'status': ACTIVE, - 'role': GUEST, - 'created_ip': get_client_ip(request), - }) + password = get_random_string(min_len=12) + + user = await db.create_user( + { + "name": username, + "email": email, + "password_hash": encrypt_password(password), + "status": ACTIVE, + "role": GUEST, + "created_ip": get_client_ip(request), + } + ) return user + # TODO: from .users import get_user? async def get_authorized_user(request: web.Request) -> Dict: from .login.cfg import get_storage @@ -89,11 +96,14 @@ async def get_authorized_user(request: web.Request) -> Dict: db = get_storage(request.app) userid = await authorized_userid(request) - user = await db.get_user({'id': userid}) + user = await db.get_user({"id": userid}) return user + # TODO: from .projects import ...? 
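[editor's note] The idempotency constraint documented in `compose_uuid` hinges on uuid5 being a pure function of its inputs. A standalone illustration (same BASE_UUID as in the patch; template and user values invented):

import uuid

BASE_UUID = uuid.UUID("71e0eb5e-0797-4469-89ba-00a0df4d338a")

def compose_uuid(template_uuid, user_id, query="") -> str:
    # uuid5 hashes namespace + name with SHA-1, so the result is fully
    # deterministic: re-opening the same study link cannot mint a second
    # project id for the same user
    return str(uuid.uuid5(BASE_UUID, str(template_uuid) + str(user_id) + str(query)))

a = compose_uuid("template-1", 42)
assert a == compose_uuid("template-1", 42)  # same inputs, same project id
assert a != compose_uuid("template-1", 43)  # another user gets another copy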
-async def copy_study_to_account(request: web.Request, template_project: Dict, user: Dict): +async def copy_study_to_account( + request: web.Request, template_project: Dict, user: Dict +): """ Creates a copy of the study to a given project in user's account @@ -111,7 +121,9 @@ async def copy_study_to_account(request: web.Request, template_project: Dict, us template_parameters = dict(request.query) # assign id to copy - project_uuid = compose_uuid(template_project["uuid"], user["id"], str(template_parameters)) + project_uuid = compose_uuid( + template_project["uuid"], user["id"], str(template_parameters) + ) try: # Avoids multiple copies of the same template on each account @@ -121,12 +133,16 @@ async def copy_study_to_account(request: web.Request, template_project: Dict, us except ProjectNotFoundError: # new project from template - project = await clone_project(request, template_project, user["id"], forced_copy_project_id=project_uuid) + project = await clone_project( + request, template_project, user["id"], forced_copy_project_id=project_uuid + ) # check project inputs and substitute template_parameters if template_parameters: log.info("Substituting parameters '%s' in template", template_parameters) - project = substitute_parameterized_inputs(project, template_parameters) or project + project = ( + substitute_parameterized_inputs(project, template_parameters) or project + ) await db.add_project(project, user["id"], force_project_uuid=True) @@ -145,8 +161,10 @@ async def access_study(request: web.Request) -> web.Response: template_project = await get_public_project(request.app, project_id) if not template_project: - raise web.HTTPNotFound(reason=f"Requested study ({project_id}) has not been published.\ Please contact the data curators for more information.") + raise web.HTTPNotFound( + reason=f"Requested study ({project_id}) has not been published.\ + Please contact the data curators for more information." + ) user = None is_anonymous_user = await is_anonymous(request) @@ -159,28 +177,39 @@ async def access_study(request: web.Request) -> web.Response: if not user: raise RuntimeError("Unable to start user session") - log.debug("Granted access to study '%d' for user %s. Copying study over ...", template_project.get('name'), user.get('email')) + log.debug( + "Granted access to study '%s' for user %s. Copying study over ...", + template_project.get("name"), + user.get("email"), + ) copied_project_id = await copy_study_to_account(request, template_project, user) log.debug("Study %s copied", copied_project_id) try: - redirect_url = request.app.router[INDEX_RESOURCE_NAME].url_for().with_fragment("/study/{}".format(copied_project_id)) + redirect_url = ( + request.app.router[INDEX_RESOURCE_NAME] + .url_for() + .with_fragment("/study/{}".format(copied_project_id)) + ) except KeyError: - log.error("Cannot redirect to website because route was not registered. Probably qx output was not ready and it was disabled (see statics.py)") - raise RuntimeError("Unable to serve front-end. Study has been anyway copied over to user.") + log.error( + "Cannot redirect to website because route was not registered. Probably qx output was not ready and it was disabled (see statics.py)" + ) + raise RuntimeError( + "Unable to serve front-end. Study has been anyway copied over to user."
+ ) response = web.HTTPFound(location=redirect_url) if is_anonymous_user: log.debug("Auto login for anonymous user %s", user["name"]) - identity = user['email'] + identity = user["email"] await remember(request, response, identity) raise response -@app_module_setup(__name__, ModuleCategory.ADDON, - logger=log) +@app_module_setup(__name__, ModuleCategory.ADDON, logger=log) def setup(app: web.Application): cfg = app[APP_CONFIG_KEY]["main"] @@ -188,18 +217,20 @@ def setup(app: web.Application): study_handler = access_study if not cfg["studies_access_enabled"]: study_handler = login_required(access_study) - log.warning("'%s' config explicitly disables anonymous users from this feature", __name__) + log.warning( + "'%s' config explicitly disables anonymous users from this feature", + __name__, + ) # TODO: make sure that these routes are filtered properly in active middlewares - app.router.add_routes([ - web.get(r"/study/{id}", study_handler, name="study"), - ]) + app.router.add_routes( + [web.get(r"/study/{id}", study_handler, name="study"),] + ) return True + # alias setup_studies_access = setup -__all__ = ( - 'setup_studies_access' -) +__all__ = "setup_studies_access" diff --git a/services/web/server/src/simcore_service_webserver/tag_handlers.py b/services/web/server/src/simcore_service_webserver/tag_handlers.py index 642da4332ff..3ef6db725c6 100644 --- a/services/web/server/src/simcore_service_webserver/tag_handlers.py +++ b/services/web/server/src/simcore_service_webserver/tag_handlers.py @@ -10,10 +10,13 @@ @login_required async def list_tags(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] async with engine.acquire() as conn: - columns = [col for col in tags.columns if col.key != 'user_id'] # pylint: disable=not-an-iterable + # pylint: disable=not-an-iterable + columns = [ + col for col in tags.columns if col.key != "user_id" + ] query = sa.select(columns).where(tags.c.user_id == uid) result = [] async for row_proxy in conn.execute(query): @@ -24,21 +27,21 @@ async def list_tags(request: web.Request): @login_required async def update_tag(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - tag_id = request.match_info.get('tag_id') + tag_id = request.match_info.get("tag_id") tag_data = await request.json() async with engine.acquire() as conn: # pylint: disable=no-value-for-parameter - query = tags.update().values( - name=tag_data['name'], - description=tag_data['description'], - color=tag_data['color'] - ).where(and_(tags.c.id == tag_id, tags.c.user_id == uid)).returning( - tags.c.id, - tags.c.name, - tags.c.description, - tags.c.color + query = ( + tags.update() + .values( + name=tag_data["name"], + description=tag_data["description"], + color=tag_data["color"], + ) + .where(and_(tags.c.id == tag_id, tags.c.user_id == uid)) + .returning(tags.c.id, tags.c.name, tags.c.description, tags.c.color) ) async with conn.execute(query) as result: if result.rowcount == 1: @@ -49,21 +52,20 @@ async def update_tag(request: web.Request): @login_required async def create_tag(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] tag_data = await request.json() async with engine.acquire() as conn: # 
pylint: disable=no-value-for-parameter - query = tags.insert().values( - user_id=uid, - name=tag_data['name'], - description=tag_data['description'], - color=tag_data['color'] - ).returning( - tags.c.id, - tags.c.name, - tags.c.description, - tags.c.color + query = ( + tags.insert() + .values( + user_id=uid, + name=tag_data["name"], + description=tag_data["description"], + color=tag_data["color"], + ) + .returning(tags.c.id, tags.c.name, tags.c.description, tags.c.color) ) async with conn.execute(query) as result: if result.rowcount == 1: @@ -74,13 +76,11 @@ async def create_tag(request: web.Request): @login_required async def delete_tag(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - tag_id = request.match_info.get('tag_id') + tag_id = request.match_info.get("tag_id") async with engine.acquire() as conn: # pylint: disable=no-value-for-parameter - query = tags.delete().where( - and_(tags.c.id == tag_id, tags.c.user_id == uid) - ) + query = tags.delete().where(and_(tags.c.id == tag_id, tags.c.user_id == uid)) async with conn.execute(query) as result: - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/src/simcore_service_webserver/tags.py b/services/web/server/src/simcore_service_webserver/tags.py index 0392024bf0c..51873a4c354 100644 --- a/services/web/server/src/simcore_service_webserver/tags.py +++ b/services/web/server/src/simcore_service_webserver/tags.py @@ -6,26 +6,32 @@ from aiohttp import web from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from . 
import tag_handlers from .rest_config import APP_OPENAPI_SPECS_KEY logger = logging.getLogger(__name__) -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=['simcore_service_webserver.rest'], - logger=logger) + +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.rest"], + logger=logger, +) def setup(app: web.Application): # routes specs = app[APP_OPENAPI_SPECS_KEY] routes = map_handlers_with_operations( - get_handlers_from_namespace(tag_handlers), - filter(lambda o: "tag" in o[3], iter_path_operations(specs)), - strict=True + get_handlers_from_namespace(tag_handlers), + filter(lambda o: "tag" in o[3], iter_path_operations(specs)), + strict=True, ) app.router.add_routes(routes) @@ -33,6 +39,4 @@ def setup(app: web.Application): # alias setup_tags = setup -__all__ = ( - 'setup_tags' -) +__all__ = "setup_tags" diff --git a/services/web/server/src/simcore_service_webserver/tracing/__init__.py b/services/web/server/src/simcore_service_webserver/tracing/__init__.py index 064f73e43da..ffb1a0621dc 100644 --- a/services/web/server/src/simcore_service_webserver/tracing/__init__.py +++ b/services/web/server/src/simcore_service_webserver/tracing/__init__.py @@ -8,22 +8,22 @@ from servicelib.tracing import schema -CONFIG_SECTION_NAME = 'tracing' +CONFIG_SECTION_NAME = "tracing" log = logging.getLogger(__name__) + @app_module_setup(__name__, ModuleCategory.ADDON, logger=log) def setup(app: web.Application): config = app[APP_CONFIG_KEY] - host=config["main"]["host"] - port=config["main"]["port"] - return setup_tracing(app, "simcore_service_webserver", host, port, config["tracing"]) + host = config["main"]["host"] + port = config["main"]["port"] + return setup_tracing( + app, "simcore_service_webserver", host, port, config["tracing"] + ) + # alias setup_app_tracing = setup tracing_section_name = CONFIG_SECTION_NAME -__all__ = ( - "setup_app_tracing", - "schema", - "tracing_section_name" -) +__all__ = ("setup_app_tracing", "schema", "tracing_section_name") diff --git a/services/web/server/src/simcore_service_webserver/users.py b/services/web/server/src/simcore_service_webserver/users.py index 50032ff6636..b2d176083d7 100644 --- a/services/web/server/src/simcore_service_webserver/users.py +++ b/services/web/server/src/simcore_service_webserver/users.py @@ -6,26 +6,32 @@ from aiohttp import web from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from . 
import users_handlers from .rest_config import APP_OPENAPI_SPECS_KEY logger = logging.getLogger(__name__) -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=['simcore_service_webserver.rest'], - logger=logger) + +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.rest"], + logger=logger, +) def setup(app: web.Application): # routes specs = app[APP_OPENAPI_SPECS_KEY] routes = map_handlers_with_operations( - get_handlers_from_namespace(users_handlers), - filter(lambda o: "me" in o[1].split("/"), iter_path_operations(specs)), - strict=True + get_handlers_from_namespace(users_handlers), + filter(lambda o: "me" in o[1].split("/"), iter_path_operations(specs)), + strict=True, ) app.router.add_routes(routes) @@ -33,6 +39,4 @@ def setup(app: web.Application): # alias setup_users = setup -__all__ = ( - 'setup_users' -) +__all__ = "setup_users" diff --git a/services/web/server/src/simcore_service_webserver/users_handlers.py b/services/web/server/src/simcore_service_webserver/users_handlers.py index bd0ed4d7b09..22766349a55 100644 --- a/services/web/server/src/simcore_service_webserver/users_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users_handlers.py @@ -26,22 +26,23 @@ async def get_my_profile(request: web.Request): @retry(**PostgresRetryPolicyUponOperation(logger).kwargs) async def _query_db(uid, engine): async with engine.acquire() as conn: - query = sa.select([ - users.c.email, - users.c.role, - users.c.name]).where(users.c.id == uid) + query = sa.select([users.c.email, users.c.role, users.c.name]).where( + users.c.id == uid + ) result = await conn.execute(query) return await result.first() - row = await _query_db(uid=request[RQT_USERID_KEY], engine=request.app[APP_DB_ENGINE_KEY]) - parts = row['name'].split(".") + [""] + row = await _query_db( + uid=request[RQT_USERID_KEY], engine=request.app[APP_DB_ENGINE_KEY] + ) + parts = row["name"].split(".") + [""] return { - 'login': row['email'], - 'first_name': parts[0], - 'last_name': parts[1], - 'role': row['role'].name.capitalize(), - 'gravatar_id': gravatar_hash(row['email']) + "login": row["email"], + "first_name": parts[0], + "last_name": parts[1], + "role": row["role"].name.capitalize(), + "gravatar_id": gravatar_hash(row["email"]), } @@ -55,22 +56,18 @@ async def update_my_profile(request: web.Request): body = await request.json() async with engine.acquire() as conn: - query = sa.select([users.c.name]).where( - users.c.id == uid) + query = sa.select([users.c.name]).where(users.c.id == uid) default_name = await conn.scalar(query) parts = default_name.split(".") + [""] - name = body.get('first_name', parts[0]) + "." + body.get('last_name', parts[1]) + name = body.get("first_name", parts[0]) + "." + body.get("last_name", parts[1]) async with engine.acquire() as conn: - query = (users.update() - .where(users.c.id == uid) - .values(name=name) - ) + query = users.update().where(users.c.id == uid).values(name=name) resp = await conn.execute(query) - assert resp.rowcount == 1 # nosec + assert resp.rowcount == 1 # nosec - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") # me/tokens/ ------------------------------------------------------ @@ -87,13 +84,13 @@ async def create_tokens(request: web.Request): # TODO: if service already, then IntegrityError is raised! How to deal with db exceptions?? 
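[editor's note] The profile handlers above round-trip first and last name through a single dot-separated `name` column; the `split(".") + [""]` idiom guarantees a last-name slot even when there is no dot. In isolation (the helper name is illustrative, not part of the codebase):

def split_db_name(name: str) -> dict:
    # appending "" guarantees parts[1] exists even for dot-less names
    parts = name.split(".") + [""]
    return {"first_name": parts[0], "last_name": parts[1]}

assert split_db_name("ada.lovelace") == {"first_name": "ada", "last_name": "lovelace"}
assert split_db_name("guest") == {"first_name": "guest", "last_name": ""}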
async with engine.acquire() as conn: stmt = tokens.insert().values( - user_id=uid, - token_service=body['service'], - token_data=body) + user_id=uid, token_service=body["service"], token_data=body + ) await conn.execute(stmt) - raise web.HTTPCreated(text=json.dumps({'data': body}), - content_type="application/json") + raise web.HTTPCreated( + text=json.dumps({"data": body}), content_type="application/json" + ) @login_required @@ -106,9 +103,7 @@ async def list_tokens(request: web.Request): user_tokens = [] async with engine.acquire() as conn: - query = (sa.select([tokens.c.token_data]) - .where(tokens.c.user_id == uid) - ) + query = sa.select([tokens.c.token_data]).where(tokens.c.user_id == uid) async for row in conn.execute(query): user_tokens.append(row["token_data"]) @@ -120,13 +115,11 @@ async def get_token(request: web.Request): await check_permission(request, "user.tokens.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - service_id = request.match_info['service'] + service_id = request.match_info["service"] async with engine.acquire() as conn: - query = (sa.select([tokens.c.token_data]) - .where(sql.and_( - tokens.c.user_id == uid, - tokens.c.token_service == service_id) ) + query = sa.select([tokens.c.token_data]).where( + sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id) ) result = await conn.execute(query) row = await result.first() @@ -142,17 +135,15 @@ async def update_token(request: web.Request): await check_permission(request, "user.tokens.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - service_id = request.match_info['service'] + service_id = request.match_info["service"] # TODO: validate body = await request.json() # TODO: optimize to a single call? async with engine.acquire() as conn: - query = (sa.select([tokens.c.token_data, tokens.c.token_id]) - .where(sql.and_( - tokens.c.user_id == uid, - tokens.c.token_service == service_id) ) + query = sa.select([tokens.c.token_data, tokens.c.token_id]).where( + sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id) ) result = await conn.execute(query) row = await result.first() @@ -161,26 +152,24 @@ async def update_token(request: web.Request): tid = row["token_id"] data.update(body) - query = (tokens.update() - .where(tokens.c.token_id == tid ) - .values(token_data=data) - ) + query = tokens.update().where(tokens.c.token_id == tid).values(token_data=data) resp = await conn.execute(query) - assert resp.rowcount == 1 # nosec + assert resp.rowcount == 1 # nosec + + raise web.HTTPNoContent(content_type="application/json") - raise web.HTTPNoContent(content_type='application/json') @login_required async def delete_token(request: web.Request): await check_permission(request, "user.tokens.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - service_id = request.match_info.get('service') + service_id = request.match_info.get("service") async with engine.acquire() as conn: - query = tokens.delete().where(sql.and_(tokens.c.user_id == uid, - tokens.c.token_service == service_id) - ) + query = tokens.delete().where( + sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id) + ) await conn.execute(query) - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 876d126aca9..b71afc71d47 100644 --- a/services/web/server/tests/conftest.py +++ 
b/services/web/server/tests/conftest.py @@ -23,19 +23,21 @@ logging.getLogger("sqlalchemy").setLevel(logging.WARNING) ## HELPERS -sys.path.append(str(current_dir / 'helpers')) +sys.path.append(str(current_dir / "helpers")) ## FIXTURES: standard paths -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def package_dir() -> Path: """ osparc-simcore installed directory """ dirpath = Path(simcore_service_webserver.__file__).resolve().parent assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def osparc_simcore_root_dir() -> Path: """ osparc-simcore repo root dir """ WILDCARD = "services/web/server" @@ -47,23 +49,26 @@ def osparc_simcore_root_dir() -> Path: msg = f"'{root_dir}' does not look like the git root directory of osparc-simcore" assert root_dir.exists(), msg assert any(root_dir.glob(WILDCARD)), msg - assert any(root_dir.glob(".git")), msg + assert any(root_dir.glob(".git")), msg return root_dir + @pytest.fixture(scope="session") def env_devel_file(osparc_simcore_root_dir) -> Path: env_devel_fpath = osparc_simcore_root_dir / ".env-devel" assert env_devel_fpath.exists() return env_devel_fpath -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def api_specs_dir(osparc_simcore_root_dir: Path) -> Path: - specs_dir = osparc_simcore_root_dir/ "api" / "specs" / "webserver" + specs_dir = osparc_simcore_root_dir / "api" / "specs" / "webserver" assert specs_dir.exists() return specs_dir -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def fake_data_dir() -> Path: dirpath = (current_dir / "data").resolve() assert dirpath.exists() diff --git a/services/web/server/tests/data/static/resource/.gitkeep b/services/web/server/tests/data/static/resource/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/services/web/server/tests/helpers/utils_assert.py b/services/web/server/tests/helpers/utils_assert.py index f6e6eddaf6b..c09d3aea5a5 100644 --- a/services/web/server/tests/helpers/utils_assert.py +++ b/services/web/server/tests/helpers/utils_assert.py @@ -4,7 +4,9 @@ from servicelib.rest_responses import unwrap_envelope -async def assert_status(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): +async def assert_status( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) assert response.status == expected_cls.status_code, (data, error) @@ -24,24 +26,29 @@ async def assert_status(response: web.Response, expected_cls:web.HTTPException, return data, error -async def assert_error(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): + +async def assert_error( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) return do_assert_error(data, error, expected_cls, expected_msg) -def do_assert_error(data, error, expected_cls:web.HTTPException, expected_msg: str=None): +def do_assert_error( + data, error, expected_cls: web.HTTPException, expected_msg: str = None +): assert not data, pformat(data) assert error, pformat(error) # TODO: improve error messages - assert len(error['errors']) == 1 + assert len(error["errors"]) == 1 - err = error['errors'][0] + err = error["errors"][0] if expected_msg: - assert expected_msg in err['message'] + assert expected_msg in err["message"] if expected_cls != web.HTTPInternalServerError: # otherwise, code is exactly 
the name of the Exception class - assert expected_cls.__name__ == err['code'] + assert expected_cls.__name__ == err["code"] return data, error diff --git a/services/web/server/tests/helpers/utils_docker.py b/services/web/server/tests/helpers/utils_docker.py index 2b0bcb4de33..9caf8c9d492 100644 --- a/services/web/server/tests/helpers/utils_docker.py +++ b/services/web/server/tests/helpers/utils_docker.py @@ -1,4 +1,3 @@ - import logging import os import subprocess @@ -12,11 +11,13 @@ log = logging.getLogger(__name__) + @retry( - wait=wait_fixed(2), - stop=stop_after_attempt(10), - after=after_log(log, logging.WARN)) -def get_service_published_port(service_name: str, target_port: Optional[int]=None) -> str: + wait=wait_fixed(2), stop=stop_after_attempt(10), after=after_log(log, logging.WARN) +) +def get_service_published_port( + service_name: str, target_port: Optional[int] = None +) -> str: """ WARNING: ENSURE that service name exposes a port in Dockerfile file or docker-compose config file """ @@ -25,25 +26,35 @@ def get_service_published_port(service_name: str, target_port: Optional[int]=Non services = [x for x in client.services.list() if service_name in x.name] if not services: - raise RuntimeError(f"Cannot find published port for service '{service_name}'. Probably services still not started.") + raise RuntimeError( + f"Cannot find published port for service '{service_name}'. Probably services still not started." + ) service_ports = services[0].attrs["Endpoint"].get("Ports") if not service_ports: - raise RuntimeError(f"Cannot find published port for service '{service_name}' in endpoint. Probably services still not started.") + raise RuntimeError( + f"Cannot find published port for service '{service_name}' in endpoint. Probably services still not started." + ) published_port = None - msg = ", ".join( f"{p.get('TargetPort')} -> {p.get('PublishedPort')}" for p in service_ports ) + msg = ", ".join( + f"{p.get('TargetPort')} -> {p.get('PublishedPort')}" for p in service_ports + ) if target_port is None: - if len(service_ports)>1: - log.warning("Multiple ports published in service '%s': %s. Defaulting to first", service_name, msg) + if len(service_ports) > 1: + log.warning( + "Multiple ports published in service '%s': %s. 
Defaulting to first", + service_name, + msg, + ) published_port = service_ports[0]["PublishedPort"] else: target_port = int(target_port) for p in service_ports: - if p['TargetPort'] == target_port: - published_port = p['PublishedPort'] + if p["TargetPort"] == target_port: + published_port = p["PublishedPort"] break if published_port is None: @@ -55,7 +66,8 @@ def get_service_published_port(service_name: str, target_port: Optional[int]=Non def run_docker_compose_config( docker_compose_paths: Union[List[Path], Path], workdir: Path, - destination_path: Optional[Path]=None) -> Dict: + destination_path: Optional[Path] = None, +) -> Dict: """ Runs docker-compose config to validate and resolve a compose file configuration - Composes all configurations passed in 'docker_compose_paths' @@ -64,19 +76,27 @@ def run_docker_compose_config( """ if not isinstance(docker_compose_paths, List): - docker_compose_paths = [docker_compose_paths, ] + docker_compose_paths = [ + docker_compose_paths, + ] temp_dir = None if destination_path is None: - temp_dir = Path(tempfile.mkdtemp(prefix='')) - destination_path = temp_dir / 'docker-compose.yml' + temp_dir = Path(tempfile.mkdtemp(prefix="")) + destination_path = temp_dir / "docker-compose.yml" - config_paths = [ f"-f {os.path.relpath(docker_compose_path, workdir)}" for docker_compose_path in docker_compose_paths] + config_paths = [ + f"-f {os.path.relpath(docker_compose_path, workdir)}" + for docker_compose_path in docker_compose_paths + ] configs_prefix = " ".join(config_paths) - subprocess.run( f"docker-compose {configs_prefix} config > {destination_path}", - shell=True, check=True, - cwd=workdir) + subprocess.run( + f"docker-compose {configs_prefix} config > {destination_path}", + shell=True, + check=True, + cwd=workdir, + ) with destination_path.open() as f: config = yaml.safe_load(f) diff --git a/services/web/server/tests/helpers/utils_environs.py b/services/web/server/tests/helpers/utils_environs.py index 29ead968d46..f02d4b1822e 100644 --- a/services/web/server/tests/helpers/utils_environs.py +++ b/services/web/server/tests/helpers/utils_environs.py @@ -8,7 +8,8 @@ import yaml -VARIABLE_SUBSTITUTION = re.compile(r'\$\{(\w+)(?:(:{0,1}[-?]{0,1})(.*))?\}$') +VARIABLE_SUBSTITUTION = re.compile(r"\$\{(\w+)(?:(:{0,1}[-?]{0,1})(.*))?\}$") + def load_env(file_handler) -> Dict: """ Deserializes an environment file like .env-devel and @@ -16,7 +17,7 @@ def load_env(file_handler) -> Dict: Analogous to json.load """ - PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$") + PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$") # Works even for `POSTGRES_EXPORTER_DATA_SOURCE_NAME=postgresql://simcore:simcore@postgres:5432/simcoredb?sslmode=disable` environ = {} @@ -27,8 +28,14 @@ def load_env(file_handler) -> Dict: environ[key] = str(value) return environ -def eval_environs_in_docker_compose(docker_compose: Dict, docker_compose_dir: Path, - host_environ: Dict=None, *, use_env_devel=True): + +def eval_environs_in_docker_compose( + docker_compose: Dict, + docker_compose_dir: Path, + host_environ: Dict = None, + *, + use_env_devel=True +): """ Resolves environments in docker compose and sets them under 'environment' section TODO: deprecated. 
Use instead docker-compose config in services/web/server/tests/integration/fixtures/docker_compose.py @@ -36,16 +43,22 @@ def eval_environs_in_docker_compose(docker_compose: Dict, docker_compose_dir: Pa """ content = deepcopy(docker_compose) for _name, service in content["services"].items(): - replace_environs_in_docker_compose_service(service, docker_compose_dir, - host_environ, use_env_devel=use_env_devel) + replace_environs_in_docker_compose_service( + service, docker_compose_dir, host_environ, use_env_devel=use_env_devel + ) return content + from typing import List -def replace_environs_in_docker_compose_service(service_section: Dict, + +def replace_environs_in_docker_compose_service( + service_section: Dict, docker_compose_dir: Path, - host_environ: Dict=None, - *, use_env_devel=True): + host_environ: Dict = None, + *, + use_env_devel=True +): """ Resolves environments in docker-compose's service section, drops any reference to env_file and sets all environs 'environment' section @@ -73,25 +86,30 @@ def replace_environs_in_docker_compose_service(service_section: Dict, for item in environ_items: key, value = item.split("=") m = VARIABLE_SUBSTITUTION.match(value) - if m: # There is a variable as value in docker-compose - envkey = m.groups()[0] # Variable name - if len(m.groups()) == 3: # There is a default value + if m: # There is a variable as value in docker-compose + envkey = m.groups()[0] # Variable name + if len(m.groups()) == 3: # There is a default value default_value = m.groups()[2] if envkey in host_environ: - value = host_environ[envkey] # Use host environ - if default_value and len(value) == 0 and m.groups()[1] == ':-': - value = default_value # Unless it is empty and default exists + value = host_environ[envkey] # Use host environ + if default_value and len(value) == 0 and m.groups()[1] == ":-": + value = default_value # Unless it is empty and default exists elif default_value: - value = default_value # Use default if exists - + value = default_value # Use default if exists + service_environ[key] = value service_section["environment"] = service_environ -def eval_service_environ(docker_compose_path:Path, service_name:str, - host_environ: Dict=None, - image_environ: Dict=None, - *, use_env_devel=True) -> Dict: + +def eval_service_environ( + docker_compose_path: Path, + service_name: str, + host_environ: Dict = None, + image_environ: Dict = None, + *, + use_env_devel=True +) -> Dict: """ Deduces a service environment with it runs in a stack from confirmation :param docker_compose_path: path to stack configuration @@ -110,8 +128,9 @@ def eval_service_environ(docker_compose_path:Path, service_name:str, content = yaml.safe_load(f) service = content["services"][service_name] - replace_environs_in_docker_compose_service(service, docker_compose_dir, - host_environ, use_env_devel=use_env_devel) + replace_environs_in_docker_compose_service( + service, docker_compose_dir, host_environ, use_env_devel=use_env_devel + ) host_environ = host_environ or {} image_environ = image_environ or {} diff --git a/services/web/server/tests/helpers/utils_login.py b/services/web/server/tests/helpers/utils_login.py index c3d503a08b0..1b42e940875 100644 --- a/services/web/server/tests/helpers/utils_login.py +++ b/services/web/server/tests/helpers/utils_login.py @@ -6,11 +6,11 @@ from simcore_service_webserver.db_models import UserRole, UserStatus from simcore_service_webserver.login.cfg import cfg, get_storage from simcore_service_webserver.login.registration import create_invitation -from 
simcore_service_webserver.login.utils import (encrypt_password, - get_random_string) +from simcore_service_webserver.login.utils import encrypt_password, get_random_string from utils_assert import assert_status -TEST_MARKS = re.compile(r'TEST (\w+):(.*)') +TEST_MARKS = re.compile(r"TEST (\w+):(.*)") + def parse_test_marks(text): """ Checs for marks as @@ -34,28 +34,28 @@ async def create_user(data=None): data = data or {} password = get_random_string(10) params = { - 'name': get_random_string(10), - 'email': '{}@gmail.com'.format(get_random_string(10)), - 'password_hash': encrypt_password(password) + "name": get_random_string(10), + "email": "{}@gmail.com".format(get_random_string(10)), + "password_hash": encrypt_password(password), } params.update(data) - params.setdefault('status', UserStatus.ACTIVE.name) - params.setdefault('role', UserRole.USER.name) - params.setdefault('created_ip', '127.0.0.1') + params.setdefault("status", UserStatus.ACTIVE.name) + params.setdefault("role", UserRole.USER.name) + params.setdefault("created_ip", "127.0.0.1") user = await cfg.STORAGE.create_user(params) - user['raw_password'] = password + user["raw_password"] = password return user + async def log_client_in(client, user_data=None, *, enable_check=True): # creates user directly in db user = await create_user(user_data) # login - url = client.app.router['auth_login'].url_for() - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'], - }) + url = client.app.router["auth_login"].url_for() + r = await client.post( + url, json={"email": user["email"], "password": user["raw_password"],} + ) if enable_check: await assert_status(r, web.HTTPOk, cfg.MSG_LOGGED_IN) @@ -65,11 +65,12 @@ async def log_client_in(client, user_data=None, *, enable_check=True): # CONTEXT MANAGERS ------------------------------ + class NewUser: - def __init__(self, params=None, app: web.Application=None): + def __init__(self, params=None, app: web.Application = None): self.params = params self.user = None - self.db = get_storage(app) if app else cfg.STORAGE # FIXME: + self.db = get_storage(app) if app else cfg.STORAGE # FIXME: async def __aenter__(self): self.user = await create_user(self.params) @@ -86,9 +87,12 @@ def __init__(self, client, params=None, *, check_if_succeeds=True): self.enable_check = check_if_succeeds async def __aenter__(self): - self.user = await log_client_in(self.client, self.params, enable_check=self.enable_check) + self.user = await log_client_in( + self.client, self.params, enable_check=self.enable_check + ) return self.user + class NewInvitation(NewUser): def __init__(self, client, guest="", host=None): super().__init__(host, client.app) diff --git a/services/web/server/tests/helpers/utils_projects.py b/services/web/server/tests/helpers/utils_projects.py index 0c3f810d986..77e5bc0f88e 100644 --- a/services/web/server/tests/helpers/utils_projects.py +++ b/services/web/server/tests/helpers/utils_projects.py @@ -12,14 +12,23 @@ from aiohttp import web -from simcore_service_webserver.projects.projects_db import APP_PROJECT_DBAPI, DB_EXCLUSIVE_COLUMNS +from simcore_service_webserver.projects.projects_db import ( + APP_PROJECT_DBAPI, + DB_EXCLUSIVE_COLUMNS, +) from simcore_service_webserver.resources import resources -fake_template_resources = ['data/'+name for name in resources.listdir('data') - if re.match(r"^fake-template-(.+).json", name) ] +fake_template_resources = [ + "data/" + name + for name in resources.listdir("data") + if re.match(r"^fake-template-(.+).json", name) 
+] -fake_project_resources = ['data/'+name for name in resources.listdir('data') - if re.match(r"^fake-user-(.+).json", name) ] +fake_project_resources = [ + "data/" + name + for name in resources.listdir("data") + if re.match(r"^fake-user-(.+).json", name) +] def load_data(name): @@ -27,7 +36,9 @@ def load_data(name): return json.load(fp) -async def create_project(app: web.Application, params: Dict=None, user_id=None, *, force_uuid=False) -> Dict: +async def create_project( + app: web.Application, params: Dict = None, user_id=None, *, force_uuid=False +) -> Dict: """ Injects new project in database for user or as template :param params: predefined project properties (except for non-writeable e.g. uuid), defaults to None @@ -39,12 +50,14 @@ async def create_project(app: web.Application, params: Dict=None, user_id=None, """ params = params or {} - project_data = load_data('data/fake-template-projects.isan.json')[0] + project_data = load_data("data/fake-template-projects.isan.json")[0] project_data.update(params) db = app[APP_PROJECT_DBAPI] - project_uuid = await db.add_project(project_data, user_id, force_project_uuid=force_uuid) + project_uuid = await db.add_project( + project_data, user_id, force_project_uuid=force_uuid + ) assert project_uuid == project_data["uuid"] for key in DB_EXCLUSIVE_COLUMNS: @@ -54,7 +67,10 @@ async def create_project(app: web.Application, params: Dict=None, user_id=None, async def delete_all_projects(app: web.Application): - from simcore_service_webserver.projects.projects_models import projects, user_to_projects + from simcore_service_webserver.projects.projects_models import ( + projects, + user_to_projects, + ) db = app[APP_PROJECT_DBAPI] async with db.engine.acquire() as conn: @@ -66,7 +82,15 @@ async def delete_all_projects(app: web.Application): class NewProject: - def __init__(self, params: Dict=None, app: web.Application=None, clear_all=True, user_id=None, *, force_uuid=False): + def __init__( + self, + params: Dict = None, + app: web.Application = None, + clear_all=True, + user_id=None, + *, + force_uuid=False + ): self.params = params self.user_id = user_id self.app = app @@ -76,10 +100,14 @@ def __init__(self, params: Dict=None, app: web.Application=None, clear_all=True, if not self.clear_all: # TODO: add delete_project. 
Deleting a single project implies having to delete as well all dependencies created - raise ValueError("UNDER DEVELOPMENT: Currently can only delete all projects ") + raise ValueError( + "UNDER DEVELOPMENT: Currently can only delete all projects " + ) async def __aenter__(self): - self.prj = await create_project(self.app, self.params, self.user_id, force_uuid=self.force_uuid) + self.prj = await create_project( + self.app, self.params, self.user_id, force_uuid=self.force_uuid + ) return self.prj async def __aexit__(self, *args): diff --git a/services/web/server/tests/helpers/utils_tokens.py b/services/web/server/tests/helpers/utils_tokens.py index 6cc65a62b23..b84a1196aff 100644 --- a/services/web/server/tests/helpers/utils_tokens.py +++ b/services/web/server/tests/helpers/utils_tokens.py @@ -29,7 +29,7 @@ async def create_token_in_db(engine, **data): "token_data": { "token_secret": get_random_string(3), "token_key": get_random_string(4), - } + }, } params.update(data) @@ -40,11 +40,17 @@ async def create_token_in_db(engine, **data): return dict(row) -async def get_token_from_db(engine, *, token_id=None, user_id=None, token_service=None, token_data=None): +async def get_token_from_db( + engine, *, token_id=None, user_id=None, token_service=None, token_data=None +): async with engine.acquire() as conn: - expr = to_expression(token_id=token_id, user_id=user_id, - token_service=token_service, token_data=token_data) - stmt = sa.select([tokens, ]).where(expr) + expr = to_expression( + token_id=token_id, + user_id=user_id, + token_service=token_service, + token_data=token_data, + ) + stmt = sa.select([tokens,]).where(expr) result = await conn.execute(stmt) row = await result.first() return dict(row) if row else None @@ -66,8 +72,10 @@ def to_expression(**params): expressions = [] for key, value in params.items(): if value is not None: - statement = (cast(getattr(tokens.c, key), String) == json.dumps(value)) \ - if isinstance(getattr(tokens.c, key).type, JSON) \ + statement = ( + (cast(getattr(tokens.c, key), String) == json.dumps(value)) + if isinstance(getattr(tokens.c, key).type, JSON) else (getattr(tokens.c, key) == value) + ) expressions.append(statement) return reduce(and_, expressions) diff --git a/services/web/server/tests/integration/computation/conftest.py b/services/web/server/tests/integration/computation/conftest.py index 73c39911ff9..77e6cc02961 100644 --- a/services/web/server/tests/integration/computation/conftest.py +++ b/services/web/server/tests/integration/computation/conftest.py @@ -15,19 +15,22 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def mock_workbench_payload(): file_path = current_dir / "workbench_sleeper_payload.json" with file_path.open() as fp: return json.load(fp) -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def mock_project(fake_data_dir, mock_workbench_payload): with (fake_data_dir / "fake-project.json").open() as fp: project = json.load(fp) project["workbench"] = mock_workbench_payload["workbench"] return project + @pytest.fixture async def logged_user(client, user_role: UserRole): """ adds a user in db and logs in with client @@ -37,29 +40,31 @@ async def logged_user(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds = user_role!=UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: yield user + @pytest.fixture async def 
user_project(client, mock_project, logged_user): mock_project["prjOwner"] = logged_user["name"] async with NewProject( - mock_project, - client.app, - user_id=logged_user["id"] + mock_project, client.app, user_id=logged_user["id"] ) as project: yield project + @pytest.fixture def project_id() -> str: return str(uuid.uuid4()) + @pytest.fixture(scope="session") def node_uuid() -> str: return "some_node_id" + @pytest.fixture(scope="session") def user_id() -> str: return "some_id" diff --git a/services/web/server/tests/integration/computation/test_computation.py b/services/web/server/tests/integration/computation/test_computation.py index 784c8147a74..5dbf108f8ba 100644 --- a/services/web/server/tests/integration/computation/test_computation.py +++ b/services/web/server/tests/integration/computation/test_computation.py @@ -16,7 +16,10 @@ from servicelib.application import create_safe_application from servicelib.application_keys import APP_CONFIG_KEY from simcore_sdk.models.pipeline_models import ( - SUCCESS, ComputationalPipeline, ComputationalTask) + SUCCESS, + ComputationalPipeline, + ComputationalTask, +) from simcore_service_webserver.computation import setup_computation from simcore_service_webserver.db import setup_db from simcore_service_webserver.login import setup_login @@ -36,29 +39,24 @@ # TODO: create conftest at computation/ folder level # Selection of core and tool services started in this swarm fixture (integration) -core_services = [ - 'director', - 'rabbit', - 'postgres', - 'sidecar', - 'storage' -] +core_services = ["director", "rabbit", "postgres", "sidecar", "storage"] ops_services = [ - 'minio', -# 'adminer', -# 'portainer' + "minio", + # 'adminer', + # 'portainer' ] + @pytest.fixture -def client(loop, aiohttp_client, - app_config, ## waits until swarm with *_services are up - ): +def client( + loop, aiohttp_client, app_config, ## waits until swarm with *_services are up +): assert app_config["rest"]["version"] == API_VERSION - app_config['storage']['enabled'] = False - app_config['main']['testing'] = True - app_config['db']['init_tables'] = True # inits postgres_service + app_config["storage"]["enabled"] = False + app_config["main"]["testing"] = True + app_config["db"]["init_tables"] = True # inits postgres_service pprint(app_config) @@ -76,13 +74,18 @@ def client(loop, aiohttp_client, setup_projects(app) setup_computation(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': app_config["main"]["port"], - 'host': app_config['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={ + "port": app_config["main"]["port"], + "host": app_config["main"]["host"], + }, + ) + ) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def mock_workbench_adjacency_list(): file_path = current_dir / "workbench_sleeper_dag_adjacency_list.json" with file_path.open() as fp: @@ -90,19 +93,28 @@ def mock_workbench_adjacency_list(): # HELPERS ---------------------------------- -def assert_db_contents(project_id, postgres_session, - mock_workbench_payload, mock_workbench_adjacency_list, - check_outputs:bool - ): +def assert_db_contents( + project_id, + postgres_session, + mock_workbench_payload, + mock_workbench_adjacency_list, + check_outputs: bool, +): # pylint: disable=no-member - pipeline_db = postgres_session.query(ComputationalPipeline)\ - .filter(ComputationalPipeline.project_id == project_id).one() + pipeline_db = ( + postgres_session.query(ComputationalPipeline) + .filter(ComputationalPipeline.project_id 
== project_id) + .one() + ) assert pipeline_db.project_id == project_id assert pipeline_db.dag_adjacency_list == mock_workbench_adjacency_list # check db comp_tasks - tasks_db = postgres_session.query(ComputationalTask)\ - .filter(ComputationalTask.project_id == project_id).all() + tasks_db = ( + postgres_session.query(ComputationalTask) + .filter(ComputationalTask.project_id == project_id) + .all() + ) mock_pipeline = mock_workbench_payload assert len(tasks_db) == len(mock_pipeline) @@ -119,14 +131,21 @@ def assert_db_contents(project_id, postgres_session, assert task_db.image["name"] == mock_pipeline[task_db.node_id]["key"] assert task_db.image["tag"] == mock_pipeline[task_db.node_id]["version"] + def assert_sleeper_services_completed(project_id, postgres_session): # pylint: disable=no-member # we wait 15 secs before testing... time.sleep(15) - pipeline_db = postgres_session.query(ComputationalPipeline)\ - .filter(ComputationalPipeline.project_id == project_id).one() - tasks_db = postgres_session.query(ComputationalTask)\ - .filter(ComputationalTask.project_id == project_id).all() + pipeline_db = ( + postgres_session.query(ComputationalPipeline) + .filter(ComputationalPipeline.project_id == project_id) + .one() + ) + tasks_db = ( + postgres_session.query(ComputationalTask) + .filter(ComputationalTask.project_id == project_id) + .all() + ) for task_db in tasks_db: if "sleeper" in task_db.image["name"]: assert task_db.state == SUCCESS @@ -135,26 +154,34 @@ def assert_sleeper_services_completed(project_id, postgres_session): # TESTS ------------------------------------------ async def test_check_health(docker_stack, client): # TODO: check health of all core_services in list above! - resp = await client.get( API_VERSION + "/") + resp = await client.get(API_VERSION + "/") data, _ = await assert_status(resp, web.HTTPOk) - assert data['name'] == 'simcore_service_webserver' - assert data['status'] == 'SERVICE_RUNNING' - - -@pytest.mark.parametrize("user_role,expected_response", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_start_pipeline(client, postgres_session, celery_service, sleeper_service, - logged_user, user_project, - mock_workbench_adjacency_list, - expected_response - ): + assert data["name"] == "simcore_service_webserver" + assert data["status"] == "SERVICE_RUNNING" + + +@pytest.mark.parametrize( + "user_role,expected_response", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_start_pipeline( + client, + postgres_session, + celery_service, + sleeper_service, + logged_user, + user_project, + mock_workbench_adjacency_list, + expected_response, +): project_id = user_project["uuid"] - mock_workbench_payload = user_project['workbench'] + mock_workbench_payload = user_project["workbench"] url = client.app.router["start_pipeline"].url_for(project_id=project_id) assert url == URL(API_PREFIX + "/computation/pipeline/{}/start".format(project_id)) @@ -166,26 +193,39 @@ async def test_start_pipeline(client, postgres_session, celery_service, sleeper_ if not error: assert "pipeline_name" in data assert "project_id" in data - assert data['project_id'] == project_id - - assert_db_contents(project_id, postgres_session, mock_workbench_payload, - mock_workbench_adjacency_list, check_outputs=False) + assert data["project_id"] == project_id + + assert_db_contents( + 
project_id, + postgres_session, + mock_workbench_payload, + mock_workbench_adjacency_list, + check_outputs=False, + ) # assert_sleeper_services_completed(project_id, postgres_session) -@pytest.mark.parametrize("user_role,expected_response", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPNoContent), - (UserRole.USER, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPNoContent), -]) -async def test_update_pipeline(client, docker_stack, postgres_session, - logged_user, user_project, - mock_workbench_payload, mock_workbench_adjacency_list, - expected_response - ): +@pytest.mark.parametrize( + "user_role,expected_response", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPNoContent), + (UserRole.USER, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPNoContent), + ], +) +async def test_update_pipeline( + client, + docker_stack, + postgres_session, + logged_user, + user_project, + mock_workbench_payload, + mock_workbench_adjacency_list, + expected_response, +): project_id = user_project["uuid"] - assert user_project['workbench'] == mock_workbench_payload['workbench'] + assert user_project["workbench"] == mock_workbench_payload["workbench"] url = client.app.router["update_pipeline"].url_for(project_id=project_id) assert url == URL(API_PREFIX + "/computation/pipeline/{}".format(project_id)) @@ -197,5 +237,10 @@ async def test_update_pipeline(client, docker_stack, postgres_session, if not error: # check db comp_pipeline - assert_db_contents(project_id, postgres_session, mock_workbench_payload['workbench'], - mock_workbench_adjacency_list, check_outputs=True) + assert_db_contents( + project_id, + postgres_session, + mock_workbench_payload["workbench"], + mock_workbench_adjacency_list, + check_outputs=True, + ) diff --git a/services/web/server/tests/integration/computation/test_rabbit.py b/services/web/server/tests/integration/computation/test_rabbit.py index a98de09d611..be2fdf92000 100644 --- a/services/web/server/tests/integration/computation/test_rabbit.py +++ b/services/web/server/tests/integration/computation/test_rabbit.py @@ -30,24 +30,22 @@ API_VERSION = "v0" # Selection of core and tool services started in this swarm fixture (integration) -core_services = [ - 'postgres', - 'redis', - 'rabbit' -] +core_services = ["postgres", "redis", "rabbit"] + +ops_services = [] -ops_services = [ -] @pytest.fixture -def client(loop, aiohttp_client, - app_config, ## waits until swarm with *_services are up - rabbit_service ## waits until rabbit is responsive - ): +def client( + loop, + aiohttp_client, + app_config, ## waits until swarm with *_services are up + rabbit_service, ## waits until rabbit is responsive +): assert app_config["rest"]["version"] == API_VERSION - app_config['storage']['enabled'] = False - app_config["db"]["init_tables"] = True # inits postgres_service + app_config["storage"]["enabled"] = False + app_config["db"]["init_tables"] = True # inits postgres_service # fake config app = create_safe_application() @@ -63,31 +61,45 @@ def client(loop, aiohttp_client, setup_sockets(app) setup_resource_manager(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': app_config["main"]["port"], - 'host': app_config['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={ + "port": app_config["main"]["port"], + "host": app_config["main"]["host"], + }, + ) + ) + @pytest.fixture def rabbit_config(app_config): rb_config = app_config[CONFIG_SECTION_NAME] yield rb_config + @pytest.fixture 
def rabbit_broker(rabbit_config): rabbit_broker = eval_broker(rabbit_config) yield rabbit_broker + @pytest.fixture async def pika_connection(loop, rabbit_broker): - connection = await aio_pika.connect(rabbit_broker, ssl=True, connection_attempts=100) + connection = await aio_pika.connect( + rabbit_broker, ssl=True, connection_attempts=100 + ) yield connection await connection.close() + # ------------------------------------------ + @pytest.fixture -async def rabbit_channels(loop, pika_connection, rabbit_config: Dict) -> Dict[str, aio_pika.Exchange]: +async def rabbit_channels( + loop, pika_connection, rabbit_config: Dict +) -> Dict[str, aio_pika.Exchange]: async def create(channel_name: str) -> aio_pika.Exchange: # create rabbit pika exchange channel channel = await pika_connection.channel() @@ -97,18 +109,17 @@ async def create(channel_name: str) -> aio_pika.Exchange: ) return pika_exchange - return { - "log": await create("log"), - "progress": await create("progress") - } + return {"log": await create("log"), "progress": await create("progress")} -def _create_rabbit_message(message_name: str, node_uuid: str, user_id: str, project_id: str, param: Any) -> Dict: +def _create_rabbit_message( + message_name: str, node_uuid: str, user_id: str, project_id: str, param: Any +) -> Dict: message = { - "Channel":message_name.title(), + "Channel": message_name.title(), "Node": node_uuid, "user_id": user_id, - "project_id": project_id + "project_id": project_id, } if message_name == "log": @@ -117,26 +128,43 @@ def _create_rabbit_message(message_name: str, node_uuid: str, user_id: str, proj message["Progress"] = param return message + @pytest.fixture def client_session_id(): return str(uuid4()) -async def _publish_messages(num_messages: int, node_uuid: str, user_id: str, project_id: str, rabbit_channels: Dict[str, aio_pika.Exchange]) -> Tuple[Dict, Dict]: - log_messages = [_create_rabbit_message("log", node_uuid, user_id, project_id, f"log number {n}") for n in range(num_messages)] - progress_messages = [_create_rabbit_message("progress", node_uuid, user_id, project_id, n/num_messages) for n in range(num_messages)] +async def _publish_messages( + num_messages: int, + node_uuid: str, + user_id: str, + project_id: str, + rabbit_channels: Dict[str, aio_pika.Exchange], +) -> Tuple[Dict, Dict]: + log_messages = [ + _create_rabbit_message("log", node_uuid, user_id, project_id, f"log number {n}") + for n in range(num_messages) + ] + progress_messages = [ + _create_rabbit_message( + "progress", node_uuid, user_id, project_id, n / num_messages + ) + for n in range(num_messages) + ] # send the messages over rabbit for n in range(num_messages): await rabbit_channels["log"].publish( aio_pika.Message( - body=json.dumps(log_messages[n]).encode(), - content_type="text/json"), routing_key = "" + body=json.dumps(log_messages[n]).encode(), content_type="text/json" + ), + routing_key="", ) await rabbit_channels["progress"].publish( aio_pika.Message( - body=json.dumps(progress_messages[n]).encode(), - content_type="text/json"), routing_key = "" + body=json.dumps(progress_messages[n]).encode(), content_type="text/json" + ), + routing_key="", ) return (log_messages, progress_messages) @@ -150,14 +178,22 @@ async def _wait_until(pred: Callable, timeout: int): await sleep(1) pytest.fail("waited too long for getting websockets events") -@pytest.mark.parametrize("user_role", [ - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_rabbit_websocket_computation(loop, logged_user, user_project, - 
socketio_client, client_session_id, mocker, - rabbit_channels, node_uuid, user_id, project_id): + +@pytest.mark.parametrize( + "user_role", [(UserRole.GUEST), (UserRole.USER), (UserRole.TESTER),] +) +async def test_rabbit_websocket_computation( + loop, + logged_user, + user_project, + socketio_client, + client_session_id, + mocker, + rabbit_channels, + node_uuid, + user_id, + project_id, +): # corresponding websocket event names websocket_log_event = "logger" @@ -173,22 +209,34 @@ async def test_rabbit_websocket_computation(loop, logged_user, user_project, NUMBER_OF_MESSAGES = 1 TIMEOUT_S = 20 - await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, user_id, project_id, rabbit_channels) + await _publish_messages( + NUMBER_OF_MESSAGES, node_uuid, user_id, project_id, rabbit_channels + ) await sleep(1) mock_log_handler_fct.assert_not_called() mock_node_update_handler_fct.assert_not_called() # publish messages with correct user id, but no project - log_messages, _ = await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], project_id, rabbit_channels) + log_messages, _ = await _publish_messages( + NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], project_id, rabbit_channels + ) + def predicate() -> bool: return mock_log_handler_fct.call_count == (NUMBER_OF_MESSAGES) + await _wait_until(predicate, TIMEOUT_S) log_calls = [call(json.dumps(message)) for message in log_messages] mock_log_handler_fct.assert_has_calls(log_calls, any_order=True) mock_node_update_handler_fct.assert_not_called() # publish message with correct user id, project but not node mock_log_handler_fct.reset_mock() - log_messages, _ = await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], user_project["uuid"], rabbit_channels) + log_messages, _ = await _publish_messages( + NUMBER_OF_MESSAGES, + node_uuid, + logged_user["id"], + user_project["uuid"], + rabbit_channels, + ) await _wait_until(predicate, TIMEOUT_S) log_calls = [call(json.dumps(message)) for message in log_messages] mock_log_handler_fct.assert_has_calls(log_calls, any_order=True) @@ -198,10 +246,19 @@ def predicate() -> bool: # publish message with correct user id, project node mock_log_handler_fct.reset_mock() node_uuid = list(user_project["workbench"])[0] - log_messages, progress_messages = await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], user_project["uuid"], rabbit_channels) + log_messages, progress_messages = await _publish_messages( + NUMBER_OF_MESSAGES, + node_uuid, + logged_user["id"], + user_project["uuid"], + rabbit_channels, + ) + def predicate2() -> bool: - return mock_log_handler_fct.call_count == (NUMBER_OF_MESSAGES) and \ - mock_node_update_handler_fct.call_count == (NUMBER_OF_MESSAGES) + return mock_log_handler_fct.call_count == ( + NUMBER_OF_MESSAGES + ) and mock_node_update_handler_fct.call_count == (NUMBER_OF_MESSAGES) + await _wait_until(predicate2, TIMEOUT_S) log_calls = [call(json.dumps(message)) for message in log_messages] mock_log_handler_fct.assert_has_calls(log_calls, any_order=True) diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index 81488bb026b..3152a13bdef 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -37,15 +37,18 @@ "fixtures.celery_service", "fixtures.postgres_service", "fixtures.redis_service", - "fixtures.websocket_client" + "fixtures.websocket_client", ] current_dir = Path(sys.argv[0] if __name__ == "__main__" else 
__file__).resolve().parent

 log = logging.getLogger(__name__)

+
 @pytest.fixture(scope="module")
-def webserver_environ(request, docker_stack: Dict, simcore_docker_compose: Dict) -> Dict[str, str]:
+def webserver_environ(
+    request, docker_stack: Dict, simcore_docker_compose: Dict
+) -> Dict[str, str]:
     """
     Started already swarm with integration stack (via dependency with 'docker_stack')

@@ -56,15 +59,19 @@ def webserver_environ(request, docker_stack: Dict, simcore_docker_compose: Dict)
     """
     assert "webserver" not in docker_stack["services"]

-    dockerfile_environ = {'SIMCORE_WEB_OUTDIR': "undefined" } # TODO: parse webserver dockerfile ??
-    docker_compose_environ = simcore_docker_compose['services']['webserver'].get('environment',{})
+    dockerfile_environ = {
+        "SIMCORE_WEB_OUTDIR": "undefined"
+    }  # TODO: parse webserver dockerfile ??
+    docker_compose_environ = simcore_docker_compose["services"]["webserver"].get(
+        "environment", {}
+    )

     environ = {}
     environ.update(dockerfile_environ)
     environ.update(docker_compose_environ)

     # get the list of core services the test module wants
-    core_services = getattr(request.module, 'core_services', [])
+    core_services = getattr(request.module, "core_services", [])

     # OVERRIDES:
     #  One of the biggest differences with respect to the real system
@@ -72,26 +79,32 @@ def webserver_environ(request, docker_stack: Dict, simcore_docker_compose: Dict)
     # version that loads only the subsystems under test. For that reason,
     # the test webserver is built-up in webserver_service fixture that runs
     # on the host.
-    services_with_published_ports = [name for name in core_services
-                                     if 'ports' in simcore_docker_compose['services'][name] ]
+    services_with_published_ports = [
+        name
+        for name in core_services
+        if "ports" in simcore_docker_compose["services"][name]
+    ]
     for name in services_with_published_ports:
-        host_key = f'{name.upper()}_HOST'
-        port_key = f'{name.upper()}_PORT'
+        host_key = f"{name.upper()}_HOST"
+        port_key = f"{name.upper()}_PORT"

         # published port is sometimes dynamically defined by the swarm
-        assert host_key in environ, "Variable names are expected to be prefixed with service names in docker-compose"
+        assert (
+            host_key in environ
+        ), "Variable names are expected to be prefixed with service names in docker-compose"
         assert port_key in environ

         # to swarm boundary since webserver is installed in the host and therefore outside the swarm's network
         published_port = get_service_published_port(name, int(environ.get(port_key)))
-        environ[host_key] = '127.0.0.1'
+        environ[host_key] = "127.0.0.1"
         environ[port_key] = published_port

-    pprint(environ) # NOTE: displayed only if error
+    pprint(environ)  # NOTE: displayed only if error
     return environ


-@pytest.fixture(scope='module')
+
+@pytest.fixture(scope="module")
 def _webserver_dev_config(webserver_environ: Dict, docker_stack: Dict) -> Dict:
     """
     Swarm with integration stack already started

@@ -106,18 +119,22 @@ def _webserver_dev_config(webserver_environ: Dict, docker_stack: Dict) -> Dict:
     with app_resources.stream("config/server-docker-dev.yaml") as f:
         cfg = yaml.safe_load(f)
         # test webserver works in host
-        cfg["main"]['host'] = '127.0.0.1'
+        cfg["main"]["host"] = "127.0.0.1"

-    with config_file_path.open('wt') as f:
+    with config_file_path.open("wt") as f:
         yaml.dump(cfg, f, default_flow_style=False)

     # Emulates cli
     config_environ = {}
     config_environ.update(webserver_environ)
-    config_environ.update( create_environ(skip_host_environ=True) ) # TODO: can be done monkeypatching os.environ and calling create_environ as well
+    config_environ.update(
+        create_environ(skip_host_environ=True)
+    )  # TODO: can be done monkeypatching os.environ and calling create_environ as well

     # validates
-    cfg_dict = trafaret_config.read_and_validate(config_file_path, app_schema, vars=config_environ)
+    cfg_dict = trafaret_config.read_and_validate(
+        config_file_path, app_schema, vars=config_environ
+    )

     # WARNING: changes to this fixture during testing propagates to other tests. Use cfg = deepcopy(cfg_dict)
     # FIXME: freeze read/only json obj
@@ -129,6 +146,7 @@ def _webserver_dev_config(webserver_environ: Dict, docker_stack: Dict) -> Dict:

     return cfg_dict

+
 @pytest.fixture(scope="function")
 def app_config(_webserver_dev_config: Dict, aiohttp_unused_port) -> Dict:
     """
diff --git a/services/web/server/tests/integration/fixtures/celery_service.py b/services/web/server/tests/integration/fixtures/celery_service.py
index bad896a07b8..a0e17ab8e8d 100644
--- a/services/web/server/tests/integration/fixtures/celery_service.py
+++ b/services/web/server/tests/integration/fixtures/celery_service.py
@@ -23,10 +23,11 @@ def celery_service(_webserver_dev_config, docker_stack):
     wait_till_celery_responsive(url)

     yield url

+
 @tenacity.retry(wait=tenacity.wait_fixed(0.1), stop=tenacity.stop_after_delay(60))
 def wait_till_celery_responsive(url):
-    app = celery.Celery('tasks', broker=url)
+    app = celery.Celery("tasks", broker=url)

-    status = celery.bin.celery.CeleryCommand.commands['status']()
+    status = celery.bin.celery.CeleryCommand.commands["status"]()
     status.app = status.get_app()
-    status.run() # raises celery.bin.base.Error if cannot run
+    status.run()  # raises celery.bin.base.Error if cannot run
diff --git a/services/web/server/tests/integration/fixtures/docker_compose.py b/services/web/server/tests/integration/fixtures/docker_compose.py
index 52179bb2ab9..5555c919a7e 100644
--- a/services/web/server/tests/integration/fixtures/docker_compose.py
+++ b/services/web/server/tests/integration/fixtures/docker_compose.py
@@ -27,7 +27,7 @@ def devel_environ(env_devel_file: Path) -> Dict[str, str]:
     """ Loads and extends .env-devel """
-    PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$")
+    PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$")
     env_devel = {}
     with env_devel_file.open() as f:
         for line in f:
@@ -37,26 +37,28 @@ def devel_environ(env_devel_file: Path) -> Dict[str, str]:
             env_devel[key] = str(value)

     # Customized EXTENSION: change some of the environ to accommodate the test case ----
-    if 'REGISTRY_SSL' in env_devel:
-        env_devel['REGISTRY_SSL'] = 'False'
-    if 'REGISTRY_URL' in env_devel:
-        env_devel['REGISTRY_URL'] = "{}:5000".format(_get_ip())
-    if 'REGISTRY_USER' in env_devel:
-        env_devel['REGISTRY_USER'] = "simcore"
-    if 'REGISTRY_PW' in env_devel:
-        env_devel['REGISTRY_PW'] = ""
-    if 'REGISTRY_AUTH' in env_devel:
-        env_devel['REGISTRY_AUTH'] = False
-
-    if 'SWARM_STACK_NAME' not in os.environ:
-        env_devel['SWARM_STACK_NAME'] = "simcore"
+    if "REGISTRY_SSL" in env_devel:
+        env_devel["REGISTRY_SSL"] = "False"
+    if "REGISTRY_URL" in env_devel:
+        env_devel["REGISTRY_URL"] = "{}:5000".format(_get_ip())
+    if "REGISTRY_USER" in env_devel:
+        env_devel["REGISTRY_USER"] = "simcore"
+    if "REGISTRY_PW" in env_devel:
+        env_devel["REGISTRY_PW"] = ""
+    if "REGISTRY_AUTH" in env_devel:
+        env_devel["REGISTRY_AUTH"] = False
+
+    if "SWARM_STACK_NAME" not in os.environ:
+        env_devel["SWARM_STACK_NAME"] = "simcore"

     return env_devel


 @pytest.fixture(scope="module")
 def temp_folder(request, tmpdir_factory) -> Path:
-    tmp = 
Path(tmpdir_factory.mktemp("docker_compose_{}".format(request.module.__name__))) + tmp = Path( + tmpdir_factory.mktemp("docker_compose_{}".format(request.module.__name__)) + ) yield tmp @@ -71,7 +73,7 @@ def env_file(osparc_simcore_root_dir: Path, devel_environ: Dict[str, str]) -> Pa if env_path.exists(): shutil.copy(env_path, backup_path) - with env_path.open('wt') as fh: + with env_path.open("wt") as fh: print(f"# TEMPORARY .env auto-generated from env_path in {__file__}") for key, value in devel_environ.items(): print(f"{key}={value}", file=fh) @@ -84,35 +86,42 @@ def env_file(osparc_simcore_root_dir: Path, devel_environ: Dict[str, str]) -> Pa backup_path.unlink() - @pytest.fixture("module") -def simcore_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path) -> Dict: +def simcore_docker_compose( + osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path +) -> Dict: """ Resolves docker-compose for simcore stack in local host Produces same as `make .stack-simcore-version.yml` in a temporary folder """ - COMPOSE_FILENAMES = [ - "docker-compose.yml", - "docker-compose.local.yml" - ] + COMPOSE_FILENAMES = ["docker-compose.yml", "docker-compose.local.yml"] # ensures .env at git_root_dir assert env_file.exists() assert env_file.parent == osparc_simcore_root_dir # target docker-compose path - docker_compose_paths = [osparc_simcore_root_dir / "services" / filename - for filename in COMPOSE_FILENAMES] - assert all(docker_compose_path.exists() for docker_compose_path in docker_compose_paths) + docker_compose_paths = [ + osparc_simcore_root_dir / "services" / filename + for filename in COMPOSE_FILENAMES + ] + assert all( + docker_compose_path.exists() for docker_compose_path in docker_compose_paths + ) - config = run_docker_compose_config(docker_compose_paths, + config = run_docker_compose_config( + docker_compose_paths, workdir=env_file.parent, - destination_path=temp_folder / "simcore_docker_compose.yml") + destination_path=temp_folder / "simcore_docker_compose.yml", + ) return config + @pytest.fixture("module") -def ops_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path) -> Dict: +def ops_docker_compose( + osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path +) -> Dict: """ Filters only services in docker-compose-ops.yml and returns yaml data Produces same as `make .stack-ops.yml` in a temporary folder @@ -122,36 +131,47 @@ def ops_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folde assert env_file.parent == osparc_simcore_root_dir # target docker-compose path - docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose-ops.yml" + docker_compose_path = ( + osparc_simcore_root_dir / "services" / "docker-compose-ops.yml" + ) assert docker_compose_path.exists() - config = run_docker_compose_config(docker_compose_path, + config = run_docker_compose_config( + docker_compose_path, workdir=env_file.parent, - destination_path=temp_folder / "ops_docker_compose.yml") + destination_path=temp_folder / "ops_docker_compose.yml", + ) return config -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def core_services_config_file(request, temp_folder, simcore_docker_compose): """ Creates a docker-compose config file for every stack of services in'core_services' module variable File is created in a temp folder """ - core_services = getattr(request.module, 'core_services', []) # TODO: PC->SAN could also be defined as a fixture instead of a single variable (as with docker_compose) - assert 
core_services, f"Expected at least one service in 'core_services' within '{request.module.__name__}'" + core_services = getattr( + request.module, "core_services", [] + ) # TODO: PC->SAN could also be defined as a fixture instead of a single variable (as with docker_compose) + assert ( + core_services + ), f"Expected at least one service in 'core_services' within '{request.module.__name__}'" - docker_compose_path = Path(temp_folder / 'simcore_docker_compose.filtered.yml') + docker_compose_path = Path(temp_folder / "simcore_docker_compose.filtered.yml") - _filter_services_and_dump(core_services, simcore_docker_compose, docker_compose_path) + _filter_services_and_dump( + core_services, simcore_docker_compose, docker_compose_path + ) return docker_compose_path -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def ops_services_config_file(request, temp_folder, ops_docker_compose): """ Creates a docker-compose config file for every stack of services in 'ops_services' module variable File is created in a temp folder """ - ops_services = getattr(request.module, 'ops_services', []) - docker_compose_path = Path(temp_folder / 'ops_docker_compose.filtered.yml') + ops_services = getattr(request.module, "ops_services", []) + docker_compose_path = Path(temp_folder / "ops_docker_compose.filtered.yml") _filter_services_and_dump(ops_services, ops_docker_compose, docker_compose_path) @@ -159,40 +179,42 @@ def ops_services_config_file(request, temp_folder, ops_docker_compose): # HELPERS --------------------------------------------- -def _get_ip()->str: +def _get_ip() -> str: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: # doesn't even have to be reachable - s.connect(('10.255.255.255', 1)) + s.connect(("10.255.255.255", 1)) IP = s.getsockname()[0] - except Exception: #pylint: disable=W0703 - IP = '127.0.0.1' + except Exception: # pylint: disable=W0703 + IP = "127.0.0.1" finally: s.close() return IP -def _filter_services_and_dump(include: List, services_compose: Dict, docker_compose_path: Path): +def _filter_services_and_dump( + include: List, services_compose: Dict, docker_compose_path: Path +): content = deepcopy(services_compose) # filters services - remove = [name for name in content['services'] if name not in include] + remove = [name for name in content["services"] if name not in include] for name in remove: - content['services'].pop(name, None) + content["services"].pop(name, None) for name in include: - service = content['services'][name] + service = content["services"][name] # removes builds (No more) if "build" in service: service.pop("build", None) # updates current docker-compose (also versioned ... 
do not change by hand) - with docker_compose_path.open('wt') as fh: - if 'TRAVIS' in os.environ: + with docker_compose_path.open("wt") as fh: + if "TRAVIS" in os.environ: # in travis we do not have access to file print("{:-^100}".format(str(docker_compose_path))) yaml.dump(content, sys.stdout, default_flow_style=False) - print("-"*100) + print("-" * 100) else: # locally we have access to file print(f"Saving config to '{docker_compose_path}'") diff --git a/services/web/server/tests/integration/fixtures/docker_registry.py b/services/web/server/tests/integration/fixtures/docker_registry.py index bea2ab0412c..a0d49514f7e 100644 --- a/services/web/server/tests/integration/fixtures/docker_registry.py +++ b/services/web/server/tests/integration/fixtures/docker_registry.py @@ -9,15 +9,17 @@ import tenacity import time + @pytest.fixture(scope="session") def docker_registry(): # run the registry outside of the stack docker_client = docker.from_env() - container = docker_client.containers.run("registry:2", - ports={"5000":"5000"}, + container = docker_client.containers.run( + "registry:2", + ports={"5000": "5000"}, environment=["REGISTRY_STORAGE_DELETE_ENABLED=true"], - restart_policy={"Name":"always"}, - detach=True + restart_policy={"Name": "always"}, + detach=True, ) host = "127.0.0.1" port = 5000 @@ -28,7 +30,7 @@ def docker_registry(): # test the registry docker_client = docker.from_env() # get the hello world example from docker hub - hello_world_image = docker_client.images.pull("hello-world","latest") + hello_world_image = docker_client.images.pull("hello-world", "latest") # login to private registry docker_client.login(registry=url, username="simcore") # tag the image @@ -50,6 +52,7 @@ def docker_registry(): while docker_client.containers.list(filters={"name": container.name}): time.sleep(1) + @tenacity.retry(wait=tenacity.wait_fixed(1), stop=tenacity.stop_after_delay(60)) def _wait_till_registry_is_responsive(url): docker_client = docker.from_env() @@ -57,7 +60,7 @@ def _wait_till_registry_is_responsive(url): return True -#pull from itisfoundation/sleeper and push into local registry +# pull from itisfoundation/sleeper and push into local registry @pytest.fixture(scope="session") def sleeper_service(docker_registry) -> str: """ Adds a itisfoundation/sleeper in docker registry @@ -73,6 +76,7 @@ def sleeper_service(docker_registry) -> str: assert image yield repo + @pytest.fixture(scope="session") def jupyter_service(docker_registry) -> str: """ Adds a itisfoundation/jupyter-base-notebook in docker registry diff --git a/services/web/server/tests/integration/fixtures/docker_swarm.py b/services/web/server/tests/integration/fixtures/docker_swarm.py index 46255a9de31..8b1abf5f571 100644 --- a/services/web/server/tests/integration/fixtures/docker_swarm.py +++ b/services/web/server/tests/integration/fixtures/docker_swarm.py @@ -14,12 +14,13 @@ import yaml -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_client(): client = docker.from_env() yield client -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def docker_swarm(docker_client): try: docker_client.swarm.reload() @@ -30,33 +31,31 @@ def docker_swarm(docker_client): yield # teardown assert docker_client.swarm.leave(force=True) - -@pytest.fixture(scope='module') -def docker_stack(docker_swarm, docker_client, core_services_config_file: Path, ops_services_config_file: Path): - stacks = { - 'simcore': core_services_config_file, - 'ops': ops_services_config_file - } +@pytest.fixture(scope="module") +def 
docker_stack( + docker_swarm, + docker_client, + core_services_config_file: Path, + ops_services_config_file: Path, +): + stacks = {"simcore": core_services_config_file, "ops": ops_services_config_file} # make up-version stacks_up = [] for stack_name, stack_config_file in stacks.items(): - subprocess.run( f"docker stack deploy -c {stack_config_file.name} {stack_name}", - shell=True, check=True, - cwd=stack_config_file.parent) + subprocess.run( + f"docker stack deploy -c {stack_config_file.name} {stack_name}", + shell=True, + check=True, + cwd=stack_config_file.parent, + ) stacks_up.append(stack_name) # wait for the stack to come up def _wait_for_services(retry_count, max_wait_time_s): - pre_states = [ - "NEW", - "PENDING", - "ASSIGNED", - "PREPARING", - "STARTING" - ] + pre_states = ["NEW", "PENDING", "ASSIGNED", "PREPARING", "STARTING"] services = docker_client.services.list() WAIT_TIME_BEFORE_RETRY = 5 start_time = time.time() @@ -71,21 +70,22 @@ def _wait_for_services(retry_count, max_wait_time_s): print(f"Waiting for {service.name}...") time.sleep(WAIT_TIME_BEFORE_RETRY) - def _print_services(msg): from pprint import pprint + print("{:*^100}".format("docker services running " + msg)) for service in docker_client.services.list(): pprint(service.attrs) - print("-"*100) + print("-" * 100) + RETRY_COUNT = 12 WAIT_TIME_BEFORE_FAILING = 60 _wait_for_services(RETRY_COUNT, WAIT_TIME_BEFORE_FAILING) _print_services("[BEFORE TEST]") yield { - 'stacks': stacks_up, - 'services': [service.name for service in docker_client.services.list()] + "stacks": stacks_up, + "services": [service.name for service in docker_client.services.list()], } _print_services("[AFTER TEST]") @@ -111,10 +111,14 @@ def _print_services(msg): for stack in stacks_up: subprocess.run(f"docker stack rm {stack}", shell=True, check=True) - while docker_client.services.list(filters={"label":f"com.docker.stack.namespace={stack}"}): + while docker_client.services.list( + filters={"label": f"com.docker.stack.namespace={stack}"} + ): time.sleep(WAIT_BEFORE_RETRY_SECS) - while docker_client.networks.list(filters={"label":f"com.docker.stack.namespace={stack}"}): + while docker_client.networks.list( + filters={"label": f"com.docker.stack.namespace={stack}"} + ): time.sleep(WAIT_BEFORE_RETRY_SECS) _print_services("[AFTER REMOVED]") diff --git a/services/web/server/tests/integration/fixtures/postgres_service.py b/services/web/server/tests/integration/fixtures/postgres_service.py index b5ecd1c9ae9..1e183463406 100644 --- a/services/web/server/tests/integration/fixtures/postgres_service.py +++ b/services/web/server/tests/integration/fixtures/postgres_service.py @@ -12,7 +12,7 @@ from sqlalchemy.orm import sessionmaker -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def postgres_db(_webserver_dev_config, webserver_environ, docker_stack): cfg = deepcopy(_webserver_dev_config["db"]["postgres"]) url = DSN.format(**cfg) @@ -30,13 +30,15 @@ def postgres_db(_webserver_dev_config, webserver_environ, docker_stack): metadata.drop_all(engine) engine.dispose() -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def postgres_session(postgres_db): Session = sessionmaker(postgres_db) session = Session() yield session session.close() + @tenacity.retry(**PostgresRetryPolicyUponInitialization().kwargs) def wait_till_postgres_responsive(url): """Check if something responds to ``url`` """ diff --git a/services/web/server/tests/integration/fixtures/rabbit_service.py 
b/services/web/server/tests/integration/fixtures/rabbit_service.py index bee2052a840..5ff28f6c3fe 100644 --- a/services/web/server/tests/integration/fixtures/rabbit_service.py +++ b/services/web/server/tests/integration/fixtures/rabbit_service.py @@ -22,6 +22,7 @@ async def rabbit_service(_webserver_dev_config: Dict, docker_stack): url = "amqp://{}:{}@{}:{}".format(user, password, host, port) await wait_till_rabbit_responsive(url) + @tenacity.retry(wait=tenacity.wait_fixed(0.1), stop=tenacity.stop_after_delay(60)) async def wait_till_rabbit_responsive(url: str): await aio_pika.connect(url) diff --git a/services/web/server/tests/integration/fixtures/redis_service.py b/services/web/server/tests/integration/fixtures/redis_service.py index 2f3ab61f373..66dab115689 100644 --- a/services/web/server/tests/integration/fixtures/redis_service.py +++ b/services/web/server/tests/integration/fixtures/redis_service.py @@ -12,7 +12,7 @@ from yarl import URL -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") async def redis_service(loop, _webserver_dev_config, webserver_environ, docker_stack): cfg = deepcopy(_webserver_dev_config["resource_manager"]["redis"]) @@ -32,7 +32,8 @@ async def wait_till_redis_responsive(redis_url: URL) -> bool: await client.wait_closed() return True -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") async def redis_client(loop, redis_service): client = await aioredis.create_redis_pool(str(redis_service), encoding="utf-8") yield client diff --git a/services/web/server/tests/integration/fixtures/websocket_client.py b/services/web/server/tests/integration/fixtures/websocket_client.py index 04a1affd111..174a6452827 100644 --- a/services/web/server/tests/integration/fixtures/websocket_client.py +++ b/services/web/server/tests/integration/fixtures/websocket_client.py @@ -26,21 +26,26 @@ async def security_cookie(loop, client) -> str: cookie = resp.request_info.headers["Cookie"] yield cookie + @pytest.fixture() async def socketio_url(loop, client) -> str: - SOCKET_IO_PATH = '/socket.io/' + SOCKET_IO_PATH = "/socket.io/" return str(client.make_url(SOCKET_IO_PATH)) + @pytest.fixture() async def socketio_client(socketio_url: str, security_cookie: str): clients = [] async def connect(client_session_id): sio = socketio.AsyncClient() - url = str(URL(socketio_url).with_query({'client_session_id': client_session_id})) - await sio.connect(url, headers={'Cookie': security_cookie}) + url = str( + URL(socketio_url).with_query({"client_session_id": client_session_id}) + ) + await sio.connect(url, headers={"Cookie": security_cookie}) clients.append(sio) return sio + yield connect for sio in clients: await sio.disconnect() diff --git a/services/web/server/tests/integration/test_project_workflow.py b/services/web/server/tests/integration/test_project_workflow.py index 91a154636ca..51261eb14aa 100644 --- a/services/web/server/tests/integration/test_project_workflow.py +++ b/services/web/server/tests/integration/test_project_workflow.py @@ -35,27 +35,27 @@ # Selection of core and tool services started in this swarm fixture (integration) core_services = [ - 'director', - 'postgres', - 'redis', + "director", + "postgres", + "redis", ] ops_services = [ -# 'adminer' + # 'adminer' ] @pytest.fixture -def client(loop, aiohttp_client, - app_config, ## waits until swarm with *_services are up - ): +def client( + loop, aiohttp_client, app_config, ## waits until swarm with *_services are up +): assert app_config["rest"]["version"] == API_VERSION - app_config['main']['testing'] = 
True - app_config['db']['init_tables'] = True + app_config["main"]["testing"] = True + app_config["db"]["init_tables"] = True - app_config['storage']['enabled'] = False - app_config['rabbit']['enabled'] = False + app_config["storage"]["enabled"] = False + app_config["rabbit"]["enabled"] = False pprint(app_config) @@ -69,10 +69,15 @@ def client(loop, aiohttp_client, setup_resource_manager(app) assert setup_projects(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': app_config["main"]["port"], - 'host': app_config['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={ + "port": app_config["main"]["port"], + "host": app_config["main"]["host"], + }, + ) + ) @pytest.fixture(scope="session") @@ -82,6 +87,7 @@ def fake_template_projects(package_dir: Path) -> Dict: with projects_file.open() as fp: return json.load(fp) + @pytest.fixture(scope="session") def fake_template_projects_isan(package_dir: Path) -> Dict: projects_file = package_dir / "data" / "fake-template-projects.isan.json" @@ -89,6 +95,7 @@ def fake_template_projects_isan(package_dir: Path) -> Dict: with projects_file.open() as fp: return json.load(fp) + @pytest.fixture(scope="session") def fake_template_projects_osparc(package_dir: Path) -> Dict: projects_file = package_dir / "data" / "fake-template-projects.osparc.json" @@ -96,29 +103,30 @@ def fake_template_projects_osparc(package_dir: Path) -> Dict: with projects_file.open() as fp: return json.load(fp) + @pytest.fixture def fake_db(): Fake.reset() yield Fake Fake.reset() + @pytest.fixture def fake_project_data(fake_data_dir: Path) -> Dict: with (fake_data_dir / "fake-project.json").open() as fp: return json.load(fp) + @pytest.fixture -async def logged_user(client): #, role: UserRole): +async def logged_user(client): # , role: UserRole): """ adds a user in db and logs in with client NOTE: role fixture is defined as a parametrization below """ - role = UserRole.USER # TODO: parameterize roles + role = UserRole.USER # TODO: parameterize roles async with LoggedUser( - client, - {"role": role.name}, - check_if_succeeds = role!=UserRole.ANONYMOUS + client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS ) as user: yield user await delete_all_projects(client.app) @@ -128,10 +136,14 @@ async def logged_user(client): #, role: UserRole): def computational_system_mock(mocker): # director needs access to service registry which unfortunately cannot be provided for testing. 
For that reason we need to mock # interaction with director - mock_fun = mocker.patch('simcore_service_webserver.projects.projects_handlers.update_pipeline_db', return_value=Future()) + mock_fun = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", + return_value=Future(), + ) mock_fun.return_value.set_result("") return mock_fun + @pytest.fixture async def storage_subsystem_mock(loop, mocker): """ @@ -140,21 +152,29 @@ async def storage_subsystem_mock(loop, mocker): Patched functions are exposed within projects but call storage subsystem """ # requests storage to copy data - mock = mocker.patch('simcore_service_webserver.projects.projects_api.copy_data_folders_from_project') + mock = mocker.patch( + "simcore_service_webserver.projects.projects_api.copy_data_folders_from_project" + ) + async def _mock_copy_data_from_project(*args): return args[2] mock.side_effect = _mock_copy_data_from_project # requests storage to delete data - #mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) - mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project', return_value=Future()) + # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) + mock1 = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", + return_value=Future(), + ) mock1.return_value.set_result("") return mock, mock1 + # Tests CRUD operations -------------------------------------------- # TODO: merge both unit/with_postgress/test_projects + async def _request_list(client) -> List[Dict]: # GET /v0/projects url = client.app.router["list_projects"].url_for() @@ -164,6 +184,7 @@ async def _request_list(client) -> List[Dict]: return projects + async def _request_get(client, pid) -> Dict: url = client.app.router["get_project"].url_for(project_id=pid) resp = await client.get(url) @@ -172,6 +193,7 @@ async def _request_get(client, pid) -> Dict: return project + async def _request_create(client, project): url = client.app.router["create_projects"].url_for() resp = await client.post(url, json=project) @@ -180,6 +202,7 @@ async def _request_create(client, project): return new_project + async def _request_update(client, project, pid): # PUT /v0/projects/{project_id} url = client.app.router["replace_project"].url_for(project_id=pid) @@ -189,6 +212,7 @@ async def _request_update(client, project, pid): return updated_project + async def _request_delete(client, pid): url = client.app.router["delete_project"].url_for(project_id=pid) resp = await client.delete(url) @@ -196,8 +220,13 @@ async def _request_delete(client, pid): await assert_status(resp, web.HTTPNoContent) - -async def test_workflow(client, fake_project_data, logged_user, computational_system_mock, storage_subsystem_mock): +async def test_workflow( + client, + fake_project_data, + logged_user, + computational_system_mock, + storage_subsystem_mock, +): # empty list projects = await _request_list(client) assert not projects @@ -209,13 +238,15 @@ async def test_workflow(client, fake_project_data, logged_user, computational_sy projects = await _request_list(client) assert len(projects) == 1 for key in projects[0].keys(): - if key not in ('uuid', 'prjOwner', 'creationDate', 'lastChangeDate'): + if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate"): assert projects[0][key] == 
fake_project_data[key] modified_project = deepcopy(projects[0]) modified_project["name"] = "some other name" modified_project["description"] = "John Raynor killed Kerrigan" - modified_project["workbench"]["ReNamed"] = modified_project["workbench"].pop( list(modified_project["workbench"].keys())[0] ) + modified_project["workbench"]["ReNamed"] = modified_project["workbench"].pop( + list(modified_project["workbench"].keys())[0] + ) modified_project["workbench"]["ReNamed"]["position"]["x"] = 0 # modify pid = modified_project["uuid"] @@ -226,13 +257,13 @@ async def test_workflow(client, fake_project_data, logged_user, computational_sy assert len(projects) == 1 for key in projects[0].keys(): - if key not in ('lastChangeDate', ): + if key not in ("lastChangeDate",): assert projects[0][key] == modified_project[key] # get project = await _request_get(client, pid) for key in project.keys(): - if key not in ('lastChangeDate', ): + if key not in ("lastChangeDate",): assert project[key] == modified_project[key] # delete @@ -270,10 +301,13 @@ async def test_delete_invalid_project(client, logged_user): await assert_status(resp, web.HTTPNotFound) -async def test_list_template_projects(client, logged_user, fake_db, +async def test_list_template_projects( + client, + logged_user, + fake_db, fake_template_projects, fake_template_projects_isan, - fake_template_projects_osparc + fake_template_projects_osparc, ): fake_db.load_template_projects() url = client.app.router["list_projects"].url_for() @@ -282,6 +316,8 @@ async def test_list_template_projects(client, logged_user, fake_db, projects, _ = await assert_status(resp, web.HTTPOk) # fake-template-projects.json + fake-template-projects.isan.json + fake-template-projects.osparc.json - assert len(projects) == (len(fake_template_projects) + \ - len(fake_template_projects_isan) + \ - len(fake_template_projects_osparc)) + assert len(projects) == ( + len(fake_template_projects) + + len(fake_template_projects_isan) + + len(fake_template_projects_osparc) + ) diff --git a/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py b/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py index 87a6717d978..709aa2aa64b 100644 --- a/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py +++ b/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py @@ -1,4 +1,3 @@ - """ Tests reverse proxy within an environment having a selection of core and tool services running in a swarm """ @@ -37,15 +36,9 @@ MAX_BOOT_TIME_SECS = 20 # Selection of core and tool services started in this swarm fixture (integration) -core_services = [ - 'director', - '' -] +core_services = ["director", ""] -ops_services = [ - 'adminer', - 'portainer' -] +ops_services = ["adminer", "portainer"] @pytest.fixture(scope="session") @@ -53,11 +46,13 @@ def here() -> Path: return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_simcore_root_dir(here) -> Path: root_dir = here.parent.parent.parent.parent.parent.resolve() assert root_dir.exists(), "Is this service within osparc-simcore repo?" 
-    assert any(root_dir.glob("services/web/server")), "%s does not look like rootdir" % root_dir
+    assert any(root_dir.glob("services/web/server")), (
+        "%s does not look like rootdir" % root_dir
+    )
     return root_dir

@@ -75,21 +70,25 @@ def _load_docker_compose(docker_compose_path) -> Dict[str, str]:
         content = yaml.safe_load(f)
     return content

+
 @pytest.fixture("session")
 def services_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]:
     docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose.yml"
     return _load_docker_compose(docker_compose_path)

+
 @pytest.fixture("session")
 def ops_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]:
-    docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose-ops.yml"
+    docker_compose_path = (
+        osparc_simcore_root_dir / "services" / "docker-compose-ops.yml"
+    )
     return _load_docker_compose(docker_compose_path)


 @pytest.fixture("session")
 def devel_environ(env_devel_file) -> Dict[str, str]:
     """ Environ dict from .env-devel """
-    PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$")
+    PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$")
     env_devel = {}
     with env_devel_file.open() as f:
         for line in f:
@@ -107,9 +106,11 @@ def webserver_environ(devel_environ, services_docker_compose) -> Dict[str, str]:
     """
     Environment variables for the webserver application
     """
-    dockerfile_environ = {'SIMCORE_WEB_OUTDIR': "undefined" } # TODO: parse webserver dockerfile ??
+    dockerfile_environ = {
+        "SIMCORE_WEB_OUTDIR": "undefined"
+    }  # TODO: parse webserver dockerfile ??

-    service = services_docker_compose['services']['webserver']
+    service = services_docker_compose["services"]["webserver"]
     docker_compose_environ = resolve_environ(service, devel_environ)

     environ = {}
@@ -123,12 +124,15 @@ def webserver_environ(devel_environ, services_docker_compose) -> Dict[str, str]:
     # the test webserver is built-up in webserver_service fixture that runs
     # on the host.
     for name in core_services:
-        environ['%s_HOST' % name.upper()] = '127.0.0.1'
-        environ['%s_PORT' % name.upper()] = \
-            services_docker_compose['services'][name]['ports'][0].split(':')[
-                0]  # takes port exposed
+        environ["%s_HOST" % name.upper()] = "127.0.0.1"
+        environ["%s_PORT" % name.upper()] = services_docker_compose["services"][name][
+            "ports"
+        ][0].split(":")[
+            0
+        ]  # takes port exposed
         # to swarm boundary since webserver is installed in the host and therefore outside the swarm's network

     from pprint import pprint
+
     pprint(environ)
     return environ

@@ -137,14 +141,15 @@ def webserver_environ(devel_environ, services_docker_compose) -> Dict[str, str]:
 @pytest.fixture
 def app_config(here, webserver_environ) -> Dict:
     config_file_path = here / "config.yaml"
+
     def _recreate_config_file():
         with app_resources.stream("config/server-docker-dev.yaml") as f:
             cfg = yaml.safe_load(f)
         # test webserver works in host
-        cfg["main"]['host'] = '127.0.0.1'
+        cfg["main"]["host"] = "127.0.0.1"
         cfg["director"]["host"] = "127.0.0.1"

-        with config_file_path.open('wt') as f:
+        with config_file_path.open("wt") as f:
             yaml.dump(cfg, f, default_flow_style=False)

     _recreate_config_file()
@@ -154,10 +159,14 @@ def _recreate_config_file():
     # Emulates cli
     config_environ = {}
     config_environ.update(webserver_environ)
-    config_environ.update( create_environ(skip_host_environ=True) ) # TODO: can be done monkeypatching os.environ and calling create_environ as well
+    config_environ.update(
+        create_environ(skip_host_environ=True)
+    )  # TODO: can be done monkeypatching os.environ and calling create_environ as well

     # validates
-    cfg_dict = trafaret_config.read_and_validate(config_file_path, app_schema, vars=config_environ)
+    cfg_dict = trafaret_config.read_and_validate(
+        config_file_path, app_schema, vars=config_environ
+    )

     yield cfg_dict

@@ -166,17 +175,18 @@ def _recreate_config_file():
     config_file_path.unlink()


-
 # DOCKER STACK -------------------------------------------

-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def docker_compose_file(here, services_docker_compose, devel_environ):
     """ Overrides pytest-docker fixture
     """
-    docker_compose_path = here / 'docker-compose.yml'
+    docker_compose_path = here / "docker-compose.yml"

     # creates a docker-compose file only with SERVICES and replaces environ
-    _recreate_compose_file(core_services, services_docker_compose, docker_compose_path, devel_environ)
+    _recreate_compose_file(
+        core_services, services_docker_compose, docker_compose_path, devel_environ
+    )

     logger.info(get_content_formatted(docker_compose_path))

@@ -186,29 +196,32 @@ def docker_compose_file(here, services_docker_compose, devel_environ):

     docker_compose_path.unlink()


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def docker_client():
     client = docker.from_env()
     yield client

-@pytest.fixture(scope='session')
+
+@pytest.fixture(scope="session")
 def docker_swarm(docker_client):
     docker_client.swarm.init()
     yield
     assert docker_client.swarm.leave(force=True) == True


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def docker_stack(docker_swarm, docker_client, docker_compose_file: Path):
     """ """
-    assert subprocess.run(
+    assert (
+        subprocess.run(
             "docker stack deploy -c {} services".format(docker_compose_file.name),
             shell=True,
-            cwd=docker_compose_file.parent
-        ).returncode == 0
+            cwd=docker_compose_file.parent,
+        ).returncode
+        == 0
+    )

     # NOTE:
     # ``failed to create service services_apihub: Error response from daemon: network services_default not found```
# workaround is to restart daemon: ``sudo systemctl restart docker``` @@ -224,7 +237,6 @@ def docker_stack(docker_swarm, docker_client, docker_compose_file: Path): assert subprocess.run("docker stack rm services", shell=True).returncode == 0 - # CORE SERVICES --------------------------------------------- # @pytest.fixture(scope='session') # def director_service(docker_services, docker_ip): @@ -235,16 +247,15 @@ def docker_stack(docker_swarm, docker_client, docker_compose_file: Path): # return docker_ip, docker_services.port_for('director', 8001) - - # HELPERS --------------------------------------------- # TODO: should be reused integration-* + def get_content_formatted(textfile: Path) -> str: return "{:=^10s}\n{}\n{:=^10s}".format( - str(textfile), - textfile.read_text("utf8"), - '') + str(textfile), textfile.read_text("utf8"), "" + ) + def resolve_environ(service, environ): _environs = {} @@ -262,21 +273,23 @@ def resolve_environ(service, environ): return _environs -def _recreate_compose_file(keep, services_compose, docker_compose_path: Path, devel_environ): +def _recreate_compose_file( + keep, services_compose, docker_compose_path: Path, devel_environ +): # reads service/docker-compose.yml content = deepcopy(services_compose) # remove unnecessary services - remove = [name for name in content['services'] if name not in keep] + remove = [name for name in content["services"] if name not in keep] for name in remove: - content['services'].pop(name, None) + content["services"].pop(name, None) for name in keep: - service = content['services'][name] + service = content["services"][name] # remove builds if "build" in service: service.pop("build", None) - service['image'] = "services_{}:latest".format(name) + service["image"] = "services_{}:latest".format(name) # replaces environs if "environment" in service: _environs = {} @@ -285,7 +298,9 @@ def _recreate_compose_file(keep, services_compose, docker_compose_path: Path, de if value.startswith("${") and value.endswith("}"): value = devel_environ.get(value[2:-1], value) _environs[key] = value - service["environment"] = [ "{}={}".format(k,v) for k,v in _environs.items() ] + service["environment"] = [ + "{}={}".format(k, v) for k, v in _environs.items() + ] # updates current docker-compose (also versioned ... 
do not change by hand) - with docker_compose_path.open('wt') as f: + with docker_compose_path.open("wt") as f: yaml.dump(content, f, default_flow_style=False) diff --git a/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py b/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py index bbd65aeedac..8e41d7aa844 100644 --- a/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py +++ b/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py @@ -15,28 +15,35 @@ import simcore_service_webserver.reverse_proxy.handlers.paraview as rp_paraview from servicelib.application import create_safe_application from servicelib.rest_responses import unwrap_envelope -from simcore_service_webserver.application import (setup_app_proxy, - setup_director, setup_rest) +from simcore_service_webserver.application import ( + setup_app_proxy, + setup_director, + setup_rest, +) from simcore_service_webserver.reverse_proxy.settings import PROXY_MOUNTPOINT -API_VERSION = 'v0' +API_VERSION = "v0" + + @pytest.fixture -#def webserver_service(loop, app_config, director_service, aiohttp_unused_port, aiohttp_server, here): -#def webserver_service(loop, app_config, aiohttp_unused_port, aiohttp_server, here): -def webserver_service(docker_stack, loop, app_config, aiohttp_unused_port, aiohttp_server, here): +# def webserver_service(loop, app_config, director_service, aiohttp_unused_port, aiohttp_server, here): +# def webserver_service(loop, app_config, aiohttp_unused_port, aiohttp_server, here): +def webserver_service( + docker_stack, loop, app_config, aiohttp_unused_port, aiohttp_server, here +): # OVERRIDES app_config: # - server lives with the testing framework - port = app_config['main']['port'] = aiohttp_unused_port() - host = app_config['main']['host'] = '127.0.0.1' + port = app_config["main"]["port"] = aiohttp_unused_port() + host = app_config["main"]["host"] = "127.0.0.1" # - disable some subsystems - app_config['rabbit']['enabled'] = False - app_config['db']['enabled'] = False - app_config['storage']['enabled'] = False + app_config["rabbit"]["enabled"] = False + app_config["db"]["enabled"] = False + app_config["storage"]["enabled"] = False # TODO: parse_and_validate config_app_path = here / "config.app.yaml" - with (config_app_path).open('wt') as f: + with (config_app_path).open("wt") as f: yaml.dump(app_config, f, default_flow_style=False) # app @@ -44,7 +51,7 @@ def webserver_service(docker_stack, loop, app_config, aiohttp_unused_port, aioht setup_rest(app) setup_director(app, disable_login=True) - setup_app_proxy(app) # <-----------|UNDER TEST + setup_app_proxy(app) # <-----------|UNDER TEST server = loop.run_until_complete(aiohttp_server(app, port=port)) @@ -52,25 +59,32 @@ def webserver_service(docker_stack, loop, app_config, aiohttp_unused_port, aioht config_app_path.unlink() + @pytest.fixture -def client(loop, webserver_service, aiohttp_client): +def client(loop, webserver_service, aiohttp_client): """ webserver's API client """ - client = loop.run_until_complete(aiohttp_client(webserver_service) ) + client = loop.run_until_complete(aiohttp_client(webserver_service)) return client # TESTS ---------------------------------------------------------------------------- # + [(service_key, "????", "NJKfISIRB-%d"%i) for i, service_key in enumerate(rp_jupyter.SUPPORTED_IMAGE_NAME)] -@pytest.mark.parametrize("service_key,service_version,service_uuid", [ - (rp_jupyter.SUPPORTED_IMAGE_NAME[0], "1.7.0", 
"NJKfISIRB"), - ("simcore/services/dynamic/raw-graphs", "2.8.0", "4J6GoxSNL"), - ("simcore/services/dynamic/modeler/webserver", "2.7.0", "4k4zZL90S"), - #(rp_paraview.SUPPORTED_IMAGE_NAME, "1.0.5", "EkE7LSU0r"), - ]) -async def test_reverse_proxy_workflow(client, service_key, service_version, service_uuid): + +@pytest.mark.parametrize( + "service_key,service_version,service_uuid", + [ + (rp_jupyter.SUPPORTED_IMAGE_NAME[0], "1.7.0", "NJKfISIRB"), + ("simcore/services/dynamic/raw-graphs", "2.8.0", "4J6GoxSNL"), + ("simcore/services/dynamic/modeler/webserver", "2.7.0", "4k4zZL90S"), + # (rp_paraview.SUPPORTED_IMAGE_NAME, "1.0.5", "EkE7LSU0r"), + ], +) +async def test_reverse_proxy_workflow( + client, service_key, service_version, service_uuid +): """ client <--> webserver <--> director @@ -79,59 +93,68 @@ async def test_reverse_proxy_workflow(client, service_key, service_version, serv - Tests webserserver.reverser proxy subsystem as well """ # List services in registry ------------------------------------------------ - resp = await client.get("/"+API_VERSION+"/services?service_type=interactive") - assert resp.status == 200, (await resp.text()) + resp = await client.get("/" + API_VERSION + "/services?service_type=interactive") + assert resp.status == 200, await resp.text() payload = await resp.json() data, error = unwrap_envelope(payload) assert data assert not error - assert any(srv['key']==service_key and srv['version']==service_version for srv in data), \ - "version of service NOT listed in registry" + assert any( + srv["key"] == service_key and srv["version"] == service_version for srv in data + ), "version of service NOT listed in registry" # Start backend dynamic service ------------------------------------------------ - resp = await client.post( URL("/"+API_VERSION+"/running_interactive_services").with_query( - service_key=service_key, - service_version =service_version, - service_uuid = service_uuid) + resp = await client.post( + URL("/" + API_VERSION + "/running_interactive_services").with_query( + service_key=service_key, + service_version=service_version, + service_uuid=service_uuid, + ) ) - assert resp.status == 201, (await resp.text()) + assert resp.status == 201, await resp.text() payload = await resp.json() data, error = unwrap_envelope(payload) assert data assert not error - service_basepath = data['service_basepath'] + service_basepath = data["service_basepath"] assert service_basepath == PROXY_MOUNTPOINT + "/" + service_uuid # Wait until service is responsive---------------------------------------------- - #TODO: all dynamic services boot time should be bounded!! + # TODO: all dynamic services boot time should be bounded!! 
WAIT_FIXED_SECS = 5 MAX_TRIALS = 5 count = 0 - while count>> msg: %s', pprint.pformat(msg)) + logger.info(">>> msg: %s", pprint.pformat(msg)) mt = msg.type md = msg.data if mt == aiohttp.WSMsgType.TEXT: @@ -46,27 +47,29 @@ async def ws_forward(ws_from, ws_to): await ws_to.close(code=ws_to.close_code, message=msg.extra) else: raise ValueError( - 'unexpected message type: %s' % pprint.pformat(msg)) + "unexpected message type: %s" % pprint.pformat(msg) + ) - await asyncio.wait([ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], return_when=asyncio.FIRST_COMPLETED) + await asyncio.wait( + [ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], + return_when=asyncio.FIRST_COMPLETED, + ) return ws_server else: async with client.request( - req.method, baseUrl+mountPoint+proxyPath, + req.method, + baseUrl + mountPoint + proxyPath, headers=reqH, allow_redirects=False, - data=await req.read() + data=await req.read(), ) as res: headers = res.headers.copy() body = await res.read() - return web.Response( - headers=headers, - status=res.status, - body=body - ) + return web.Response(headers=headers, status=res.status, body=body) return ws_server + app = web.Application() -app.router.add_route('*', mountPoint + '{proxyPath:.*}', handler) +app.router.add_route("*", mountPoint + "{proxyPath:.*}", handler) web.run_app(app, port=3984) diff --git a/services/web/server/tests/sandbox/paraview-proxy.py b/services/web/server/tests/sandbox/paraview-proxy.py index 3dc6de60a0d..1b5651789f7 100644 --- a/services/web/server/tests/sandbox/paraview-proxy.py +++ b/services/web/server/tests/sandbox/paraview-proxy.py @@ -9,29 +9,30 @@ logger = logging.getLogger(__name__) -baseUrl = 'http://0.0.0.0:8080' -mountPoint = '/fakeUuid' +baseUrl = "http://0.0.0.0:8080" +mountPoint = "/fakeUuid" async def handler(req): - proxyPath = req.match_info.get( - 'proxyPath', 'no proxyPath placeholder defined') + proxyPath = req.match_info.get("proxyPath", "no proxyPath placeholder defined") reqH = req.headers.copy() - if reqH['connection'].lower() == 'upgrade' and reqH['upgrade'].lower() == 'websocket' and req.method == 'GET': + if ( + reqH["connection"].lower() == "upgrade" + and reqH["upgrade"].lower() == "websocket" + and req.method == "GET" + ): ws_server = web.WebSocketResponse() await ws_server.prepare(req) - logger.info('##### WS_SERVER %s', pprint.pformat(ws_server)) + logger.info("##### WS_SERVER %s", pprint.pformat(ws_server)) client_session = aiohttp.ClientSession(cookies=req.cookies) - async with client_session.ws_connect( - baseUrl+proxyPath, - ) as ws_client: - logger.info('##### WS_CLIENT %s', pprint.pformat(ws_client)) + async with client_session.ws_connect(baseUrl + proxyPath,) as ws_client: + logger.info("##### WS_CLIENT %s", pprint.pformat(ws_client)) async def ws_forward(ws_from, ws_to): async for msg in ws_from: - #logger.info('>>> msg: %s',pprint.pformat(msg)) + # logger.info('>>> msg: %s',pprint.pformat(msg)) mt = msg.type md = msg.data if mt == aiohttp.WSMsgType.TEXT: @@ -46,34 +47,36 @@ async def ws_forward(ws_from, ws_to): await ws_to.close(code=ws_to.close_code, message=msg.extra) else: raise ValueError( - 'unexpected message type: %s' % pprint.pformat(msg)) + "unexpected message type: %s" % pprint.pformat(msg) + ) - await asyncio.wait([ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], return_when=asyncio.FIRST_COMPLETED) + await asyncio.wait( + [ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], + return_when=asyncio.FIRST_COMPLETED, + ) return 
ws_server else: async with client.request( - req.method, baseUrl+proxyPath, + req.method, + baseUrl + proxyPath, headers=reqH, allow_redirects=False, - data=await req.read() + data=await req.read(), ) as res: headers = res.headers.copy() - del headers['content-length'] + del headers["content-length"] body = await res.read() - if proxyPath == '/Visualizer.js': - body = body.replace(b'"/ws"', b'"%s/ws"' % - mountPoint.encode(), 1) + if proxyPath == "/Visualizer.js": + body = body.replace(b'"/ws"', b'"%s/ws"' % mountPoint.encode(), 1) body = body.replace( - b'"/paraview/"', b'"%s/paraview/"' % mountPoint.encode(), 1) + b'"/paraview/"', b'"%s/paraview/"' % mountPoint.encode(), 1 + ) logger.info("fixed Visualizer.js paths on the fly") - return web.Response( - headers=headers, - status=res.status, - body=body - ) + return web.Response(headers=headers, status=res.status, body=body) return ws_server + app = web.Application() -app.router.add_route('*', mountPoint + '{proxyPath:.*}', handler) +app.router.add_route("*", mountPoint + "{proxyPath:.*}", handler) web.run_app(app, port=3985) diff --git a/services/web/server/tests/sandbox/reverse_proxy.py b/services/web/server/tests/sandbox/reverse_proxy.py index f9c479334ef..2b147a641c0 100644 --- a/services/web/server/tests/sandbox/reverse_proxy.py +++ b/services/web/server/tests/sandbox/reverse_proxy.py @@ -16,11 +16,8 @@ from simcore_service_webserver.reverse_proxy import APP_SOCKETS_KEY if __name__ == "__main__": - BASE_URL = 'http://0.0.0.0:8888' - MOUNT_POINT = '/x/12345' - - - + BASE_URL = "http://0.0.0.0:8888" + MOUNT_POINT = "/x/12345" def adapter(req: web.Request): return rp_handlers.generic.handler(req, service_url=BASE_URL) @@ -28,5 +25,5 @@ def adapter(req: web.Request): app = web.Application() app[APP_SOCKETS_KEY] = list() - app.router.add_route('*', MOUNT_POINT + '/{proxyPath:.*}', adapter) + app.router.add_route("*", MOUNT_POINT + "/{proxyPath:.*}", adapter) web.run_app(app, port=3984) diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py index 1353d4c9705..d68411e366a 100644 --- a/services/web/server/tests/unit/conftest.py +++ b/services/web/server/tests/unit/conftest.py @@ -28,14 +28,14 @@ log = logging.getLogger(__name__) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def here(): cdir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent assert cdir == current_dir, "Somebody changing current_dir?" 
return cdir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def fake_static_dir(fake_data_dir: Path) -> Dict: return fake_data_dir / "static" @@ -45,10 +45,12 @@ def fake_project(fake_data_dir: Path) -> Dict: with (fake_data_dir / "fake-project.json").open() as fp: yield json.load(fp) + @pytest.fixture def api_version_prefix() -> str: return "v0" + @pytest.fixture def empty_project(): def create(): @@ -60,15 +62,18 @@ def create(): "creationDate": now_str(), "lastChangeDate": now_str(), "thumbnail": "", - "workbench": {} + "workbench": {}, } return empty_project + return create @pytest.fixture def project_schema_file(api_version_prefix) -> Path: - prj_schema_path = resources.get_path(f"api/{api_version_prefix}/schemas/project-v0.0.1.json") + prj_schema_path = resources.get_path( + f"api/{api_version_prefix}/schemas/project-v0.0.1.json" + ) assert prj_schema_path.exists() return prj_schema_path @@ -78,7 +83,8 @@ def activity_data(fake_data_dir: Path) -> Dict: with (fake_data_dir / "test_activity_data.json").open() as fp: yield json.load(fp) + @pytest.fixture def test_tags_data(fake_data_dir: Path) -> Dict: - with (fake_data_dir / 'test_tags_data.json').open() as fp: - yield json.load(fp).get('added_tags') + with (fake_data_dir / "test_tags_data.json").open() as fp: + yield json.load(fp).get("added_tags") diff --git a/services/web/server/tests/unit/test_activity.py b/services/web/server/tests/unit/test_activity.py index a03d21cb101..79068d489c3 100644 --- a/services/web/server/tests/unit/test_activity.py +++ b/services/web/server/tests/unit/test_activity.py @@ -28,49 +28,65 @@ def future_with_result(result): @pytest.fixture def mocked_login_required(mocker): mock = mocker.patch( - 'simcore_service_webserver.login.decorators.login_required', - lambda h: h) + "simcore_service_webserver.login.decorators.login_required", lambda h: h + ) importlib.reload(handlers) return mock + @pytest.fixture def mocked_monitoring(loop, mocker, activity_data): - prometheus_data = activity_data.get('prometheus') - cpu_ret = prometheus_data.get('cpu_return') - mocker.patch('simcore_service_webserver.activity.handlers.get_cpu_usage', - return_value=future_with_result(cpu_ret)) + prometheus_data = activity_data.get("prometheus") + cpu_ret = prometheus_data.get("cpu_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_cpu_usage", + return_value=future_with_result(cpu_ret), + ) - mem_ret = prometheus_data.get('memory_return') - mocker.patch('simcore_service_webserver.activity.handlers.get_memory_usage', - return_value=future_with_result(mem_ret)) + mem_ret = prometheus_data.get("memory_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_memory_usage", + return_value=future_with_result(mem_ret), + ) + + labels_ret = prometheus_data.get("labels_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_container_metric_for_labels", + return_value=future_with_result(labels_ret), + ) - labels_ret = prometheus_data.get('labels_return') - mocker.patch('simcore_service_webserver.activity.handlers.get_container_metric_for_labels', - return_value=future_with_result(labels_ret)) + celery_data = activity_data.get("celery") + celery_ret = celery_data.get("celery_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_celery_reserved", + return_value=future_with_result(celery_ret), + ) - celery_data = activity_data.get('celery') - celery_ret = celery_data.get('celery_return') - 
mocker.patch('simcore_service_webserver.activity.handlers.get_celery_reserved', - return_value=future_with_result(celery_ret)) @pytest.fixture def mocked_monitoring_down(mocker): mocker.patch( - 'simcore_service_webserver.activity.handlers.query_prometheus', - side_effect=ClientConnectionError) + "simcore_service_webserver.activity.handlers.query_prometheus", + side_effect=ClientConnectionError, + ) mocker.patch( - 'simcore_service_webserver.activity.handlers.celery_reserved', - side_effect=ClientConnectionError) + "simcore_service_webserver.activity.handlers.celery_reserved", + side_effect=ClientConnectionError, + ) return mocker + @pytest.fixture def app_config(fake_data_dir: Path, osparc_simcore_root_dir: Path): - with open(fake_data_dir/"test_activity_config.yml") as fh: + with open(fake_data_dir / "test_activity_config.yml") as fh: content = fh.read() - config = content.replace("${OSPARC_SIMCORE_REPO_ROOTDIR}", str(osparc_simcore_root_dir)) + config = content.replace( + "${OSPARC_SIMCORE_REPO_ROOTDIR}", str(osparc_simcore_root_dir) + ) return yaml.safe_load(config) + @pytest.fixture def client(loop, aiohttp_client, app_config): app = create_safe_application(app_config) @@ -85,35 +101,37 @@ def client(loop, aiohttp_client, app_config): async def test_has_login_required(client): - resp = await client.get('/v0/activity/status') + resp = await client.get("/v0/activity/status") await assert_status(resp, web.HTTPUnauthorized) + async def test_monitoring_up(mocked_login_required, mocked_monitoring, client): - QUEUED_NODE_ID = '35f95ad4-67b8-4ed8-bd55-84a5d600e687' - RUNNING_NODE_ID = '894dd8d5-de3b-4767-950c-7c3ed8f51d8c' + QUEUED_NODE_ID = "35f95ad4-67b8-4ed8-bd55-84a5d600e687" + RUNNING_NODE_ID = "894dd8d5-de3b-4767-950c-7c3ed8f51d8c" - resp = await client.get('/v0/activity/status') + resp = await client.get("/v0/activity/status") data, _ = await assert_status(resp, web.HTTPOk) - assert QUEUED_NODE_ID in data, 'Queued node not present' - assert RUNNING_NODE_ID in data, 'Running node not present' + assert QUEUED_NODE_ID in data, "Queued node not present" + assert RUNNING_NODE_ID in data, "Running node not present" celery = data.get(QUEUED_NODE_ID) prometheus = data.get(RUNNING_NODE_ID) - assert 'queued' in celery, 'There is no queued key for queued node' - assert celery.get('queued'), 'Queued should be True for queued node' + assert "queued" in celery, "There is no queued key for queued node" + assert celery.get("queued"), "Queued should be True for queued node" + + assert "limits" in prometheus, "There is no limits key for executing node" + assert "stats" in prometheus, "There is no stats key for executed node" - assert 'limits' in prometheus, 'There is no limits key for executing node' - assert 'stats' in prometheus, 'There is no stats key for executed node' + limits = prometheus.get("limits") + assert limits.get("cpus") == 4.0, "Incorrect value: Cpu limit" + assert limits.get("mem") == 2048.0, "Incorrect value: Memory limit" - limits = prometheus.get('limits') - assert limits.get('cpus') == 4.0, 'Incorrect value: Cpu limit' - assert limits.get('mem') == 2048.0, 'Incorrect value: Memory limit' + stats = prometheus.get("stats") + assert stats.get("cpuUsage") == 3.9952102200000006, "Incorrect value: Cpu usage" + assert stats.get("memUsage") == 177.664, "Incorrect value: Memory usage" - stats = prometheus.get('stats') - assert stats.get('cpuUsage') == 3.9952102200000006, 'Incorrect value: Cpu usage' - assert stats.get('memUsage') == 177.664, 'Incorrect value: Memory usage' async def 
test_monitoring_down(mocked_login_required, mocked_monitoring_down, client): - resp = await client.get('/v0/activity/status') + resp = await client.get("/v0/activity/status") await assert_status(resp, web.HTTPNoContent) diff --git a/services/web/server/tests/unit/test_configs.py b/services/web/server/tests/unit/test_configs.py index bb8919f5f8b..e3fb1a7c363 100644 --- a/services/web/server/tests/unit/test_configs.py +++ b/services/web/server/tests/unit/test_configs.py @@ -18,16 +18,19 @@ from simcore_service_webserver.application_config import create_schema from simcore_service_webserver.cli import parse, setup_parser from simcore_service_webserver.login import APP_CONFIG_KEY -from simcore_service_webserver.login import \ - CONFIG_SECTION_NAME as LOGIN_SECTION -from simcore_service_webserver.login import (DB_SECTION, SMTP_SECTION, - _create_login_config) +from simcore_service_webserver.login import CONFIG_SECTION_NAME as LOGIN_SECTION +from simcore_service_webserver.login import ( + DB_SECTION, + SMTP_SECTION, + _create_login_config, +) from simcore_service_webserver.login.cfg import DEFAULTS as CONFIG_DEFAULTS from simcore_service_webserver.login.cfg import Cfg from simcore_service_webserver.resources import resources from utils_environs import eval_service_environ, load_env -config_yaml_filenames = [str(name) for name in resources.listdir("config") ] +config_yaml_filenames = [str(name) for name in resources.listdir("config")] + @pytest.fixture("session") def app_config_schema(): @@ -55,39 +58,50 @@ def devel_environ(env_devel_file): env_devel = load_env(f) return env_devel + @pytest.fixture("session") -def service_webserver_environ(services_docker_compose_file, devel_environ, osparc_simcore_root_dir): +def service_webserver_environ( + services_docker_compose_file, devel_environ, osparc_simcore_root_dir +): """ Creates a dict with the environment variables inside of a webserver container """ host_environ = devel_environ image_environ = { - 'SIMCORE_WEB_OUTDIR': 'home/scu/services/web/client', # defined in Dockerfile - 'OSPARC_SIMCORE_REPO_ROOTDIR': str(osparc_simcore_root_dir) # defined if pip install --edit (but not in travis!) + "SIMCORE_WEB_OUTDIR": "home/scu/services/web/client", # defined in Dockerfile + "OSPARC_SIMCORE_REPO_ROOTDIR": str( + osparc_simcore_root_dir + ), # defined if pip install --edit (but not in travis!) } - webserver_environ = eval_service_environ(services_docker_compose_file, "webserver", - host_environ, image_environ, use_env_devel=True) + webserver_environ = eval_service_environ( + services_docker_compose_file, + "webserver", + host_environ, + image_environ, + use_env_devel=True, + ) return webserver_environ - @pytest.fixture("session") def app_submodules_with_setup_funs(package_dir) -> List: """ subsystem = all modules in package with a setup function """ + def is_py_module(path: Path) -> bool: - return not path.name.startswith((".", "__")) and \ - ( path.suffix == ".py" or any(path.glob("__init__.py")) ) + return not path.name.startswith((".", "__")) and ( + path.suffix == ".py" or any(path.glob("__init__.py")) + ) modules = [] for path in package_dir.iterdir(): if is_py_module(path): name = path.name.replace(path.suffix, "") module = importlib.import_module("." 
+ name, package_dir.name) - if module.__name__ != 'simcore_service_webserver.application': + if module.__name__ != "simcore_service_webserver.application": if any(inspect.getmembers(module, is_setup_function)): modules.append(module) @@ -102,32 +116,36 @@ def app_subsystems(app_submodules_with_setup_funs) -> List[Dict]: setup_members = inspect.getmembers(module, is_setup_function) if setup_members: # finds setup for module - module_name = module.__name__.replace(".__init__", '') + module_name = module.__name__.replace(".__init__", "") setup_fun = None for name, fun in setup_members: - if fun.metadata()['module_name'] == module_name: + if fun.metadata()["module_name"] == module_name: setup_fun = fun break - assert setup_fun, f"None of {setup_members} are setup funs for {module_name}" + assert ( + setup_fun + ), f"None of {setup_members} are setup funs for {module_name}" metadata.append(setup_fun.metadata()) return metadata - # TESTS ---------------------------------------------------------------------- + @pytest.mark.parametrize("configfile", config_yaml_filenames) def test_correctness_under_environ(configfile, service_webserver_environ): parser = setup_parser(argparse.ArgumentParser("test-parser")) - with mock.patch('os.environ', service_webserver_environ): + with mock.patch("os.environ", service_webserver_environ): cmd = ["-c", configfile] config = parse(cmd, parser) for key, value in config.items(): - assert value != 'None', "Use instead Null in {} for {}".format(configfile, key) + assert value != "None", "Use instead Null in {} for {}".format( + configfile, key + ) # adds some defaults checks here @@ -137,14 +155,16 @@ def test_setup_per_app_subsystem(app_submodules_with_setup_funs): setup_members = inspect.getmembers(module, is_setup_function) if setup_members: # finds setup for module - module_name = module.__name__.replace(".__init__", '') + module_name = module.__name__.replace(".__init__", "") setup_fun = None for name, fun in setup_members: - if fun.metadata()['module_name'] == module_name: + if fun.metadata()["module_name"] == module_name: setup_fun = fun break - assert setup_fun, f"None of {setup_members} are setup funs for {module_name}" + assert ( + setup_fun + ), f"None of {setup_members} are setup funs for {module_name}" def test_schema_sections(app_config_schema, app_subsystems): @@ -153,7 +173,10 @@ def test_schema_sections(app_config_schema, app_subsystems): Every section in the config-file (except for 'version' and 'main') is named after an application's subsystem """ - section_names= [ metadata['config_section'] for metadata in app_subsystems] + ['version', 'main'] + section_names = [metadata["config_section"] for metadata in app_subsystems] + [ + "version", + "main", + ] for section in app_config_schema.keys: assert section.name in section_names, "Check application config schema!" 
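# The fixtures above locate each subsystem's setup function with
# inspect.getmembers plus a predicate. A reduced, runnable sketch of that
# discovery pattern (this `is_setup_function` and the fake module are
# illustrative stand-ins, not the project's real helpers):
import inspect
import types

def is_setup_function(member) -> bool:
    return inspect.isfunction(member) and member.__name__.startswith("setup_")

fake_module = types.ModuleType("fake_subsystem")

def setup_fake_subsystem(app):
    """Pretend to wire this subsystem into the app."""

fake_module.setup_fake_subsystem = setup_fake_subsystem
members = inspect.getmembers(fake_module, is_setup_function)
assert [name for name, _fun in members] == ["setup_fake_subsystem"]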
@@ -163,27 +186,28 @@ def test_schema_sections(app_config_schema, app_subsystems): def test_creation_of_login_config(configfile, service_webserver_environ): parser = setup_parser(argparse.ArgumentParser("test-parser")) - with mock.patch('os.environ', service_webserver_environ): + with mock.patch("os.environ", service_webserver_environ): app_config = parse(["-c", configfile], parser) for key, value in app_config.items(): - assert value != 'None', "Use instead Null in {} for {}".format(configfile, key) + assert value != "None", "Use instead Null in {} for {}".format( + configfile, key + ) # sections of app config used assert LOGIN_SECTION in app_config.keys() assert SMTP_SECTION in app_config.keys() assert DB_SECTION in app_config.keys() - # creates update config - fake_app = { APP_CONFIG_KEY: app_config} + fake_app = {APP_CONFIG_KEY: app_config} fake_storage = object() update_cfg = _create_login_config(fake_app, fake_storage) assert all( - value.lower() is not ['none', 'null', ''] - for value in update_cfg.values() - if isinstance(value, str) + value.lower() not in ["none", "null", ""] + for value in update_cfg.values() + if isinstance(value, str) ) # creates login.cfg diff --git a/services/web/server/tests/unit/test_consistency.py b/services/web/server/tests/unit/test_consistency.py index 2cbbe6af4be..29d54146e85 100644 --- a/services/web/server/tests/unit/test_consistency.py +++ b/services/web/server/tests/unit/test_consistency.py @@ -5,13 +5,21 @@ def test_docker_composes_service_versions(osparc_simcore_root_dir: Path, here: Path): # look for main docker-compose file - main_docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose.yml" + main_docker_compose_path = ( + osparc_simcore_root_dir / "services" / "docker-compose.yml" + ) main_docker_compose_specs = yaml.safe_load(main_docker_compose_path.open()) - main_services_image_names = [service["image"] for _service_name, service in main_docker_compose_specs["services"].items()] + main_services_image_names = [ + service["image"] + for _service_name, service in main_docker_compose_specs["services"].items() + ] # look for other docker-compose files in test folders - for compose_file in here.glob('**/docker-compose.yml'): + for compose_file in here.glob("**/docker-compose.yml"): compose_specs = yaml.safe_load(compose_file.open()) - service_image_names = [service["image"] for _service_name, service in compose_specs["services"].items()] + service_image_names = [ + service["image"] + for _service_name, service in compose_specs["services"].items() + ] assert all(elem in main_services_image_names for elem in service_image_names) diff --git a/services/web/server/tests/unit/test_package.py b/services/web/server/tests/unit/test_package.py index 7939a7158ed..28d563a15cb 100644 --- a/services/web/server/tests/unit/test_package.py +++ b/services/web/server/tests/unit/test_package.py @@ -20,30 +20,32 @@ def pylintrc(osparc_simcore_root_dir): assert pylintrc.exists() return pylintrc + def test_run_pylint(pylintrc, package_dir): try: - AUTODETECT=0 - cmd = f'pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}'.split() + AUTODETECT = 0 + cmd = f"pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}".split() assert subprocess.check_call(cmd) == 0 except subprocess.CalledProcessError as err: pytest.fail("Linting error.
Linter exited with code %d" % err.returncode) -def test_main(here): # pylint: disable=unused-variable +def test_main(here):  # pylint: disable=unused-variable with pytest.raises(SystemExit) as excinfo: main("--help".split()) assert excinfo.value.code == 0 + def test_no_pdbs_in_place(package_dir): # TODO: add also test_dir excluding this function!? # TODO: it can be commented! - MATCH = re.compile(r'pdb.set_trace()') + MATCH = re.compile(r"pdb.set_trace()") EXCLUDE = ["__pycache__", ".git"] for root, dirs, files in os.walk(package_dir): for name in files: if name.endswith(".py"): - pypth = (Path(root) / name) + pypth = Path(root) / name code = pypth.read_text() found = MATCH.findall(code) assert not found, "pdb.set_trace found in %s" % pypth diff --git a/services/web/server/tests/unit/test_projects_models.py b/services/web/server/tests/unit/test_projects_models.py index 44315e463ee..2d8e58bfd08 100644 --- a/services/web/server/tests/unit/test_projects_models.py +++ b/services/web/server/tests/unit/test_projects_models.py @@ -10,30 +10,35 @@ import pytest -from simcore_service_webserver.projects.projects_db import (ProjectDBAPI, - _convert_to_db_names, - _convert_to_schema_names) +from simcore_service_webserver.projects.projects_db import ( + ProjectDBAPI, + _convert_to_db_names, + _convert_to_schema_names, +) @pytest.fixture def fake_schema_dict(): return { "anEntryThatUsesCamelCase": "I'm the entry", - "anotherEntryThatUsesCamelCase": "I'm also an entry" + "anotherEntryThatUsesCamelCase": "I'm also an entry", } + @pytest.fixture def fake_db_dict(): return { "an_entry_that_uses_snake_case": "I'm the entry", - "another_entry_that_uses_snake_case": "I'm also an entry" + "another_entry_that_uses_snake_case": "I'm also an entry", } + def test_convert_to_db_names(fake_schema_dict): db_entries = _convert_to_db_names(fake_schema_dict) assert "an_entry_that_uses_camel_case" in db_entries assert "another_entry_that_uses_camel_case" in db_entries + def test_convert_to_schema_names(fake_db_dict): db_entries = _convert_to_schema_names(fake_db_dict) assert "anEntryThatUsesSnakeCase" in db_entries @@ -43,21 +48,26 @@ def test_convert_to_schema_names(fake_db_dict): fake_db_dict["time_entry"] = date db_entries = _convert_to_schema_names(fake_db_dict) assert "timeEntry" in db_entries - assert db_entries["timeEntry"] == "{}Z".format(date.isoformat(timespec='milliseconds')) + assert db_entries["timeEntry"] == "{}Z".format( + date.isoformat(timespec="milliseconds") + ) @pytest.fixture def user_id(): return -1 + class MockAsyncContextManager(MagicMock): mock_object = None async def __aenter__(self): return self.mock_object + async def __aexit__(self, *args): pass + @pytest.fixture def mock_db_engine(mocker): def create_engine(mock_result): @@ -71,8 +81,10 @@ def create_engine(mock_result): mock_db_engine = mocker.patch("aiopg.sa.engine.Engine", spec=True) mock_db_engine.acquire.return_value = mock_context_manager return mock_db_engine, mock_connection + yield create_engine + async def test_add_projects(fake_project, user_id, mocker, mock_db_engine): mock_result_row = mocker.patch("aiopg.sa.result.RowProxy", spec=True) @@ -83,7 +95,6 @@ async def test_add_projects(fake_project, user_id, mocker, mock_db_engine): db_engine, mock_connection = mock_db_engine(mock_result) - db = ProjectDBAPI.init_from_engine(db_engine) await db.add_projects([fake_project], user_id=user_id) @@ -91,6 +102,7 @@ mock_connection.execute.assert_called() assert
mock_connection.execute.call_count == 3 + # not sure this is useful... # async def test_load_projects(user_id, mocker, mock_db_engine): # mock_result_row = mocker.patch("aiopg.sa.result.RowProxy", spec=True) diff --git a/services/web/server/tests/unit/test_projects_utils.py b/services/web/server/tests/unit/test_projects_utils.py index aec0b9ffd5a..8ad34fca72d 100644 --- a/services/web/server/tests/unit/test_projects_utils.py +++ b/services/web/server/tests/unit/test_projects_utils.py @@ -11,17 +11,18 @@ import pytest from jsonschema import ValidationError -from simcore_service_webserver.projects.projects_utils import \ - clone_project_document +from simcore_service_webserver.projects.projects_utils import clone_project_document from simcore_service_webserver.resources import resources def load_template_projects(): projects = [] - projects_names = [name for name in resources.listdir('data') if 'template-projects' in name] + projects_names = [ + name for name in resources.listdir("data") if "template-projects" in name + ] for name in projects_names: - with resources.stream(f'data/{name}') as fp: - projects.extend( json.load(fp) ) + with resources.stream(f"data/{name}") as fp: + projects.extend(json.load(fp)) return projects @@ -31,8 +32,10 @@ def project_schema(project_schema_file): schema = json.load(fh) return schema -@pytest.mark.parametrize("name,project", - [(p['name'], p) for p in load_template_projects()] ) + +@pytest.mark.parametrize( + "name,project", [(p["name"], p) for p in load_template_projects()] +) def test_clone_project_document(name, project, project_schema): source = deepcopy(project) @@ -42,10 +45,10 @@ def test_clone_project_document(name, project, project_schema): assert source == project # valid clone - assert clone['uuid'] != project['uuid'] + assert clone["uuid"] != project["uuid"] - node_ids = project['workbench'].keys() - for clone_node_id in clone['workbench']: + node_ids = project["workbench"].keys() + for clone_node_id in clone["workbench"]: assert clone_node_id not in node_ids try: diff --git a/services/web/server/tests/unit/test_resources.py b/services/web/server/tests/unit/test_resources.py index 172041f0c22..5e6a0613f1b 100644 --- a/services/web/server/tests/unit/test_resources.py +++ b/services/web/server/tests/unit/test_resources.py @@ -13,17 +13,20 @@ log = logging.getLogger(__name__) + @pytest.fixture def app_resources(package_dir: Path) -> List[str]: resource_names = [] base = package_dir - for name in (resources.config_folder, 'api'): + for name in (resources.config_folder, "api"): folder = base / name - resource_names += [ str(p.relative_to(base)) for p in folder.rglob("*.y*ml") ] + resource_names += [str(p.relative_to(base)) for p in folder.rglob("*.y*ml")] return resource_names -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ + def test_resource_io_utils(app_resources: List[str]): @@ -41,10 +44,12 @@ def test_resource_io_utils(app_resources: List[str]): assert ostream.closed + def test_named_resources(): - exposed = [getattr(resources, name) + exposed = [ + getattr(resources, name) for name in dir(resources) - if name.startswith("RESOURCES") + if name.startswith("RESOURCES") ] for resource_name in exposed: @@ -52,6 +57,7 @@ def test_named_resources(): assert resources.isdir(resource_name) assert resources.listdir(resource_name) + def test_paths(app_resources: List[str]): for resource_name in app_resources: assert 
resources.get_path(resource_name).exists() diff --git a/services/web/server/tests/unit/test_rest.py b/services/web/server/tests/unit/test_rest.py index a4312099b4e..3ef4e9ea639 100644 --- a/services/web/server/tests/unit/test_rest.py +++ b/services/web/server/tests/unit/test_rest.py @@ -19,6 +19,7 @@ # TODO: reduce log from openapi_core loggers + @pytest.fixture def spec_dict(openapi_path): with openapi_path.open() as f: @@ -30,20 +31,17 @@ def spec_dict(openapi_path): def client(loop, aiohttp_unused_port, aiohttp_client, api_version_prefix): app = create_safe_application() - server_kwargs={'port': aiohttp_unused_port(), 'host': 'localhost'} + server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} # fake config app[APP_CONFIG_KEY] = { "main": server_kwargs, - "rest": { - "enabled": True, - "version": api_version_prefix - } + "rest": {"enabled": True, "version": api_version_prefix}, } # activates only security+restAPI sub-modules setup_security(app) setup_rest(app) - cli = loop.run_until_complete( aiohttp_client(app, server_kwargs=server_kwargs) ) + cli = loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) return cli @@ -52,32 +50,31 @@ async def test_check_health(client, api_version_prefix): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple(payload.get(k) for k in ('data', 'error')) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert data assert not error - assert data['name'] == 'simcore_service_webserver' - assert data['status'] == 'SERVICE_RUNNING' + assert data["name"] == "simcore_service_webserver" + assert data["status"] == "SERVICE_RUNNING" -FAKE = { - 'path_value': 'one', - 'query_value': 'two', - 'body_value': { - 'a': 'foo', - 'b': '45' - } - } +FAKE = { + "path_value": "one", + "query_value": "two", + "body_value": {"a": "foo", "b": "45"}, +} async def test_check_action(client, api_version_prefix): - QUERY = 'value' - ACTION = 'echo' + QUERY = "value" + ACTION = "echo" - resp = await client.post(f"/{api_version_prefix}/check/{ACTION}?data={QUERY}", json=FAKE) + resp = await client.post( + f"/{api_version_prefix}/check/{ACTION}?data={QUERY}", json=FAKE + ) payload = await resp.json() - data, error = tuple(payload.get(k) for k in ('data', 'error')) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert resp.status == 200, str(payload) assert data @@ -85,14 +82,15 @@ async def test_check_action(client, api_version_prefix): # TODO: validate response against specs - assert data['path_value'] == ACTION - assert data['query_value'] == QUERY - assert data['body_value'] == FAKE - + assert data["path_value"] == ACTION + assert data["query_value"] == QUERY + assert data["body_value"] == FAKE async def test_check_fail(client, api_version_prefix): - url = client.app.router["check_action"].url_for(action="fail").with_query(data="foo") + url = ( + client.app.router["check_action"].url_for(action="fail").with_query(data="foo") + ) assert str(url) == f"/{api_version_prefix}/check/fail?data=foo" resp = await client.post(url, json=FAKE) @@ -100,7 +98,6 @@ async def test_check_fail(client, api_version_prefix): assert "some randome failure" in str(error) - async def test_frontend_config(client, api_version_prefix): url = client.app.router["get_config"].url_for() assert str(url) == f"/{api_version_prefix}/config" @@ -113,20 +110,22 @@ async def test_frontend_config(client, api_version_prefix): # w/ invitation explicitly for enabled in (True, False): - client.app[APP_CONFIG_KEY]['login'] = 
{'registration_invitation_required': enabled} + client.app[APP_CONFIG_KEY]["login"] = { + "registration_invitation_required": enabled + } response = await client.get(f"/{api_version_prefix}/config") data, _ = await assert_status(response, web.HTTPOk) assert data["invitation_required"] is enabled - - # FIXME: hard-coded v0 @pytest.mark.parametrize("resource_name", resources.listdir("api/v0/schemas")) def test_validate_component_schema(resource_name, api_version_prefix): try: - with resources.stream(f"api/{api_version_prefix}/schemas/{resource_name}") as fh: + with resources.stream( + f"api/{api_version_prefix}/schemas/{resource_name}" + ) as fh: schema_under_test = json.load(fh) validator = jsonschema.validators.validator_for(schema_under_test) diff --git a/services/web/server/tests/unit/test_reverse_proxy.py b/services/web/server/tests/unit/test_reverse_proxy.py index 66795584fdf..a826b936207 100644 --- a/services/web/server/tests/unit/test_reverse_proxy.py +++ b/services/web/server/tests/unit/test_reverse_proxy.py @@ -15,7 +15,9 @@ import pytest from aiohttp import web from aiohttp.client_reqrep import ClientResponse -from aiohttp.test_utils import TestClient as DTestClient # renaming avoid pytest to collect +from aiohttp.test_utils import ( + TestClient as DTestClient, +) # renaming avoids pytest collecting it from yarl import URL import simcore_service_webserver.reverse_proxy.handlers as reverse_proxy_handlers @@ -33,16 +35,18 @@ async def handler(request: web.Request): Echoes back received info + its name """ body = await request.text() - return web.json_response({ - "name": name, - "image": image, - "received": { - "method": request.method, - "url": str(request.url), - "body": body, - "proxy_path": request.match_info.get("proxy_path", "") + return web.json_response( + { + "name": name, + "image": image, + "received": { + "method": request.method, + "url": str(request.url), + "body": body, + "proxy_path": request.match_info.get("proxy_path", ""), + }, } - }) + ) app = create_safe_application() app.router.add_route("*", basepath + "/{proxy_path:.*}", handler) @@ -50,7 +54,10 @@ async def handler(request: web.Request): def random_name(lenght=5): - return ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(lenght)) + return "".join( + random.choice(string.ascii_lowercase + string.digits) for _ in range(lenght) + ) + # FIXTURES ------------------ @@ -70,12 +77,13 @@ async def info(req: web.Request): serviceid = req.match_info.get("serviceId") for mountpoint, item in registry.items(): - if item["info"]['id'] == serviceid: + if item["info"]["id"] == serviceid: return web.json_response(registry[mountpoint]["info"]) raise web.HTTPServiceUnavailable( reason="Service {} is not running".format(serviceid), - content_type="application/json") + content_type="application/json", + ) async def start(req: web.Request): # client requests to run image in basepath @@ -94,16 +102,19 @@ async def start(req: web.Request): registry[mountpoint] = { "server": server, "info": { - 'name': name, - 'image': image, - 'mountpoint': mountpoint, - 'id': serviceid, - 'url': str(URL.build( - scheme=server.scheme, - host=server.host, - port=server.port, - path=mountpoint)) - } + "name": name, + "image": image, + "mountpoint": mountpoint, + "id": serviceid, + "url": str( + URL.build( + scheme=server.scheme, + host=server.host, + port=server.port, + path=mountpoint, + ) + ), + }, } # produces an identifier @@ -114,7 +125,7 @@ async def stop(req: web.Request): info = {"id": serviceid} # determines unique
mountpoint for mountpoint, item in registry.items(): - if item["info"]['id'] == serviceid: + if item["info"]["id"] == serviceid: print("stopping %s ...", item["info"]) service = registry[mountpoint]["server"] await service.close() @@ -162,9 +173,7 @@ async def find_url(self, service_identifier: str) -> URL: info = await res.json() return info["url"] - app = create_safe_application({'reverse_proxy':{ - 'enabled': True - }}) + app = create_safe_application({"reverse_proxy": {"enabled": True}}) # setup app["director.client"] = spawner_client @@ -175,15 +184,19 @@ async def find_url(self, service_identifier: str) -> URL: app["reverse_proxy.basemount"] = monitor.base_mountpoint url = app.router["reverse_proxy"].url_for( - serviceId="foo", proxyPath="bar") # <-- another way to "publish", with a named-resouce + serviceId="foo", proxyPath="bar" + ) # <-- another way to "publish", with a named-resource assert url == URL(app["reverse_proxy.basemount"] + "/foo/bar") # adds api async def bypass(req: web.Request): """ bypasses traffic to spawner """ # /services/{serviceId}?action=xxx -> /services/{serviceId}/{action} - method, path, body = req.method, join( - req.path, req.query.get("action", "")).rstrip("/"), None + method, path, body = ( + req.method, + join(req.path, req.query.get("action", "")).rstrip("/"), + None, + ) if method != "GET": body = await req.json() body["basepath"] = req.app["reverse_proxy.basemount"] @@ -194,8 +207,7 @@ async def bypass(req: web.Request): res = await cli.request(method, path, json=body) assert isinstance(res, ClientResponse), "NOTE: %s" % type(res) - response = web.StreamResponse(status=res.status, - headers=res.headers) + response = web.StreamResponse(status=res.status, headers=res.headers) await response.prepare(req) payload = await res.read() await response.write_eof(payload) @@ -229,18 +241,14 @@ async def test_spawner(spawner_client): assert resp.status == 200, data assert data == [] - resp = await spawner_client.post("/services/start", json={ - "image": "A:latest", - "name": "a", - "basepath": BASEPATH - }) + resp = await spawner_client.post( + "/services/start", json={"image": "A:latest", "name": "a", "basepath": BASEPATH} + ) data = await resp.text() - resp = await spawner_client.post("/services/start", json={ - "image": "B:latest", - "name": "b", - "basepath": BASEPATH - }) + resp = await spawner_client.post( + "/services/start", json={"image": "B:latest", "name": "b", "basepath": BASEPATH} + ) data = await resp.text() assert resp.status == 200, data @@ -277,11 +285,9 @@ async def test_spawner_from_client(client): assert len(data) == 0 # start - resp = await client.post("/services", params="action=start", - json={ - "image": "A:latest" - } - ) + resp = await client.post( + "/services", params="action=start", json={"image": "A:latest"} + ) data = await resp.json() assert resp.status == 200, data assert data["mountpoint"].startswith(PROXY_MOUNTPOINT) @@ -318,11 +324,9 @@ async def test_spawned_from_client(client): # spawns 3 services: client <-> reverse_proxy_server <-> spawner_server for _ in range(3): - resp = await client.post("/services", params="action=start", - json={ - "image": IMAGE - } - ) + resp = await client.post( + "/services", params="action=start", json={"image": IMAGE} + ) data = await resp.json() assert resp.status == 200, data @@ -332,7 +336,7 @@ async def test_spawned_from_client(client): # pings them: client <-> reverse_proxy_server <-> spawned_servers for sid, mountpoint in registry.items(): - resp = await client.get(mountpoint+"/ping") +
resp = await client.get(mountpoint + "/ping") assert resp.status == 200 data = await resp.json() @@ -342,11 +346,14 @@ async def test_spawned_from_client(client): assert data["received"]["proxy_path"] == "ping" tail = client.app.router["reverse_proxy"].url_for( - serviceId=sid, proxyPath="ping") + serviceId=sid, proxyPath="ping" + ) url = URL(data["received"]["url"]) assert url.relative() == tail assert not data["received"]["body"] def test_module_configs(): - assert reverse_proxy_handlers.jupyter.APP_SOCKETS_KEY == reverse_proxy.APP_SOCKETS_KEY + assert ( + reverse_proxy_handlers.jupyter.APP_SOCKETS_KEY == reverse_proxy.APP_SOCKETS_KEY + ) diff --git a/services/web/server/tests/unit/test_security_access_model.py b/services/web/server/tests/unit/test_security_access_model.py index a81139d51b5..9147d2f560c 100644 --- a/services/web/server/tests/unit/test_security_access_model.py +++ b/services/web/server/tests/unit/test_security_access_model.py @@ -9,6 +9,7 @@ import copy import difflib import json + # https://blog.nodeswat.com/implement-access-control-in-node-js-8567e7b484d1 # from typing import Callable, Dict, List @@ -20,18 +21,18 @@ from simcore_service_webserver.resources import resources from simcore_service_webserver.security_access_model import ( - RoleBasedAccessModel, check_access) + RoleBasedAccessModel, + check_access, +) from simcore_service_webserver.security_permissions import and_, or_ -from simcore_service_webserver.security_roles import (ROLES_PERMISSIONS, - UserRole) +from simcore_service_webserver.security_roles import ROLES_PERMISSIONS, UserRole @pytest.fixture def access_model(): - def can_update_inputs(context): - current_data = context['current'] - candidate_data = context['candidate'] + current_data = context["current"] + candidate_data = context["candidate"] diffs = jsondiff.diff(current_data, candidate_data) @@ -39,8 +40,8 @@ def can_update_inputs(context): try: for node in diffs["workbench"]: # can ONLY modify `inputs` fields set as ReadAndWrite - access = current_data['workbench'][node]["inputAccess"] - inputs = diffs["workbench"][node]['inputs'] + access = current_data["workbench"][node]["inputAccess"] + inputs = diffs["workbench"][node]["inputs"] for key in inputs: if access.get(key) != "ReadAndWrite": return False @@ -49,23 +50,23 @@ def can_update_inputs(context): pass return False - return len(diffs)==0 # no changes + return len(diffs) == 0 # no changes - #----------- + # ----------- fake_roles_permissions = { UserRole.ANONYMOUS: { - 'can': [ + "can": [ "studies.templates.read", "study.start", "study.stop", { "name": "study.pipeline.node.inputs.update", - "check": can_update_inputs - } + "check": can_update_inputs, + }, ] }, UserRole.USER: { - 'can': [ + "can": [ "study.node.create", "study.node.delete", "study.node.rename", @@ -73,17 +74,14 @@ def can_update_inputs(context): "study.node.data.push", "study.node.data.delete", "study.edge.create", - "study.edge.delete" + "study.edge.delete", ], - 'inherits': [UserRole.ANONYMOUS] + "inherits": [UserRole.ANONYMOUS], }, UserRole.TESTER: { - 'can': [ - "study.nodestree.uuid.read", - "study.logger.debug.read" - ], - # This double inheritance is done intentionally redundant - 'inherits': [UserRole.USER, UserRole.ANONYMOUS] + "can": ["study.nodestree.uuid.read", "study.logger.debug.read"], + # This double inheritance is done intentionally redundant + "inherits": [UserRole.USER, UserRole.ANONYMOUS], }, } @@ -91,13 +89,15 @@ def can_update_inputs(context): rbac = 
RoleBasedAccessModel.from_rawdata(fake_roles_permissions) return rbac + # TESTS ------------------------------------------------------------------------- + def test_roles(): super_users = UserRole.super_users() assert super_users assert UserRole.USER not in super_users - assert all( r in UserRole for r in super_users ) + assert all(r in UserRole for r in super_users) def test_unique_permissions(): @@ -110,7 +110,10 @@ def test_unique_permissions(): for role in ROLES_PERMISSIONS: can = ROLES_PERMISSIONS[role].get("can", []) for permission in can: - assert permission not in used, "'%s' in %s is repeated in security_roles.ROLES_PERMISSIONS" % (permission, role) + assert permission not in used, ( + "'%s' in %s is repeated in security_roles.ROLES_PERMISSIONS" + % (permission, role) + ) used.append(permission) @@ -125,7 +128,7 @@ def test_access_model_loads(): async def test_named_permissions(access_model): - R = UserRole # alias + R = UserRole # alias # direct permission assert await access_model.can(R.USER, "study.edge.delete") @@ -135,7 +138,6 @@ async def test_named_permissions(access_model): assert await access_model.can(R.TESTER, "study.edge.delete") assert await access_model.can(R.ANONYMOUS, "studies.templates.read") - who_can_delete = await access_model.who_can("study.edge.delete") assert R.USER in who_can_delete assert R.TESTER in who_can_delete @@ -169,13 +171,13 @@ async def test_permissions_inheritance(access_model): @pytest.mark.skip(reason="REVIEW") async def test_checked_permissions(access_model): - R = UserRole # alias - MOCKPATH = 'data/fake-template-projects.json' + R = UserRole # alias + MOCKPATH = "data/fake-template-projects.json" with resources.stream(MOCKPATH) as fh: data = json.load(fh) - current ={} + current = {} for prj in data: if prj["uuid"] == "template-uuid-1234-a1a7-f7d4f3a8f26b": current = prj @@ -185,54 +187,56 @@ async def test_checked_permissions(access_model): # updates both allowed and not allowed fields candidate = copy.deepcopy(current) - candidate['workbench']['template-uuid-409d-998c-c1f04de67f8b']["inputs"]["Kr"] = 66 # ReadOnly! - candidate['workbench']['template-uuid-409d-998c-c1f04de67f8b']["inputs"]["Na"] = 66 # ReadWrite + candidate["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"][ + "Kr" + ] = 66 # ReadOnly! 
+ candidate["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"][ + "Na" + ] = 66 # ReadWrite assert not await access_model.can( R.ANONYMOUS, "study.pipeline.node.inputs.update", - context={'current': current, 'candidate': candidate} + context={"current": current, "candidate": candidate}, ) # updates allowed fields candidate = copy.deepcopy(current) - candidate['workbench']['template-uuid-409d-998c-c1f04de67f8b']["inputs"]["Na"] = 66 # ReadWrite + candidate["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"][ + "Na" + ] = 66 # ReadWrite assert await access_model.can( R.ANONYMOUS, "study.pipeline.node.inputs.update", - context={'current': current, 'candidate': candidate} + context={"current": current, "candidate": candidate}, ) # udpates not permitted fields candidate = copy.deepcopy(current) - candidate['description'] = 'not allowed to write here' + candidate["description"] = "not allowed to write here" assert not await access_model.can( R.ANONYMOUS, "study.pipeline.node.inputs.update", - context={'current': current, 'candidate': candidate} + context={"current": current, "candidate": candidate}, ) async def test_async_checked_permissions(access_model): - R = UserRole # alias + R = UserRole # alias # add checked permissions async def async_callback(context) -> bool: - return context['response'] + return context["response"] - access_model.roles[R.TESTER].check['study.edge.edit'] = async_callback + access_model.roles[R.TESTER].check["study.edge.edit"] = async_callback assert not await access_model.can( - R.TESTER, - "study.edge.edit", - context={'response':False} + R.TESTER, "study.edge.edit", context={"response": False} ) assert await access_model.can( - R.TESTER, - "study.edge.edit", - context={'response':True} + R.TESTER, "study.edge.edit", context={"response": True} ) @@ -241,18 +245,19 @@ async def test_check_access_expressions(access_model): assert await check_access(access_model, R.ANONYMOUS, "study.stop") - assert await check_access(access_model, R.ANONYMOUS, - "study.stop |study.node.create") + assert await check_access( + access_model, R.ANONYMOUS, "study.stop |study.node.create" + ) - assert not await check_access(access_model, R.ANONYMOUS, - "study.stop & study.node.create") + assert not await check_access( + access_model, R.ANONYMOUS, "study.stop & study.node.create" + ) - assert await check_access(access_model, R.USER, - "study.stop & study.node.create") + assert await check_access(access_model, R.USER, "study.stop & study.node.create") # TODO: extend expression parser - #assert await check_access(access_model, R.USER, + # assert await check_access(access_model, R.USER, # "study.stop & (study.node.create|study.nodestree.uuid.read)") - #assert await check_access(access_model, R.TESTER, + # assert await check_access(access_model, R.TESTER, # "study.stop & study.node.create & study.nodestree.uuid.read") diff --git a/services/web/server/tests/unit/test_template_projects.py b/services/web/server/tests/unit/test_template_projects.py index 43d3e8ce575..0d30184d930 100644 --- a/services/web/server/tests/unit/test_template_projects.py +++ b/services/web/server/tests/unit/test_template_projects.py @@ -15,7 +15,9 @@ from servicelib.jsonschema_validation import validate_instance from simcore_service_webserver.projects.projects_fakes import Fake from simcore_service_webserver.projects.projects_utils import ( - substitute_parameterized_inputs, variable_pattern) + substitute_parameterized_inputs, + variable_pattern, +) from simcore_service_webserver.resources import 
resources from yarl import URL @@ -41,14 +43,14 @@ def fake_db(): @pytest.fixture def mock_parametrized_project(fake_data_dir): - path = fake_data_dir/"parametrized_project.json" + path = fake_data_dir / "parametrized_project.json" with path.open() as fh: prj = json.load(fh) # check parameterized - inputs = prj['workbench']['template-uuid-409d-998c-c1f04de67f8b']['inputs'] - assert variable_pattern.match(inputs['Na']) - assert variable_pattern.match(inputs['BCL']) + inputs = prj["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"] + assert variable_pattern.match(inputs["Na"]) + assert variable_pattern.match(inputs["BCL"]) return prj @@ -62,10 +64,17 @@ async def test_validate_templates(loop, project_specs: Dict, fake_db): def test_substitutions(mock_parametrized_project): - template_id = mock_parametrized_project['uuid'] - url = URL(f"https://myplatform/study/{template_id}").with_query(my_Na='33', my_BCL="54.0") + template_id = mock_parametrized_project["uuid"] + url = URL(f"https://myplatform/study/{template_id}").with_query( + my_Na="33", my_BCL="54.0" + ) prj = substitute_parameterized_inputs(mock_parametrized_project, dict(url.query)) assert prj - assert prj['workbench']['template-uuid-409d-998c-c1f04de67f8b']['inputs']['Na'] == 33 - assert prj['workbench']['template-uuid-409d-998c-c1f04de67f8b']['inputs']['BCL'] == 54.0 + assert ( + prj["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"]["Na"] == 33 + ) + assert ( + prj["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"]["BCL"] + == 54.0 + ) diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 520ee4201c1..0521c762477 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -29,8 +29,7 @@ from servicelib.aiopg_utils import DSN from servicelib.rest_responses import unwrap_envelope from simcore_service_webserver.application import create_application -from simcore_service_webserver.application_config import \ - app_schema as app_schema +from simcore_service_webserver.application_config import app_schema as app_schema from simcore_service_webserver.db_models import confirmations, metadata, users ## current directory @@ -44,9 +43,9 @@ def default_app_cfg(osparc_simcore_root_dir, fake_static_dir): assert cfg_path.exists() variables = dict(os.environ) - variables.update({ - 'OSPARC_SIMCORE_REPO_ROOTDIR': str(osparc_simcore_root_dir), - }) + variables.update( + {"OSPARC_SIMCORE_REPO_ROOTDIR": str(osparc_simcore_root_dir),} + ) # validates and fills all defaults/optional entries that normal load would not do cfg_dict = trafaret_config.read_and_validate(cfg_path, app_schema, vars=variables) @@ -57,6 +56,7 @@ def default_app_cfg(osparc_simcore_root_dir, fake_static_dir): # FIXME: free cfg_dict but deepcopy shall be r/w return cfg_dict + @pytest.fixture(scope="function") def app_cfg(default_app_cfg, aiohttp_unused_port): cfg = deepcopy(default_app_cfg) @@ -68,7 +68,8 @@ def app_cfg(default_app_cfg, aiohttp_unused_port): # this fixture can be safely modified during test since it is renovated on every call return cfg -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def docker_compose_file(default_app_cfg): """ Overrides pytest-docker fixture @@ -78,11 +79,11 @@ def docker_compose_file(default_app_cfg): cfg = deepcopy(default_app_cfg["db"]["postgres"]) # docker-compose reads these environs - os.environ['TEST_POSTGRES_DB']=cfg['database'] - 
os.environ['TEST_POSTGRES_USER']=cfg['user'] - os.environ['TEST_POSTGRES_PASSWORD']=cfg['password'] + os.environ["TEST_POSTGRES_DB"] = cfg["database"] + os.environ["TEST_POSTGRES_USER"] = cfg["user"] + os.environ["TEST_POSTGRES_PASSWORD"] = cfg["password"] - dc_path = current_dir / 'docker-compose.yml' + dc_path = current_dir / "docker-compose.yml" assert dc_path.exists() yield str(dc_path) @@ -90,23 +91,22 @@ def docker_compose_file(default_app_cfg): os.environ = old -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip, default_app_cfg): cfg = deepcopy(default_app_cfg["db"]["postgres"]) - cfg['host'] = docker_ip - cfg['port'] = docker_services.port_for('postgres', 5432) + cfg["host"] = docker_ip + cfg["port"] = docker_services.port_for("postgres", 5432) url = DSN.format(**cfg) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(url), - timeout=30.0, - pause=0.1, + check=lambda: is_postgres_responsive(url), timeout=30.0, pause=0.1, ) return url + @pytest.fixture def postgres_db(app_cfg, postgres_service): cfg = app_cfg["db"]["postgres"] @@ -124,18 +124,21 @@ def postgres_db(app_cfg, postgres_service): metadata.drop_all(engine) engine.dispose() + @pytest.fixture def web_server(loop, aiohttp_server, app_cfg, monkeypatch, postgres_db): app = create_application(app_cfg) path_mail(monkeypatch) - server = loop.run_until_complete( aiohttp_server(app, port=app_cfg["main"]["port"]) ) + server = loop.run_until_complete(aiohttp_server(app, port=app_cfg["main"]["port"])) return server + @pytest.fixture def client(loop, aiohttp_client, web_server): client = loop.run_until_complete(aiohttp_client(web_server)) return client + @pytest.fixture async def storage_subsystem_mock(loop, mocker): """ @@ -144,26 +147,34 @@ async def storage_subsystem_mock(loop, mocker): Patched functions are exposed within projects but call storage subsystem """ # requests storage to copy data - mock = mocker.patch('simcore_service_webserver.projects.projects_api.copy_data_folders_from_project') + mock = mocker.patch( + "simcore_service_webserver.projects.projects_api.copy_data_folders_from_project" + ) + async def _mock_copy_data_from_project(*args): return args[2] mock.side_effect = _mock_copy_data_from_project # requests storage to delete data - #mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) - mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project', return_value=Future()) + # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) + mock1 = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", + return_value=Future(), + ) mock1.return_value.set_result("") return mock, mock1 # helpers --------------- + def path_mail(monkeypatch): async def send_mail(*args): - print('=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}'.format(*args)) + print("=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}".format(*args)) + + monkeypatch.setattr(simcore_service_webserver.login.utils, "send_mail", send_mail) - monkeypatch.setattr(simcore_service_webserver.login.utils, 'send_mail', send_mail) def is_postgres_responsive(url): """Check if something responds to ``url`` """ @@ -175,24 +186,25 @@ def is_postgres_responsive(url): return False return True 
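Aside: `postgres_service` and `redis_service` both delegate readiness polling to pytest-docker's `docker_services.wait_until_responsive`, handing it a cheap health check such as `is_postgres_responsive`. A minimal sketch of what such a polling loop does, assuming only the Python standard library (the name `check_fn` and the error message are illustrative, not part of this patch):

import time

def wait_until_responsive(check_fn, timeout: float = 30.0, pause: float = 0.1) -> None:
    # Poll check_fn until it returns True; give up after `timeout` seconds.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if check_fn():
            return
        time.sleep(pause)
    raise TimeoutError("service did not become responsive in time")

For example, `wait_until_responsive(lambda: is_postgres_responsive(url))` blocks until the database accepts connections, which is why the fixture can hand back a usable DSN right away.
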
-@pytest.fixture(scope='session')
+
+@pytest.fixture(scope="session")
 def redis_service(docker_services, docker_ip):
     host = docker_ip
-    port = docker_services.port_for('redis', 6379)
+    port = docker_services.port_for("redis", 6379)
     url = URL(f"redis://{host}:{port}")

     docker_services.wait_until_responsive(
-        check=lambda: is_redis_responsive(host, port),
-        timeout=30.0,
-        pause=0.1,
+        check=lambda: is_redis_responsive(host, port), timeout=30.0, pause=0.1,
     )

     return url

+
 def is_redis_responsive(host: str, port: str) -> bool:
     r = redis.Redis(host=host, port=port)
     return r.ping() == True

+
 @pytest.fixture
 async def redis_client(loop, redis_service):
     client = await aioredis.create_redis_pool(str(redis_service), encoding="utf-8")
@@ -205,9 +217,10 @@ async def redis_client(loop, redis_service):

 @pytest.fixture()
 async def socketio_url(client) -> str:
-    SOCKET_IO_PATH = '/socket.io/'
+    SOCKET_IO_PATH = "/socket.io/"
     return str(client.make_url(SOCKET_IO_PATH))

+
 @pytest.fixture()
 async def security_cookie(client) -> str:
     # get the cookie by calling the root entrypoint
@@ -223,45 +236,57 @@ async def security_cookie(client) -> str:
     cookie = resp.request_info.headers["Cookie"]
     yield cookie

+
 @pytest.fixture()
 async def socketio_client(socketio_url: str, security_cookie: str):
     clients = []

     async def connect(client_session_id):
         sio = socketio.AsyncClient()
-        url = str(URL(socketio_url).with_query({'client_session_id': client_session_id}))
-        await sio.connect(url, headers={'Cookie': security_cookie})
+        url = str(
+            URL(socketio_url).with_query({"client_session_id": client_session_id})
+        )
+        await sio.connect(url, headers={"Cookie": security_cookie})
         assert sio.sid
         clients.append(sio)
         return sio
+
     yield connect
     for sio in clients:
         await sio.disconnect()
         assert not sio.sid

+
 @pytest.fixture()
 def client_session_id() -> str:
     def create() -> str():
         return str(uuid4())
+
     return create


 @pytest.fixture
 async def mocked_director_api(loop, mocker):
     mocks = {}
-    mocked_running_services = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services',
-                return_value=Future())
+    mocked_running_services = mocker.patch(
+        "simcore_service_webserver.director.director_api.get_running_interactive_services",
+        return_value=Future(),
+    )
     mocked_running_services.return_value.set_result("")
     mocks["get_running_interactive_services"] = mocked_running_services
-    mocked_stop_service = mocker.patch('simcore_service_webserver.director.director_api.stop_service',
-                return_value=Future())
+    mocked_stop_service = mocker.patch(
+        "simcore_service_webserver.director.director_api.stop_service",
+        return_value=Future(),
+    )
     mocked_stop_service.return_value.set_result("")
     mocks["stop_service"] = mocked_stop_service

     yield mocks

+
 @pytest.fixture
 async def mocked_dynamic_service(loop, client, mocked_director_api):
     services = []
+
     async def create(user_id, project_id) -> Dict:
         SERVICE_UUID = str(uuid4())
         SERVICE_KEY = "simcore/services/dynamic/3d-viewer"
         SERVICE_VERSION = "1.4.2"
@@ -270,7 +295,7 @@ async def create(user_id, project_id) -> Dict:
         create_node_data = {
             "service_key": SERVICE_KEY,
             "service_version": SERVICE_VERSION,
-            "service_uuid": SERVICE_UUID
+            "service_uuid": SERVICE_UUID,
         }

         running_service_dict = {
             "published_port": "23423",
             "service_uuid": SERVICE_UUID,
             "service_key": SERVICE_KEY,
             "service_version": SERVICE_VERSION,
             "service_host": "some_service_host",
             "service_port": "some_service_port",
-            "service_state": "some_service_state"
+            "service_state": "some_service_state",
         }
         services.append(running_service_dict)
         # reset the future or an InvalidStateError will appear as set_result sets the future to done
         mocked_director_api["get_running_interactive_services"].return_value = Future()
-        mocked_director_api["get_running_interactive_services"].return_value.set_result(services)
+        mocked_director_api["get_running_interactive_services"].return_value.set_result(
+            services
+        )
         return running_service_dict
+
     return create
diff --git a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
index 70e38ad8d59..2b2a5736cbe 100644
--- a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
+++ b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
@@ -36,15 +36,21 @@

 SHARED_STUDY_UUID = "e2e38eee-c569-4e55-b104-70d159e49c87"

+
 @pytest.fixture
 def qx_client_outdir(tmpdir, mocker):
     """ Emulates qx output at service/web/client after compiling """

     basedir = tmpdir.mkdir("source-output")
-    folders = [ basedir.mkdir(folder_name) for folder_name in ('osparc', 'resource', 'transpiled')]
-
-    index_file = Path( basedir.join("index.html") )
-    index_file.write_text(textwrap.dedent("""\
+    folders = [
+        basedir.mkdir(folder_name)
+        for folder_name in ("osparc", "resource", "transpiled")
+    ]
+
+    index_file = Path(basedir.join("index.html"))
+    index_file.write_text(
+        textwrap.dedent(
+            """\
@@ -52,7 +58,9 @@ def qx_client_outdir(tmpdir, mocker):

This is a result of qx_client_outdir fixture

-        """))
+            """
+        )
+    )

     # patch get_client_outdir
     mocker.patch.object(simcore_service_webserver.statics, "get_client_outdir")
@@ -60,14 +68,16 @@ def qx_client_outdir(tmpdir, mocker):


 @pytest.fixture
-def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, monkeypatch):
-#def client(loop, aiohttp_client, app_cfg, qx_client_outdir, monkeypatch): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.
+def client(
+    loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, monkeypatch
+):
+    # def client(loop, aiohttp_client, app_cfg, qx_client_outdir, monkeypatch): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.
     cfg = deepcopy(app_cfg)

-    cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup
-    cfg['projects']['enabled'] = True
-    cfg['storage']['enabled'] = False
-    cfg['rabbit']['enabled'] = False
+    cfg["db"]["init_tables"] = True  # inits tables of postgres_service upon startup
+    cfg["projects"]["enabled"] = True
+    cfg["storage"]["enabled"] = False
+    cfg["rabbit"]["enabled"] = False

     app = create_safe_application(cfg)

@@ -75,35 +85,36 @@ def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, mo
     setup_db(app)
     setup_session(app)
     setup_security(app)
-    setup_rest(app) # TODO: why should we need this??
+    setup_rest(app)  # TODO: why should we need this??
     setup_login(app)
     setup_users(app)
     assert setup_projects(app), "Shall not skip this setup"
     assert setup_studies_access(app), "Shall not skip this setup"

     # server and client
-    yield loop.run_until_complete(aiohttp_client(app, server_kwargs={
-        'port': cfg["main"]["port"],
-        'host': cfg['main']['host']
-    }))
+    yield loop.run_until_complete(
+        aiohttp_client(
+            app,
+            server_kwargs={"port": cfg["main"]["port"], "host": cfg["main"]["host"]},
+        )
+    )


 @pytest.fixture
-async def logged_user(client): #, role: UserRole):
+async def logged_user(client):  # , role: UserRole):
     """ adds a user in db and logs in with client

     NOTE: role fixture is defined as a parametrization below
     """
-    role = UserRole.USER # TODO: parameterize roles
+    role = UserRole.USER  # TODO: parameterize roles

     async with LoggedUser(
-        client,
-        {"role": role.name},
-        check_if_succeeds = role!=UserRole.ANONYMOUS
+        client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS
     ) as user:
         yield user
         await delete_all_projects(client.app)

+
 @pytest.fixture
 async def published_project(client, fake_project):
     project_data = deepcopy(fake_project)
@@ -112,13 +123,11 @@ async def published_project(client, fake_project):
     project_data["published"] = True

     async with NewProject(
-        project_data,
-        client.app,
-        user_id=None,
-        clear_all=True
+        project_data, client.app, user_id=None, clear_all=True
     ) as template_project:
         yield template_project

+
 @pytest.fixture
 async def unpublished_project(client, fake_project):
     project_data = deepcopy(fake_project)
@@ -127,10 +136,7 @@ async def unpublished_project(client, fake_project):
     project_data["published"] = False

     async with NewProject(
-        project_data,
-        client.app,
-        user_id=None,
-        clear_all=True
+        project_data, client.app, user_id=None, clear_all=True
     ) as template_project:
         yield template_project

@@ -146,6 +152,7 @@ async def _get_user_projects(client):

     return projects

+
 def _assert_same_projects(got: Dict, expected: Dict):
     # TODO: validate using api/specs/webserver/v0/components/schemas/project-v0.0.1.json
     # TODO: validate workbench!
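Aside: the mocks in this patch stub coroutine functions by giving a plain Mock a pre-resolved asyncio.Future as return value (see `storage_subsystem_mock` and `mocked_director_api` in conftest.py above, and the `future_with_result` helper added to test_projects.py further below); `unittest.mock.AsyncMock` only arrived in Python 3.8, so on 3.7 the mock must return something awaitable. A minimal, self-contained sketch of the pattern (the `stop_service` mock is illustrative, not part of this patch):

import asyncio
from asyncio import Future
from unittest import mock

def future_with_result(result) -> Future:
    # set_result marks the future as done, so awaiting it yields immediately
    f = Future()
    f.set_result(result)
    return f

async def main():
    stop_service = mock.Mock(return_value=future_with_result("stopped"))
    assert await stop_service("service-uuid") == "stopped"
    stop_service.assert_called_once_with("service-uuid")

asyncio.run(main())

One caveat, visible in `mocked_dynamic_service` above: `set_result` can only be called once per Future, so a mock that is awaited repeatedly must get a fresh Future as `return_value` between calls, otherwise `set_result` raises InvalidStateError.
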
@@ -171,15 +178,15 @@ async def test_access_to_forbidden_study(client, unpublished_project):
     resp = await client.get("/study/%s" % valid_but_not_sharable)
     content = await resp.text()

-    assert resp.status == web.HTTPNotFound.status_code, \
+    assert resp.status == web.HTTPNotFound.status_code, (
         "STANDARD studies are NOT sharable: %s" % content
+    )


-async def test_access_study_anonymously(client, qx_client_outdir, published_project, storage_subsystem_mock):
-    params = {
-        "uuid":SHARED_STUDY_UUID,
-        "name":"some-template"
-    }
+async def test_access_study_anonymously(
+    client, qx_client_outdir, published_project, storage_subsystem_mock
+):
+    params = {"uuid": SHARED_STUDY_UUID, "name": "some-template"}

     url_path = "/study/%s" % SHARED_STUDY_UUID
     resp = await client.get(url_path)
@@ -188,17 +195,18 @@ async def test_access_study_anonymously(client, qx_client_outdir, published_proj
     # index
     assert resp.status == web.HTTPOk.status_code, "Got %s" % str(content)
     assert str(resp.url.path) == "/"
-    assert "OSPARC-SIMCORE" in content, \
-        "Expected front-end rendering workbench's study, got %s" % str(content)
+    assert (
+        "OSPARC-SIMCORE" in content
+    ), "Expected front-end rendering workbench's study, got %s" % str(content)
     real_url = str(resp.real_url)

     # has auto logged in as guest?
     resp = await client.get("/v0/me")
     data, _ = await assert_status(resp, web.HTTPOk)
-    assert data['login'].endswith("guest-at-osparc.io")
-    assert data['gravatar_id']
-    assert data['role'].upper() == UserRole.GUEST.name
+    assert data["login"].endswith("guest-at-osparc.io")
+    assert data["gravatar_id"]
+    assert data["role"].upper() == UserRole.GUEST.name

     # guest user only a copy of the template project
     projects = await _get_user_projects(client)
@@ -208,14 +216,13 @@ async def test_access_study_anonymously(client, qx_client_outdir, published_proj
     assert real_url.endswith("#/study/%s" % guest_project["uuid"])
     _assert_same_projects(guest_project, published_project)

-    assert guest_project['prjOwner'] == data['login']
+    assert guest_project["prjOwner"] == data["login"]


-async def test_access_study_by_logged_user(client, logged_user, qx_client_outdir, published_project, storage_subsystem_mock):
-    params = {
-        "uuid":SHARED_STUDY_UUID,
-        "name":"some-template"
-    }
+async def test_access_study_by_logged_user(
+    client, logged_user, qx_client_outdir, published_project, storage_subsystem_mock
+):
+    params = {"uuid": SHARED_STUDY_UUID, "name": "some-template"}

     url_path = "/study/%s" % SHARED_STUDY_UUID
     resp = await client.get(url_path)
@@ -226,8 +233,9 @@ async def test_access_study_by_logged_user(client, logged_user, qx_client_outdir
     assert str(resp.url.path) == "/"
     real_url = str(resp.real_url)

-    assert "OSPARC-SIMCORE" in content, \
-        "Expected front-end rendering workbench's study, got %s" % str(content)
+    assert (
+        "OSPARC-SIMCORE" in content
+    ), "Expected front-end rendering workbench's study, got %s" % str(content)

     # user has a copy of the template project
     projects = await _get_user_projects(client)
@@ -239,4 +247,4 @@ async def test_access_study_by_logged_user(client, logged_user, qx_client_outdir

     _assert_same_projects(user_project, published_project)

-    assert user_project['prjOwner'] == logged_user['email']
+    assert user_project["prjOwner"] == logged_user["email"]
diff --git a/services/web/server/tests/unit/with_dbs/test_change_email.py b/services/web/server/tests/unit/with_dbs/test_change_email.py
index 375022f247e..373bcf23328 100644
--- a/services/web/server/tests/unit/with_dbs/test_change_email.py
+++ b/services/web/server/tests/unit/with_dbs/test_change_email.py
@@ -13,44 +13,40 @@
 from utils_assert import assert_status
 from utils_login import LoggedUser, NewUser, parse_link

-NEW_EMAIL = 'new@mail.com'
+NEW_EMAIL = "new@mail.com"


 async def test_unauthorized(client):
-    url = client.app.router['auth_change_email'].url_for()
-    rsp = await client.post(url, json={
-        'email': NEW_EMAIL,
-    })
+    url = client.app.router["auth_change_email"].url_for()
+    rsp = await client.post(url, json={"email": NEW_EMAIL,})
     assert rsp.status == 401
     await assert_status(rsp, web.HTTPUnauthorized)


 async def test_change_to_existing_email(client):
-    url = client.app.router['auth_change_email'].url_for()
+    url = client.app.router["auth_change_email"].url_for()

     async with LoggedUser(client) as user:
         async with NewUser() as other:
-            rsp = await client.post(url, json={
-                'email': other['email'],
-            })
+            rsp = await client.post(url, json={"email": other["email"],})

-            await assert_status(rsp, web.HTTPUnprocessableEntity, "This email cannot be used")
+            await assert_status(
+                rsp, web.HTTPUnprocessableEntity, "This email cannot be used"
+            )


 async def test_change_and_confirm(client, capsys):
     cfg = client.app[APP_LOGIN_CONFIG]

-    url = client.app.router['auth_change_email'].url_for()
+    url = client.app.router["auth_change_email"].url_for()
     index_url = client.app.router[INDEX_RESOURCE_NAME].url_for()
-    login_url = client.app.router['auth_login'].url_for()
-    logout_url = client.app.router['auth_logout'].url_for()
+    login_url = client.app.router["auth_login"].url_for()
+    logout_url = client.app.router["auth_logout"].url_for()
     assert index_url.path == URL(cfg.LOGIN_REDIRECT).path

     async with LoggedUser(client) as user:
         # request change email
-        rsp = await client.post(url, json={
-            'email': NEW_EMAIL,
-        })
+        rsp = await client.post(url, json={"email": NEW_EMAIL,})
         assert rsp.url_obj.path == url.path
         await assert_status(rsp, web.HTTPOk, cfg.MSG_CHANGE_EMAIL_REQUESTED)

@@ -70,16 +66,15 @@ async def test_change_and_confirm(client, capsys):
         assert rsp.url_obj.path == index_url.path
         assert "welcome to fake web front-end" in txt

-        rsp = await client.post(login_url, json={
-            'email': NEW_EMAIL,
-            'password': user['raw_password'],
-        })
+        rsp = await client.post(
+            login_url, json={"email": NEW_EMAIL, "password": user["raw_password"],}
+        )
         payload = await rsp.json()
         assert rsp.url_obj.path == login_url.path
         await assert_status(rsp, web.HTTPOk, cfg.MSG_LOGGED_IN)

-
-if __name__ == '__main__':
+if __name__ == "__main__":
     import pytest
-    pytest.main([__file__, '--maxfail=1'])
+
+    pytest.main([__file__, "--maxfail=1"])
diff --git a/services/web/server/tests/unit/with_dbs/test_change_password.py b/services/web/server/tests/unit/with_dbs/test_change_password.py
index a508b6bdd57..0de8f038a14 100644
--- a/services/web/server/tests/unit/with_dbs/test_change_password.py
+++ b/services/web/server/tests/unit/with_dbs/test_change_password.py
@@ -12,30 +12,31 @@
 from utils_assert import assert_status
 from utils_login import LoggedUser, parse_link

-NEW_PASSWORD = 'NewPassword1*&^'
+NEW_PASSWORD = "NewPassword1*&^"


 async def test_unauthorized(client):
-    url = client.app.router['auth_change_password'].url_for()
-    rsp = await client.post(url, json={
-        'current':' fake',
-        'new': NEW_PASSWORD,
-        'confirm': NEW_PASSWORD,
-    })
+    url = client.app.router["auth_change_password"].url_for()
+    rsp = await client.post(
+        url, json={"current": " fake", "new": NEW_PASSWORD, "confirm": NEW_PASSWORD,}
+    )
     assert rsp.status == 401
     await assert_status(rsp, web.HTTPUnauthorized)


 async def test_wrong_current_password(client):
     cfg = client.app[APP_LOGIN_CONFIG]
-    url = client.app.router['auth_change_password'].url_for()
+    url = client.app.router["auth_change_password"].url_for()

     async with LoggedUser(client):
-        rsp = await client.post(url, json={
-            'current': 'wrongpassword',
-            'new': NEW_PASSWORD,
-            'confirm': NEW_PASSWORD,
-        })
+        rsp = await client.post(
+            url,
+            json={
+                "current": "wrongpassword",
+                "new": NEW_PASSWORD,
+                "confirm": NEW_PASSWORD,
+            },
+        )
         assert rsp.url_obj.path == url.path
         assert rsp.status == 422
         assert cfg.MSG_WRONG_PASSWORD in await rsp.text()
@@ -44,14 +45,17 @@ async def test_wrong_current_password(client):

 async def test_wrong_confirm_pass(client):
     cfg = client.app[APP_LOGIN_CONFIG]
-    url = client.app.router['auth_change_password'].url_for()
+    url = client.app.router["auth_change_password"].url_for()

     async with LoggedUser(client) as user:
-        rsp = await client.post(url, json={
-            'current': user['raw_password'],
-            'new': NEW_PASSWORD,
-            'confirm': NEW_PASSWORD.upper(),
-        })
+        rsp = await client.post(
+            url,
+            json={
+                "current": user["raw_password"],
+                "new": NEW_PASSWORD,
+                "confirm": NEW_PASSWORD.upper(),
+            },
+        )
         assert rsp.url_obj.path == url.path
         assert rsp.status == 409
         await assert_status(rsp, web.HTTPConflict, cfg.MSG_PASSWORD_MISMATCH)
@@ -60,16 +64,19 @@ async def test_wrong_confirm_pass(client):

 async def test_success(client):
     cfg = client.app[APP_LOGIN_CONFIG]
-    url = client.app.router['auth_change_password'].url_for()
-    login_url = client.app.router['auth_login'].url_for()
-    logout_url = client.app.router['auth_logout'].url_for()
+    url = client.app.router["auth_change_password"].url_for()
+    login_url = client.app.router["auth_login"].url_for()
+    logout_url = client.app.router["auth_logout"].url_for()

     async with LoggedUser(client) as user:
-        rsp = await client.post(url, json={
-            'current': user['raw_password'],
-            'new': NEW_PASSWORD,
-            'confirm': NEW_PASSWORD,
-        })
+        rsp = await client.post(
+            url,
+            json={
+                "current": user["raw_password"],
+                "new": NEW_PASSWORD,
+                "confirm": NEW_PASSWORD,
+            },
+        )
         assert rsp.url_obj.path == url.path
         assert rsp.status == 200
         assert cfg.MSG_PASSWORD_CHANGED in await rsp.text()
@@ -79,16 +86,15 @@ async def test_success(client):
         assert rsp.status == 200
         assert rsp.url_obj.path == logout_url.path

-        rsp = await client.post(login_url, json={
-            'email': user['email'],
-            'password': NEW_PASSWORD,
-        })
+        rsp = await client.post(
+            login_url, json={"email": user["email"], "password": NEW_PASSWORD,}
+        )
         assert rsp.status == 200
         assert rsp.url_obj.path == login_url.path
         await assert_status(rsp, web.HTTPOk, cfg.MSG_LOGGED_IN)

-
-if __name__ == '__main__':
+if __name__ == "__main__":
     import pytest
-    pytest.main([__file__, '--maxfail=1'])
+
+    pytest.main([__file__, "--maxfail=1"])
diff --git a/services/web/server/tests/unit/with_dbs/test_db.py b/services/web/server/tests/unit/with_dbs/test_db.py
index a7a216acaaf..cea0674a231 100644
--- a/services/web/server/tests/unit/with_dbs/test_db.py
+++ b/services/web/server/tests/unit/with_dbs/test_db.py
@@ -2,8 +2,8 @@

 import yaml

-from simcore_service_webserver.db import (is_service_enabled,
-                                          is_service_responsive)
+from simcore_service_webserver.db import is_service_enabled, is_service_responsive
+

 def test_uses_same_postgres_version(docker_compose_file, osparc_simcore_root_dir):
     with io.open(docker_compose_file) as fh:
@@ -12,7 +12,10 @@ def test_uses_same_postgres_version(docker_compose_file, osparc_simcore_root_dir
     with io.open(osparc_simcore_root_dir / "services" / "docker-compose.yml") as fh:
         expected = yaml.safe_load(fh)

-    assert fixture['services']['postgres']['image'] == expected['services']['postgres']['image']
+    assert (
+        fixture["services"]["postgres"]["image"]
+        == expected["services"]["postgres"]["image"]
+    )


 async def test_responsive(web_server):
diff --git a/services/web/server/tests/unit/with_dbs/test_guests_management.py b/services/web/server/tests/unit/with_dbs/test_guests_management.py
index 218f51e7dd6..f75ecaf9050 100644
--- a/services/web/server/tests/unit/with_dbs/test_guests_management.py
+++ b/services/web/server/tests/unit/with_dbs/test_guests_management.py
@@ -20,16 +20,15 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
     # config app
     cfg = deepcopy(app_cfg)
     port = cfg["main"]["port"]
-    cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup
+    cfg["db"]["init_tables"] = True  # inits tables of postgres_service upon startup
     cfg["projects"]["enabled"] = True

     app = application.create_application(cfg)

     # server and client
-    return loop.run_until_complete(aiohttp_client(app, server_kwargs={
-        'port': port,
-        'host': 'localhost'
-    }))
+    return loop.run_until_complete(
+        aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
+    )


 @pytest.mark.skip(reason="Under dev")
@@ -43,7 +42,6 @@ def test_users_projects_db(client):
     pass


-
 @pytest.mark.skip(reason="Under dev")
 def test_cleanup_expired_guest_users(client):
     pass
diff --git a/services/web/server/tests/unit/with_dbs/test_login.py b/services/web/server/tests/unit/with_dbs/test_login.py
index 128d58dfa03..8a5c3e96c7b 100644
--- a/services/web/server/tests/unit/with_dbs/test_login.py
+++ b/services/web/server/tests/unit/with_dbs/test_login.py
@@ -11,15 +11,14 @@
 from simcore_service_webserver.login.cfg import cfg
 from utils_login import NewUser

-EMAIL, PASSWORD = 'tester@test.com', 'password'
+EMAIL, PASSWORD = "tester@test.com", "password"


 async def test_login_with_unknown_email(client):
-    url = client.app.router['auth_login'].url_for()
-    r = await client.post(url, json={
-        'email': 'unknown@email.com',
-        'password': 'wrong.'
-    })
+    url = client.app.router["auth_login"].url_for()
+    r = await client.post(
+        url, json={"email": "unknown@email.com", "password": "wrong."}
+    )
     payload = await r.json()

     assert r.status == web.HTTPUnauthorized.status_code, str(payload)
@@ -28,17 +27,14 @@ async def test_login_with_unknown_email(client):


 async def test_login_with_wrong_password(client):
-    url = client.app.router['auth_login'].url_for()
+    url = client.app.router["auth_login"].url_for()

     r = await client.get(url)
     payload = await r.json()

     assert cfg.MSG_WRONG_PASSWORD not in await r.text(), str(payload)

     async with NewUser() as user:
-        r = await client.post(url, json={
-            'email': user['email'],
-            'password': 'wrong.',
-        })
+        r = await client.post(url, json={"email": user["email"], "password": "wrong.",})
         payload = await r.json()

     assert r.status == web.HTTPUnauthorized.status_code
     assert r.url_obj.path == url.path
@@ -46,48 +42,45 @@ async def test_login_with_wrong_password(client):


 async def test_login_banned_user(client):
-    url = client.app.router['auth_login'].url_for()
+    url = client.app.router["auth_login"].url_for()

     r = await client.get(url)
     assert cfg.MSG_USER_BANNED not in await r.text()

-    async with NewUser({'status': UserStatus.BANNED.name}) as user:
-        r = await client.post(url, json={
-            'email': user['email'],
-            'password': user['raw_password']
-        })
+    async with NewUser({"status": UserStatus.BANNED.name}) as user:
+        r = await client.post(
+            url, json={"email": user["email"], "password": user["raw_password"]}
+        )
         payload = await r.json()

     assert r.status == web.HTTPUnauthorized.status_code, str(payload)
     assert r.url_obj.path == url.path
-    assert cfg.MSG_USER_BANNED in payload['error']['errors'][0]['message']
+    assert cfg.MSG_USER_BANNED in payload["error"]["errors"][0]["message"]


 async def test_login_inactive_user(client):
-    url = client.app.router['auth_login'].url_for()
+    url = client.app.router["auth_login"].url_for()

     r = await client.get(url)
     assert cfg.MSG_ACTIVATION_REQUIRED not in await r.text()

-    async with NewUser({'status': UserStatus.CONFIRMATION_PENDING.name}) as user:
-        r = await client.post(url, json={
-            'email': user['email'],
-            'password': user['raw_password']
-        })
+    async with NewUser({"status": UserStatus.CONFIRMATION_PENDING.name}) as user:
+        r = await client.post(
+            url, json={"email": user["email"], "password": user["raw_password"]}
+        )

     assert r.status == web.HTTPUnauthorized.status_code
     assert r.url_obj.path == url.path
     assert cfg.MSG_ACTIVATION_REQUIRED in await r.text()


 async def test_login_successfully(client):
-    url = client.app.router['auth_login'].url_for()
+    url = client.app.router["auth_login"].url_for()

     async with NewUser() as user:
-        r = await client.post(url, json={
-            'email': user['email'],
-            'password': user['raw_password']
-        })
+        r = await client.post(
+            url, json={"email": user["email"], "password": user["raw_password"]}
+        )
     assert r.status == 200
     data, error = unwrap_envelope(await r.json())

     assert not error
     assert data
-    assert cfg.MSG_LOGGED_IN in data['message']
+    assert cfg.MSG_LOGGED_IN in data["message"]
diff --git a/services/web/server/tests/unit/with_dbs/test_logout.py b/services/web/server/tests/unit/with_dbs/test_logout.py
index 98f908d936d..e8ed4e945bf 100644
--- a/services/web/server/tests/unit/with_dbs/test_logout.py
+++ b/services/web/server/tests/unit/with_dbs/test_logout.py
@@ -8,13 +8,13 @@

 async def test_logout(client):
     db = get_storage(client.app)

-    logout_url = client.app.router['auth_logout'].url_for()
-    protected_url = client.app.router['auth_change_email'].url_for()
+    logout_url = client.app.router["auth_logout"].url_for()
+    protected_url = client.app.router["auth_change_email"].url_for()

     async with LoggedUser(client) as user:

         # try to access protected page
-        r = await client.post(protected_url, json={'email': user['email']})
+        r = await client.post(protected_url, json={"email": user["email"]})
         assert r.url_obj.path == protected_url.path
         await assert_status(r, web.HTTPOk)

@@ -28,10 +28,10 @@ async def test_logout(client):
         assert r.url_obj.path == protected_url.path
         await assert_status(r, web.HTTPUnauthorized)

-
     await db.delete_user(user)


-if __name__ == '__main__':
+if __name__ == "__main__":
     import pytest
-    pytest.main([__file__, '--maxfail=1'])
+
+    pytest.main([__file__, "--maxfail=1"])
diff --git a/services/web/server/tests/unit/with_dbs/test_projects.py b/services/web/server/tests/unit/with_dbs/test_projects.py
index 0b17f22d06c..29a11190736 100644
--- a/services/web/server/tests/unit/with_dbs/test_projects.py
+++ b/services/web/server/tests/unit/with_dbs/test_projects.py
@@ -37,22 +37,51 @@
 from utils_projects import NewProject, delete_all_projects

 API_VERSION = "v0"
-RESOURCE_NAME = 'projects'
+RESOURCE_NAME = "projects"
 API_PREFIX = "/" + API_VERSION


+def future_with_result(result) -> Future:
+    f = Future()
+    f.set_result(result)
+    return f
+
+
+@pytest.fixture
+def mocked_director_subsystem(mocker):
+    mock_director_api = {
+        "get_running_interactive_services": mocker.patch(
+            "simcore_service_webserver.director.director_api.get_running_interactive_services",
+            return_value=future_with_result(""),
+        ),
+        "start_service": mocker.patch(
+            "simcore_service_webserver.director.director_api.start_service",
+            return_value=future_with_result(""),
+        ),
+        "stop_service": mocker.patch(
+            "simcore_service_webserver.director.director_api.stop_service",
+            return_value=future_with_result(""),
+        ),
+    }
+    return mock_director_api
+
+
 @pytest.fixture
-def client(loop, aiohttp_client, app_cfg, postgres_service):
-#def client(loop, aiohttp_client, app_cfg): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.
+def client(loop, aiohttp_client, app_cfg, postgres_service, mocked_director_subsystem):
+    # def client(loop, aiohttp_client, app_cfg): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.
     # config app
     cfg = deepcopy(app_cfg)
     port = cfg["main"]["port"]
-    cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup
+    cfg["db"]["init_tables"] = True  # inits tables of postgres_service upon startup
     cfg["projects"]["enabled"] = True
     cfg["director"]["enabled"] = True
-    cfg["resource_manager"]["garbage_collection_interval_seconds"] = 3 # increase speed of garbage collection
-    cfg["resource_manager"]["resource_deletion_timeout_seconds"] = 3 # reduce deletion delay
+    cfg["resource_manager"][
+        "garbage_collection_interval_seconds"
+    ] = 3  # increase speed of garbage collection
+    cfg["resource_manager"][
+        "resource_deletion_timeout_seconds"
+    ] = 3  # reduce deletion delay

     app = create_safe_application(cfg)

     # setup app
@@ -60,7 +89,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
     setup_session(app)
     setup_security(app)
     setup_rest(app)
-    setup_login(app) # needed for login_utils fixtures
+    setup_login(app)  # needed for login_utils fixtures
     setup_resource_manager(app)
     setup_sockets(app)
     setup_director(app)
@@ -68,13 +97,13 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
     assert setup_projects(app)

     # server and client
-    yield loop.run_until_complete(aiohttp_client(app, server_kwargs={
-        'port': port,
-        'host': 'localhost'
-    }))
+    yield loop.run_until_complete(
+        aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
+    )

     # teardown here ...

+
 @pytest.fixture()
 async def logged_user(client, user_role: UserRole):
     """ adds a user in db and logs in with client
@@ -84,23 +113,23 @@ async def logged_user(client, user_role: UserRole):
     async with LoggedUser(
         client,
         {"role": user_role.name},
-        check_if_succeeds = user_role!=UserRole.ANONYMOUS
+        check_if_succeeds=user_role != UserRole.ANONYMOUS,
     ) as user:
         print("-----> logged in user", user_role)
         yield user
         print("<----- logged out user", user_role)

+
 @pytest.fixture
 async def user_project(client, fake_project, logged_user):
     async with NewProject(
-        fake_project,
-        client.app,
-        user_id=logged_user["id"]
+        fake_project, client.app, user_id=logged_user["id"]
     ) as project:
         print("-----> added project", project["name"])
         yield project
         print("<----- removed project", project["name"])

+
 @pytest.fixture
 async def template_project(client, fake_project):
     project_data = deepcopy(fake_project)
@@ -108,21 +137,23 @@ async def template_project(client, fake_project):
     project_data["uuid"] = "d4d0eca3-d210-4db6-84f9-63670b07176b"

     async with NewProject(
-        project_data,
-        client.app,
-        user_id=None,
-        clear_all=True
+        project_data, client.app, user_id=None, clear_all=True
     ) as template_project:
         print("-----> added template project", template_project["name"])
         yield template_project
         print("<----- removed template project", template_project["name"])

+
 @pytest.fixture
 def computational_system_mock(mocker):
-    mock_fun = mocker.patch('simcore_service_webserver.projects.projects_handlers.update_pipeline_db', return_value=Future())
+    mock_fun = mocker.patch(
+        "simcore_service_webserver.projects.projects_handlers.update_pipeline_db",
+        return_value=Future(),
+    )
     mock_fun.return_value.set_result("")
     return mock_fun

+
 @pytest.fixture
 def fake_services():
     def create_fakes(number_services: int) -> List[Dict]:
@@ -134,9 +165,11 @@ def create_fakes(number_services: int) -> List[Dict]:


 def assert_replaced(current_project, update_data):
     def _extract(dikt, keys):
-        return {k:dikt[k] for k in keys}
+        return {k: dikt[k] for k in keys}

-    modified = ["lastChangeDate", ]
+    modified = [
+        "lastChangeDate",
+    ]
     keep = [k for k in update_data.keys() if k not in modified]

     assert _extract(current_project, keep) == _extract(update_data, keep)

@@ -145,17 +178,20 @@ def _extract(dikt, keys):
         assert to_datetime(update_data[k]) < to_datetime(current_project[k])

-
-
 # GET --------
-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_list_projects(client, logged_user, user_project, template_project, expected):
-    #TODO: GET /v0/projects?start=0&count=3
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_list_projects(
+    client, logged_user, user_project, template_project, expected
+):
+    # TODO: GET /v0/projects?start=0&count=3

     # GET /v0/projects
     url = client.app.router["list_projects"].url_for()
@@ -169,30 +205,34 @@ async def test_list_projects(client, logged_user, user_project, template_project
         assert data[0] == template_project
         assert data[1] == user_project

-    #GET /v0/projects?type=user
-    resp = await client.get(url.with_query(type='user'))
+    # GET /v0/projects?type=user
+    resp = await client.get(url.with_query(type="user"))
     data, errors = await assert_status(resp, expected)
     if not errors:
         assert len(data) == 1
         assert data[0] == user_project

-    #GET /v0/projects?type=template
+    # GET /v0/projects?type=template
     # instead /v0/projects/templates ??
-    resp = await client.get(url.with_query(type='template'))
+    resp = await client.get(url.with_query(type="template"))
     data, errors = await assert_status(resp, expected)
     if not errors:
         assert len(data) == 1
         assert data[0] == template_project


-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_get_project(client, logged_user, user_project, template_project, expected):
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_get_project(
+    client, logged_user, user_project, template_project, expected
+):
     # GET /v0/projects/{project_id}

     # with a project owned by user
@@ -215,14 +255,18 @@ async def test_get_project(client, logged_user, user_project, template_project,


 # POST --------
-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPCreated),
-    (UserRole.TESTER, web.HTTPCreated),
-])
-async def test_new_project(client, logged_user, expected,
-    computational_system_mock, storage_subsystem_mock):
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPCreated),
+        (UserRole.TESTER, web.HTTPCreated),
+    ],
+)
+async def test_new_project(
+    client, logged_user, expected, computational_system_mock, storage_subsystem_mock
+):
     # POST /v0/projects
     url = client.app.router["create_projects"].url_for()
     assert str(url) == API_PREFIX + "/projects"
@@ -236,7 +280,7 @@ async def test_new_project(client, logged_user, expected,
         "creationDate": now_str(),
         "lastChangeDate": now_str(),
         "thumbnail": "",
-        "workbench": {}
+        "workbench": {},
     }
     resp = await client.post(url, json=default_project)

@@ -249,11 +293,13 @@ async def test_new_project(client, logged_user, expected,
     # updated fields
     assert default_project["uuid"] != new_project["uuid"]
     assert default_project["prjOwner"] != logged_user["name"]
-    assert to_datetime(default_project["creationDate"]) < to_datetime(new_project["creationDate"])
+    assert to_datetime(default_project["creationDate"]) < to_datetime(
+        new_project["creationDate"]
+    )

     # invariant fields
     for key in new_project.keys():
-        if key not in ('uuid', 'prjOwner', 'creationDate', 'lastChangeDate'):
+        if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate"):
             assert default_project[key] == new_project[key]

     # TODO: validate response using OAS?
@@ -264,16 +310,30 @@ async def test_new_project(client, logged_user, expected,
     # violates foreign key constraint "user_to_projects_user_id_fkey" on table "user_to_projects"
     await delete_all_projects(client.app)

-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPCreated),
-    (UserRole.TESTER, web.HTTPCreated),
-])
-async def test_new_project_from_template(client, logged_user, template_project, expected,
-    computational_system_mock, storage_subsystem_mock):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPCreated),
+        (UserRole.TESTER, web.HTTPCreated),
+    ],
+)
+async def test_new_project_from_template(
+    client,
+    logged_user,
+    template_project,
+    expected,
+    computational_system_mock,
+    storage_subsystem_mock,
+):
     # POST /v0/projects?from_template={template_uuid}
-    url = client.app.router["create_projects"].url_for().with_query(from_template=template_project["uuid"])
+    url = (
+        client.app.router["create_projects"]
+        .url_for()
+        .with_query(from_template=template_project["uuid"])
+    )

     resp = await client.post(url)

@@ -288,8 +348,12 @@ async def test_new_project_from_template(client, logged_user, template_project,
     assert project["prjOwner"] != template_project["prjOwner"]

     # different timestamps
-    assert to_datetime(template_project["creationDate"]) < to_datetime(project["creationDate"])
-    assert to_datetime(template_project["lastChangeDate"]) < to_datetime(project["lastChangeDate"])
+    assert to_datetime(template_project["creationDate"]) < to_datetime(
+        project["creationDate"]
+    )
+    assert to_datetime(template_project["lastChangeDate"]) < to_datetime(
+        project["lastChangeDate"]
+    )

     # different uuids for project and nodes!?
     assert project["uuid"] != template_project["uuid"]
@@ -301,26 +365,40 @@ async def test_new_project_from_template(client, logged_user, template_project,
         except ValueError:
             pytest.fail("Invalid uuid in workbench node {}".format(node_name))

-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPCreated),
-    (UserRole.TESTER, web.HTTPCreated),
-])
-async def test_new_project_from_template_with_body(client, logged_user, template_project, expected,
-    computational_system_mock, storage_subsystem_mock):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPCreated),
+        (UserRole.TESTER, web.HTTPCreated),
+    ],
+)
+async def test_new_project_from_template_with_body(
+    client,
+    logged_user,
+    template_project,
+    expected,
+    computational_system_mock,
+    storage_subsystem_mock,
+):
     # POST /v0/projects?from_template={template_uuid}
-    url = client.app.router["create_projects"].url_for().with_query(from_template=template_project["uuid"])
+    url = (
+        client.app.router["create_projects"]
+        .url_for()
+        .with_query(from_template=template_project["uuid"])
+    )
     predefined = {
-        "uuid":"",
-        "name":"Sleepers8",
-        "description":"Some lines from user",
-        "thumbnail":"",
-        "prjOwner":"",
-        "creationDate":"2019-06-03T09:59:31.987Z",
-        "lastChangeDate":"2019-06-03T09:59:31.987Z",
-        "workbench":{}
+        "uuid": "",
+        "name": "Sleepers8",
+        "description": "Some lines from user",
+        "thumbnail": "",
+        "prjOwner": "",
+        "creationDate": "2019-06-03T09:59:31.987Z",
+        "lastChangeDate": "2019-06-03T09:59:31.987Z",
+        "workbench": {},
     }
     resp = await client.post(url, json=predefined)

@@ -334,7 +412,6 @@ async def test_new_project_from_template_with_body(client, logged_user, template
     assert project["name"] == predefined["name"]
     assert project["description"] == predefined["description"]

-
     modified = ["prjOwner", "creationDate", "lastChangeDate", "uuid"]

     # different ownership
@@ -342,8 +419,12 @@ async def test_new_project_from_template_with_body(client, logged_user, template
     assert project["prjOwner"] != template_project["prjOwner"]

     # different timestamps
-    assert to_datetime(template_project["creationDate"]) < to_datetime(project["creationDate"])
-    assert to_datetime(template_project["lastChangeDate"]) < to_datetime(project["lastChangeDate"])
+    assert to_datetime(template_project["creationDate"]) < to_datetime(
+        project["creationDate"]
+    )
+    assert to_datetime(template_project["lastChangeDate"]) < to_datetime(
+        project["lastChangeDate"]
+    )

     # different uuids for project and nodes!?
     assert project["uuid"] != template_project["uuid"]
@@ -356,17 +437,29 @@ async def test_new_project_from_template_with_body(client, logged_user, template
             pytest.fail("Invalid uuid in workbench node {}".format(node_name))


-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPForbidden),
-    (UserRole.TESTER, web.HTTPCreated),
-])
-async def test_new_template_from_project(client, logged_user, user_project, expected,
-    computational_system_mock, storage_subsystem_mock):
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPForbidden),
+        (UserRole.TESTER, web.HTTPCreated),
+    ],
+)
+async def test_new_template_from_project(
+    client,
+    logged_user,
+    user_project,
+    expected,
+    computational_system_mock,
+    storage_subsystem_mock,
+):
     # POST /v0/projects?as_template={user_uuid}
-    url = client.app.router["create_projects"].url_for().\
-        with_query(as_template=user_project["uuid"])
+    url = (
+        client.app.router["create_projects"]
+        .url_for()
+        .with_query(as_template=user_project["uuid"])
+    )

     resp = await client.post(url)
     data, error = await assert_status(resp, expected)
@@ -389,15 +482,19 @@ async def test_new_template_from_project(client, logged_user, user_project, expe
     # TODO: check in detail all fields in a node


 # PUT --------
-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_replace_project(client, logged_user, user_project, expected, computational_system_mock):
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_replace_project(
+    client, logged_user, user_project, expected, computational_system_mock
+):
     # PUT /v0/projects/{project_id}
     url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"])

@@ -410,19 +507,25 @@ async def test_replace_project(client, logged_user, user_project, expected, comp
     if not error:
         assert_replaced(current_project=data, update_data=project_update)

-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_replace_project_updated_inputs(client, logged_user, user_project, expected, computational_system_mock):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_replace_project_updated_inputs(
+    client, logged_user, user_project, expected, computational_system_mock
+):
     # PUT /v0/projects/{project_id}
     url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"])

     project_update = deepcopy(user_project)
     #
-    #"inputAccess": {
+    # "inputAccess": {
     #     "Na": "ReadAndWrite", <--------
     #     "Kr": "ReadOnly",
     #     "BCL": "ReadAndWrite",
     #     "NBeats": "ReadOnly",
     #     "Ligand": "Invisible",
     #     "cAMKII": "Invisible"
     #     },
-    project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"]["Na"] = 55
+    project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][
+        "Na"
+    ] = 55

     resp = await client.put(url, json=project_update)
     data, error = await assert_status(resp, expected)

     if not error:
         assert_replaced(current_project=data, update_data=project_update)

-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_replace_project_updated_readonly_inputs(client, logged_user, user_project, expected, computational_system_mock):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_replace_project_updated_readonly_inputs(
+    client, logged_user, user_project, expected, computational_system_mock
+):
     # PUT /v0/projects/{project_id}
     url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"])

     project_update = deepcopy(user_project)
-    project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"]["Na"] = 55
-    project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"]["Kr"] = 5
+    project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][
+        "Na"
+    ] = 55
+    project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][
+        "Kr"
+    ] = 5

     resp = await client.put(url, json=project_update)
     data, error = await assert_status(resp, expected)
@@ -461,30 +576,42 @@ async def test_replace_project_updated_readonly_inputs(client, logged_user, user


 # DELETE -------
-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPNoContent),
-])
-async def test_delete_project(client, logged_user, user_project, expected, storage_subsystem_mock, mocker, fake_services):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPNoContent),
+        (UserRole.TESTER, web.HTTPNoContent),
+    ],
+)
+async def test_delete_project(
+    client,
+    logged_user,
+    user_project,
+    expected,
+    storage_subsystem_mock,
+    mocked_director_subsystem,
+    fake_services,
+):
     # DELETE /v0/projects/{project_id}

     fakes = fake_services(5)
-    mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future())
-    mock_director_api.return_value.set_result(fakes)
-
-    mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future())
-    mock_director_api_stop_services.return_value.set_result("")
+    mocked_director_subsystem[
+        "get_running_interactive_services"
+    ].return_value = future_with_result(fakes)

     url = client.app.router["delete_project"].url_for(project_id=user_project["uuid"])
     resp = await client.delete(url)
     await assert_status(resp, expected)

     if resp.status == web.HTTPNoContent.status_code:
-        mock_director_api.assert_called_once()
+        mocked_director_subsystem[
+            "get_running_interactive_services"
+        ].assert_called_once()
         calls = [call(client.server.app, service["service_uuid"]) for service in fakes]
-        mock_director_api_stop_services.has_calls(calls)
+        mocked_director_subsystem["stop_service"].has_calls(calls)
         # wait for the fire&forget to run
         await sleep(2)
         # check if database entries are correctly removed, there should be no project available here
@@ -493,90 +620,133 @@ async def test_delete_project(client, logged_user, user_project, expected, stora
         resp = await client.get(url)
         data, error = await assert_status(resp, web.HTTPNotFound)

-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_open_project(client, logged_user, user_project, client_session_id, expected, mocker):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_open_project(
+    client,
+    logged_user,
+    user_project,
+    client_session_id,
+    expected,
+    mocked_director_subsystem,
+):
     # POST /v0/projects/{project_id}:open
     # open project
-    mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future())
-    mock_director_api.return_value.set_result("")
-
-    mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future())
-    mock_director_api_start_service.return_value.set_result("")

     url = client.app.router["open_project"].url_for(project_id=user_project["uuid"])
     resp = await client.post(url, json=client_session_id())
     await assert_status(resp, expected)

     if resp.status == web.HTTPOk.status_code:
-        dynamic_services = {service_uuid:service for service_uuid, service in user_project["workbench"].items() if "/dynamic/" in service["key"]}
+        dynamic_services = {
+            service_uuid: service
+            for service_uuid, service in user_project["workbench"].items()
+            if "/dynamic/" in service["key"]
+        }
         calls = []
         for service_uuid, service in dynamic_services.items():
-            calls.append(call(client.server.app, project_id=user_project["uuid"], service_key=service["key"], service_uuid=service_uuid, service_version=service["version"], user_id=logged_user["id"]))
-        mock_director_api_start_service.assert_has_calls(calls)
+            calls.append(
+                call(
+                    client.server.app,
+                    project_id=user_project["uuid"],
+                    service_key=service["key"],
+                    service_uuid=service_uuid,
+                    service_version=service["version"],
+                    user_id=logged_user["id"],
+                )
+            )
+        mocked_director_subsystem["start_service"].assert_has_calls(calls)

-@pytest.mark.parametrize("user_role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPNoContent),
-])
-async def test_close_project(client, logged_user, user_project, client_session_id, expected, mocker, fake_services):
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPNoContent),
+        (UserRole.TESTER, web.HTTPNoContent),
+    ],
+)
+async def test_close_project(
+    client,
+    logged_user,
+    user_project,
+    client_session_id,
+    expected,
+    mocked_director_subsystem,
+    fake_services,
+):
     # POST /v0/projects/{project_id}:close
     fakes = fake_services(5)
     assert len(fakes) == 5
-    mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future())
-    mock_director_api.return_value.set_result(fakes)
+    mocked_director_subsystem[
+        "get_running_interactive_services"
+    ].return_value = future_with_result(fakes)

-    mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future())
-    mock_director_api_start_service.return_value.set_result("")
-
-    mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future())
-    mock_director_api_stop_services.return_value.set_result("")

     # open project
     client_id = client_session_id()
     url = client.app.router["open_project"].url_for(project_id=user_project["uuid"])
     resp = await client.post(url, json=client_id)
+
     if resp.status == web.HTTPOk.status_code:
-        mock_director_api.assert_called_once()
-        mock_director_api.reset_mock()
-    else:
-        mock_director_api.assert_not_called()
+        calls = [
+            call(client.server.app, user_project["uuid"], logged_user["id"]),
+        ]
+        mocked_director_subsystem[
+            "get_running_interactive_services"
+        ].has_calls(calls)
+        mocked_director_subsystem["get_running_interactive_services"].reset_mock()
+
     # close project
     url = client.app.router["close_project"].url_for(project_id=user_project["uuid"])
     resp = await client.post(url, json=client_id)
     await assert_status(resp, expected)

     if resp.status == web.HTTPNoContent.status_code:
-        mock_director_api.assert_called_once()
+        calls = [
+            call(client.server.app, user_project["uuid"], None),
+            call(client.server.app, user_project["uuid"], logged_user["id"]),
+        ]
+        mocked_director_subsystem["get_running_interactive_services"].has_calls(calls)
         calls = [call(client.server.app, service["service_uuid"]) for service in fakes]
-        mock_director_api_stop_services.has_calls(calls)
-    else:
-        mock_director_api.assert_not_called()
-
-@pytest.mark.parametrize("user_role, expected", [
-    # (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_get_active_project(client, logged_user, user_project, client_session_id, expected, socketio_client, mocker):
-    mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future())
-    mock_director_api.return_value.set_result("")
-
-    mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future())
-    mock_director_api_start_service.return_value.set_result("")
-
-    mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future())
-    mock_director_api_stop_services.return_value.set_result("")
+        mocked_director_subsystem["stop_service"].has_calls(calls)
+
+
+
+@pytest.mark.parametrize(
+    "user_role, expected",
+    [
+        # (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_get_active_project(
+    client,
+    logged_user,
+    user_project,
+    client_session_id,
+    expected,
+    socketio_client,
+    mocked_director_subsystem,
+):
     # login with socket using client session id
     client_id1 = client_session_id()
     sio = await socketio_client(client_id1)
     assert sio.sid
     # get active projects -> empty
-    get_active_projects_url = client.app.router["get_active_project"].url_for().with_query(client_session_id=client_id1)
+    get_active_projects_url = (
+        client.app.router["get_active_project"]
+        .url_for()
+        .with_query(client_session_id=client_id1)
+    )
     resp = await client.get(get_active_projects_url)
     data, error = await assert_status(resp, expected)
     if resp.status == web.HTTPOk.status_code:
@@ -584,7 +754,9 @@ async def test_get_active_project(client, logged_user, user_project, client_sess
         assert not error

     # open project
-    open_project_url = client.app.router["open_project"].url_for(project_id=user_project["uuid"])
+    open_project_url = client.app.router["open_project"].url_for(
+        project_id=user_project["uuid"]
+    )
     resp = await client.post(open_project_url, json=client_id1)
     data, error = await assert_status(resp, expected)

     resp = await client.get(get_active_projects_url)
@@ -598,7 +770,11 @@ async def test_get_active_project(client, logged_user, user_project, client_sess
     sio = await socketio_client(client_id2)
     assert sio.sid
     # get active projects -> empty
-    get_active_projects_url = client.app.router["get_active_project"].url_for().with_query(client_session_id=client_id2)
+    get_active_projects_url = (
+        client.app.router["get_active_project"]
+        .url_for()
+        .with_query(client_session_id=client_id2)
+    )
     resp = await client.get(get_active_projects_url)
     data, error = await assert_status(resp, expected)
     if resp.status == web.HTTPOk.status_code:
@@ -606,22 +782,27 @@ async def test_get_active_project(client, logged_user, user_project, client_sess
         assert not error


-@pytest.mark.parametrize("user_role, expected", [
-    # (UserRole.ANONYMOUS),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPForbidden),
-    (UserRole.TESTER, web.HTTPForbidden),
-])
-async def test_delete_shared_project_forbidden(loop, client, logged_user, user_project, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id, expected, mocker):
-    mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future())
-    mock_director_api.return_value.set_result("")
-
-    mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future())
-    mock_director_api_start_service.return_value.set_result("")
-
-    mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future())
-    mock_director_api_stop_services.return_value.set_result("")
-
+@pytest.mark.parametrize(
+    "user_role, expected",
+    [
+        # (UserRole.ANONYMOUS),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPForbidden),
+        (UserRole.TESTER, web.HTTPForbidden),
+    ],
+)
+async def test_delete_shared_project_forbidden(
+    loop,
+    client,
+    logged_user,
+    user_project,
+    mocked_director_api,
+    mocked_dynamic_service,
+    socketio_client,
+    client_session_id,
+    expected,
+    mocked_director_subsystem,
+):
     # service in project = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
     service = await mocked_dynamic_service(logged_user["id"], user_project["uuid"])
     # open project in tab1
@@ -637,57 +818,75 @@ async def test_delete_shared_project_forbidden(loop, client, logged_user, user_p
     resp = await client.delete(url)
     await assert_status(resp, expected)

-@pytest.mark.parametrize("user_role, create_exp, get_exp, deletion_exp", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized, web.HTTPUnauthorized, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden, web.HTTPOk, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent),
-])
-async def test_project_node_lifetime(loop, client, logged_user, user_project, create_exp, get_exp, deletion_exp, mocker, storage_subsystem_mock):
def test_project_node_lifetime(loop, client, logged_user, user_project, create_exp, get_exp, deletion_exp, mocker, storage_subsystem_mock): - mock_director_api_get_running_services = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) - - mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future()) - mock_director_api_start_service.return_value.set_result("") - mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future()) - mock_director_api_stop_services.return_value.set_result("") - mock_storage_api_delete_data_folders_of_project_node = mocker.patch('simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project_node', return_value=Future()) + +@pytest.mark.parametrize( + "user_role, create_exp, get_exp, deletion_exp", + [ + ( + UserRole.ANONYMOUS, + web.HTTPUnauthorized, + web.HTTPUnauthorized, + web.HTTPUnauthorized, + ), + (UserRole.GUEST, web.HTTPForbidden, web.HTTPOk, web.HTTPForbidden), + (UserRole.USER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent), + ], +) +async def test_project_node_lifetime( + loop, + client, + logged_user, + user_project, + create_exp, + get_exp, + deletion_exp, + mocked_director_subsystem, + storage_subsystem_mock, + mocker, +): + + mock_storage_api_delete_data_folders_of_project_node = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project_node", + return_value=Future(), + ) mock_storage_api_delete_data_folders_of_project_node.return_value.set_result("") # create a new dynamic node... url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) - body = { - "service_key": "some/dynamic/key", - "service_version": "1.3.4" - } + body = {"service_key": "some/dynamic/key", "service_version": "1.3.4"} resp = await client.post(url, json=body) data, errors = await assert_status(resp, create_exp) node_id = "wrong_node_id" if resp.status == web.HTTPCreated.status_code: - mock_director_api_start_service.assert_called_once() + mocked_director_subsystem["start_service"].assert_called_once() assert "node_id" in data node_id = data["node_id"] else: - mock_director_api_start_service.assert_not_called() + mocked_director_subsystem["start_service"].assert_not_called() # create a new NOT dynamic node... 
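# NOTE: these patches swap the repeated
# `mocker.patch(..., return_value=Future())` + `set_result(...)` two-step for a
# `future_with_result()` helper imported from elsewhere in the suite. A minimal
# sketch of what such a helper presumably looks like (the name is taken from its
# usage in the tests above; the body shown here is an assumption):

from asyncio import Future


def future_with_result(result) -> Future:
    # wrap `result` in an already-resolved Future, so a patched coroutine
    # function can be awaited and immediately yields `result`
    fut = Future()
    fut.set_result(result)
    return fut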
- mock_director_api_start_service.reset_mock() + mocked_director_subsystem["start_service"].reset_mock() url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) - body = { - "service_key": "some/notdynamic/key", - "service_version": "1.3.4" - } + body = {"service_key": "some/notdynamic/key", "service_version": "1.3.4"} resp = await client.post(url, json=body) data, errors = await assert_status(resp, create_exp) node_id_2 = "wrong_node_id" if resp.status == web.HTTPCreated.status_code: - mock_director_api_start_service.assert_not_called() + mocked_director_subsystem["start_service"].assert_not_called() assert "node_id" in data node_id_2 = data["node_id"] else: - mock_director_api_start_service.assert_not_called() + mocked_director_subsystem["start_service"].assert_not_called() # get the node state - mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id, "service_state": "running"}]) - url = client.app.router["get_node"].url_for(project_id=user_project["uuid"], node_id=node_id) + mocked_director_subsystem[ + "get_running_interactive_services" + ].return_value = future_with_result( + [{"service_uuid": node_id, "service_state": "running"}] + ) + url = client.app.router["get_node"].url_for( + project_id=user_project["uuid"], node_id=node_id + ) resp = await client.get(url) data, errors = await assert_status(resp, get_exp) if resp.status == web.HTTPOk.status_code: @@ -695,9 +894,13 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr assert data["service_state"] == "running" # get the NOT dynamic node state - mock_director_api_get_running_services.return_value = Future() - mock_director_api_get_running_services.return_value.set_result("") - url = client.app.router["get_node"].url_for(project_id=user_project["uuid"], node_id=node_id_2) + mocked_director_subsystem[ + "get_running_interactive_services" + ].return_value = future_with_result("") + + url = client.app.router["get_node"].url_for( + project_id=user_project["uuid"], node_id=node_id_2 + ) resp = await client.get(url) data, errors = await assert_status(resp, get_exp) if resp.status == web.HTTPOk.status_code: @@ -705,37 +908,42 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr assert data["service_state"] == "idle" # delete the node - mock_director_api_get_running_services.return_value = Future() - mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id}]) - url = client.app.router["delete_node"].url_for(project_id=user_project["uuid"], node_id=node_id) + mocked_director_subsystem[ + "get_running_interactive_services" + ].return_value = future_with_result([{"service_uuid": node_id}]) + url = client.app.router["delete_node"].url_for( + project_id=user_project["uuid"], node_id=node_id + ) resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: - mock_director_api_stop_services.assert_called_once() + mocked_director_subsystem["stop_service"].assert_called_once() mock_storage_api_delete_data_folders_of_project_node.assert_called_once() else: - mock_director_api_stop_services.assert_not_called() + mocked_director_subsystem["stop_service"].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_not_called() # delete the NOT dynamic node - mock_director_api_stop_services.reset_mock() + mocked_director_subsystem["stop_service"].reset_mock() 
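# NOTE: `mocked_director_subsystem` is a fixture defined outside this diff.
# Judging from how it is indexed in these tests, it plausibly bundles the three
# director-api patches (previously created inline with `mocker.patch`) into a
# single dict of mocks, reusing the `future_with_result` helper sketched above.
# A sketch under that assumption:

import pytest


@pytest.fixture
def mocked_director_subsystem(mocker):
    mocks = {}
    for name in (
        "get_running_interactive_services",
        "start_service",
        "stop_service",
    ):
        mocks[name] = mocker.patch(
            "simcore_service_webserver.director.director_api." + name,
            return_value=future_with_result(""),
        )
    # individual tests re-target the results, e.g.
    # mocks["get_running_interactive_services"].return_value = future_with_result(fakes)
    yield mocks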
mock_storage_api_delete_data_folders_of_project_node.reset_mock() # mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id}]) - url = client.app.router["delete_node"].url_for(project_id=user_project["uuid"], node_id=node_id_2) + url = client.app.router["delete_node"].url_for( + project_id=user_project["uuid"], node_id=node_id_2 + ) resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: - mock_director_api_stop_services.assert_not_called() + mocked_director_subsystem["stop_service"].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_called_once() else: - mock_director_api_stop_services.assert_not_called() + mocked_director_subsystem["stop_service"].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_not_called() -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.USER, web.HTTPOk) -]) -async def test_tags_to_studies(client, logged_user, user_project, expected, test_tags_data): +@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, web.HTTPOk)]) +async def test_tags_to_studies( + client, logged_user, user_project, expected, test_tags_data +): # Add test tags tags = test_tags_data added_tags = [] @@ -745,7 +953,9 @@ async def test_tags_to_studies(client, logged_user, user_project, expected, test added_tag, _ = await assert_status(resp, expected) added_tags.append(added_tag) # Add tag to study - url = client.app.router["add_tag"].url_for(study_uuid=user_project.get("uuid"), tag_id=str(added_tag.get("id"))) + url = client.app.router["add_tag"].url_for( + study_uuid=user_project.get("uuid"), tag_id=str(added_tag.get("id")) + ) resp = await client.put(url) data, _ = await assert_status(resp, expected) # Tag is included in response @@ -756,17 +966,23 @@ async def test_tags_to_studies(client, logged_user, user_project, expected, test resp = await client.delete(url) await assert_status(resp, web.HTTPNoContent) # Get project and check that tag is no longer there - url = client.app.router["get_project"].url_for(project_id=str(user_project.get("uuid"))) + url = client.app.router["get_project"].url_for( + project_id=str(user_project.get("uuid")) + ) resp = await client.get(url) data, _ = await assert_status(resp, expected) assert added_tags[0].get("id") not in data.get("tags") - #Remove tag1 from project - url = client.app.router["remove_tag"].url_for(study_uuid=user_project.get("uuid"), tag_id=str(added_tags[1].get("id"))) + # Remove tag1 from project + url = client.app.router["remove_tag"].url_for( + study_uuid=user_project.get("uuid"), tag_id=str(added_tags[1].get("id")) + ) resp = await client.delete(url) await assert_status(resp, expected) # Get project and check that tag is no longer there - url = client.app.router["get_project"].url_for(project_id=str(user_project.get("uuid"))) + url = client.app.router["get_project"].url_for( + project_id=str(user_project.get("uuid")) + ) resp = await client.get(url) data, _ = await assert_status(resp, expected) assert added_tags[1].get("id") not in data.get("tags") diff --git a/services/web/server/tests/unit/with_dbs/test_redis.py b/services/web/server/tests/unit/with_dbs/test_redis.py index ad5d801b7dd..90833683ae4 100644 --- a/services/web/server/tests/unit/with_dbs/test_redis.py +++ b/services/web/server/tests/unit/with_dbs/test_redis.py @@ -4,8 +4,8 @@ import aioredis -async def test_aioredis(loop, redis_client): - await redis_client.set('my-key', 'value') - val = 
await redis_client.get('my-key') - assert val == 'value' +async def test_aioredis(loop, redis_client): + await redis_client.set("my-key", "value") + val = await redis_client.get("my-key") + assert val == "value" diff --git a/services/web/server/tests/unit/with_dbs/test_redis_registry.py b/services/web/server/tests/unit/with_dbs/test_redis_registry.py index 07da37160fb..0eaa7320901 100644 --- a/services/web/server/tests/unit/with_dbs/test_redis_registry.py +++ b/services/web/server/tests/unit/with_dbs/test_redis_registry.py @@ -9,12 +9,19 @@ import pytest from simcore_service_webserver.resource_manager.config import ( - APP_CLIENT_REDIS_CLIENT_KEY, APP_CLIENT_SOCKET_REGISTRY_KEY, - APP_CONFIG_KEY, CONFIG_SECTION_NAME) + APP_CLIENT_REDIS_CLIENT_KEY, + APP_CLIENT_SOCKET_REGISTRY_KEY, + APP_CONFIG_KEY, + CONFIG_SECTION_NAME, +) from simcore_service_webserver.resource_manager.registry import ( - ALIVE_SUFFIX, RESOURCE_SUFFIX, RedisResourceRegistry) -from simcore_service_webserver.resource_manager.websocket_manager import \ - managed_resource + ALIVE_SUFFIX, + RESOURCE_SUFFIX, + RedisResourceRegistry, +) +from simcore_service_webserver.resource_manager.websocket_manager import ( + managed_resource, +) @pytest.fixture @@ -22,14 +29,11 @@ def redis_enabled_app(redis_client) -> Dict: app = { APP_CLIENT_REDIS_CLIENT_KEY: redis_client, APP_CLIENT_SOCKET_REGISTRY_KEY: None, - APP_CONFIG_KEY: { - CONFIG_SECTION_NAME: { - "resource_deletion_timeout_seconds": 3 - } - } + APP_CONFIG_KEY: {CONFIG_SECTION_NAME: {"resource_deletion_timeout_seconds": 3}}, } yield app + @pytest.fixture def redis_registry(redis_enabled_app) -> RedisResourceRegistry: registry = RedisResourceRegistry(redis_enabled_app) @@ -37,39 +41,49 @@ def redis_registry(redis_enabled_app) -> RedisResourceRegistry: yield registry - @pytest.fixture def user_ids(): def create_user_id(number: int) -> List[str]: return [f"user id {i}" for i in range(number)] + return create_user_id -@pytest.mark.parametrize("key, hash_key", [ - ({"some_key": "some_value"}, "some_key=some_value"), - ({"some_key": "some_value", "another_key": "another_value"}, "some_key=some_value:another_key=another_value") -]) + +@pytest.mark.parametrize( + "key, hash_key", + [ + ({"some_key": "some_value"}, "some_key=some_value"), + ( + {"some_key": "some_value", "another_key": "another_value"}, + "some_key=some_value:another_key=another_value", + ), + ], +) async def test_redis_registry_hashes(loop, redis_enabled_app, key, hash_key): # pylint: disable=protected-access assert RedisResourceRegistry._hash_key(key) == hash_key - assert RedisResourceRegistry._decode_hash_key(f"{hash_key}:{RESOURCE_SUFFIX}") == key + assert ( + RedisResourceRegistry._decode_hash_key(f"{hash_key}:{RESOURCE_SUFFIX}") == key + ) assert RedisResourceRegistry._decode_hash_key(f"{hash_key}:{ALIVE_SUFFIX}") == key + async def test_redis_registry(loop, redis_registry): random_value = randint(1, 10) - key = {f"key_{x}":f"value_{x}" for x in range(random_value)} - second_key = {f"sec_key_{x}":f"sec_value_{x}" for x in range(random_value)} - invalid_key = {f"invalid_key":f"invalid_value"} + key = {f"key_{x}": f"value_{x}" for x in range(random_value)} + second_key = {f"sec_key_{x}": f"sec_value_{x}" for x in range(random_value)} + invalid_key = {f"invalid_key": f"invalid_value"} NUM_RESOURCES = 7 resources = [(f"res_key{x}", f"res_value{x}") for x in range(NUM_RESOURCES)] - invalid_resource = (f"invalid_res_key",f"invalid_res_value") + invalid_resource = (f"invalid_res_key", f"invalid_res_value") # create 
resources for res in resources: await redis_registry.set_resource(key, res) - assert len(await redis_registry.get_resources(key)) == resources.index(res)+1 + assert len(await redis_registry.get_resources(key)) == resources.index(res) + 1 # get them - assert await redis_registry.get_resources(key) == {x[0]:x[1] for x in resources} + assert await redis_registry.get_resources(key) == {x[0]: x[1] for x in resources} assert not await redis_registry.get_resources(invalid_key) # find them for res in resources: @@ -81,7 +95,10 @@ async def test_redis_registry(loop, redis_registry): # add second key for res in resources: await redis_registry.set_resource(second_key, res) - assert len(await redis_registry.get_resources(second_key)) == resources.index(res)+1 + assert ( + len(await redis_registry.get_resources(second_key)) + == resources.index(res) + 1 + ) # find them for res in resources: assert await redis_registry.find_resources(key, res[0]) == [res[1]] @@ -115,7 +132,10 @@ async def test_redis_registry(loop, redis_registry): for res in resources: assert await redis_registry.find_keys(res) == [second_key] await redis_registry.remove_resource(second_key, res[0]) - assert len(await redis_registry.get_resources(second_key)) == len(resources) - (resources.index(res)+1) + assert len(await redis_registry.get_resources(second_key)) == len(resources) - ( + resources.index(res) + 1 + ) + async def test_websocket_manager(loop, redis_enabled_app, redis_registry, user_ids): @@ -136,25 +156,34 @@ async def test_websocket_manager(loop, redis_enabled_app, redis_registry, user_i assert socket_id not in tabs tabs[socket_id] = client_session_id with managed_resource(user, client_session_id, redis_enabled_app) as rt: - #pylint: disable=protected-access - resource_key = {"user_id":user, "client_session_id": client_session_id} + # pylint: disable=protected-access + resource_key = {"user_id": user, "client_session_id": client_session_id} assert rt._resource_key() == resource_key # set the socket id and check it is rightfully there await rt.set_socket_id(socket_id) assert await rt.get_socket_id() == socket_id - assert await redis_registry.get_resources(resource_key) == {"socket_id": socket_id} + assert await redis_registry.get_resources(resource_key) == { + "socket_id": socket_id + } list_of_sockets_of_user = await rt.find_socket_ids() assert socket_id in list_of_sockets_of_user # resource key shall be empty assert await rt.find(res_key) == [] # add the resource now await rt.add(res_key, res_value) - assert await redis_registry.get_resources(resource_key) == {"socket_id": socket_id, res_key: res_value} + assert await redis_registry.get_resources(resource_key) == { + "socket_id": socket_id, + res_key: res_value, + } # resource key shall be filled assert await rt.find(res_key) == [res_value] - list_of_same_resource_users = await rt.find_users_of_resource(res_key, res_value) - assert list_user_ids[:(list_user_ids.index(user)+1)] == sorted(list_of_same_resource_users) + list_of_same_resource_users = await rt.find_users_of_resource( + res_key, res_value + ) + assert list_user_ids[: (list_user_ids.index(user) + 1)] == sorted( + list_of_same_resource_users + ) # remove sockets for user in list_user_ids: diff --git a/services/web/server/tests/unit/with_dbs/test_registration.py b/services/web/server/tests/unit/with_dbs/test_registration.py index bf8be8b21b0..b301232a16f 100644 --- a/services/web/server/tests/unit/with_dbs/test_registration.py +++ b/services/web/server/tests/unit/with_dbs/test_registration.py @@ -13,117 +13,129 @@ 
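# NOTE: the resource-registry tests above pin down the Redis key encoding: a
# key dict is flattened to "k1=v1:k2=v2" and suffixed with RESOURCE_SUFFIX or
# ALIVE_SUFFIX. A sketch of the codec those assertions imply (method names come
# from the tests; the bodies shown here are an assumption):

def _hash_key(key: dict) -> str:
    # {"user_id": "7", "client_session_id": "abc"} -> "user_id=7:client_session_id=abc"
    return ":".join(f"{k}={v}" for k, v in key.items())


def _decode_hash_key(hash_key: str) -> dict:
    # strip the trailing ":<suffix>" part, then invert the encoding
    encoded = hash_key[: hash_key.rindex(":")]
    return dict(part.split("=") for part in encoded.split(":"))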
from utils_assert import assert_error, assert_status from utils_login import NewInvitation, NewUser, parse_link -EMAIL, PASSWORD = 'tester@test.com', 'password' +EMAIL, PASSWORD = "tester@test.com", "password" async def test_regitration_availibility(client): - url = client.app.router['auth_register'].url_for() - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD, - }) + url = client.app.router["auth_register"].url_for() + r = await client.post( + url, json={"email": EMAIL, "password": PASSWORD, "confirm": PASSWORD,} + ) await assert_status(r, web.HTTPOk) + async def test_regitration_is_not_get(client): - url = client.app.router['auth_register'].url_for() + url = client.app.router["auth_register"].url_for() r = await client.get(url) await assert_error(r, web.HTTPMethodNotAllowed) + async def test_registration_with_existing_email(client): db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() + url = client.app.router["auth_register"].url_for() async with NewUser() as user: - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'], - 'confirm': user['raw_password'] - }) + r = await client.post( + url, + json={ + "email": user["email"], + "password": user["raw_password"], + "confirm": user["raw_password"], + }, + ) await assert_error(r, web.HTTPConflict, cfg.MSG_EMAIL_EXISTS) + @pytest.mark.skip("TODO: Feature still not implemented") async def test_registration_with_expired_confirmation(client, monkeypatch): - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_REQUIRED', True) - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_LIFETIME', -1) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_REQUIRED", True) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_LIFETIME", -1) db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() - - async with NewUser({'status': UserStatus.CONFIRMATION_PENDING.name}) as user: - confirmation = await db.create_confirmation(user, ConfirmationAction.REGISTRATION.name) - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'], - 'confirm': user['raw_password'], - }) + url = client.app.router["auth_register"].url_for() + + async with NewUser({"status": UserStatus.CONFIRMATION_PENDING.name}) as user: + confirmation = await db.create_confirmation( + user, ConfirmationAction.REGISTRATION.name + ) + r = await client.post( + url, + json={ + "email": user["email"], + "password": user["raw_password"], + "confirm": user["raw_password"], + }, + ) await db.delete_confirmation(confirmation) await assert_error(r, web.HTTPConflict, cfg.MSG_EMAIL_EXISTS) + async def test_registration_without_confirmation(client, monkeypatch): - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_REQUIRED', False) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_REQUIRED", False) db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD - }) + url = client.app.router["auth_register"].url_for() + r = await client.post( + url, json={"email": EMAIL, "password": PASSWORD, "confirm": PASSWORD} + ) data, error = unwrap_envelope(await r.json()) assert r.status == 200, (data, error) assert cfg.MSG_LOGGED_IN in data["message"] - user = await db.get_user({'email': EMAIL}) + user = await db.get_user({"email": EMAIL}) assert user await db.delete_user(user) + async def 
test_registration_with_confirmation(client, capsys, monkeypatch): - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_REQUIRED', True) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_REQUIRED", True) db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD - }) + url = client.app.router["auth_register"].url_for() + r = await client.post( + url, json={"email": EMAIL, "password": PASSWORD, "confirm": PASSWORD} + ) data, error = unwrap_envelope(await r.json()) assert r.status == 200, (data, error) - user = await db.get_user({'email': EMAIL}) - assert user['status'] == UserStatus.CONFIRMATION_PENDING.name + user = await db.get_user({"email": EMAIL}) + assert user["status"] == UserStatus.CONFIRMATION_PENDING.name assert "verification link" in data["message"] # retrieves sent link by email (see monkeypatch of email in conftest.py) out, err = capsys.readouterr() link = parse_link(out) - assert '/auth/confirmation/' in str(link) + assert "/auth/confirmation/" in str(link) resp = await client.get(link) text = await resp.text() assert "welcome to fake web front-end" in text assert resp.status == 200 - user = await db.get_user({'email': EMAIL}) - assert user['status'] == UserStatus.ACTIVE.name + user = await db.get_user({"email": EMAIL}) + assert user["status"] == UserStatus.ACTIVE.name await db.delete_user(user) -@pytest.mark.parametrize("is_invitation_required,has_valid_invitation,expected_response", [ - (True, True, web.HTTPOk), - (True, False, web.HTTPForbidden), - (False, True, web.HTTPOk), - (False, False, web.HTTPOk), -]) -async def test_registration_with_invitation(client, is_invitation_required, has_valid_invitation, expected_response): +@pytest.mark.parametrize( + "is_invitation_required,has_valid_invitation,expected_response", + [ + (True, True, web.HTTPOk), + (True, False, web.HTTPForbidden), + (False, True, web.HTTPOk), + (False, False, web.HTTPOk), + ], +) +async def test_registration_with_invitation( + client, is_invitation_required, has_valid_invitation, expected_response +): from servicelib.application_keys import APP_CONFIG_KEY from simcore_service_webserver.login.config import CONFIG_SECTION_NAME - client.app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] = { + client.app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] = { "registration_confirmation_required": False, - "registration_invitation_required": is_invitation_required + "registration_invitation_required": is_invitation_required, } # @@ -133,24 +145,28 @@ async def test_registration_with_invitation(client, is_invitation_required, has_ # Front end then creates the following request # async with NewInvitation(client) as confirmation: - print( get_confirmation_info(confirmation) ) - - url = client.app.router['auth_register'].url_for() - - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD, - 'invitation': confirmation['code'] if has_valid_invitation else "WRONG_CODE" - }) + print(get_confirmation_info(confirmation)) + + url = client.app.router["auth_register"].url_for() + + r = await client.post( + url, + json={ + "email": EMAIL, + "password": PASSWORD, + "confirm": PASSWORD, + "invitation": confirmation["code"] + if has_valid_invitation + else "WRONG_CODE", + }, + ) await assert_status(r, expected_response) # check optional fields in body if not has_valid_invitation or not is_invitation_required: - r = await client.post(url, json={ - 'email': "new-user" + EMAIL, - 
'password': PASSWORD - }) + r = await client.post( + url, json={"email": "new-user" + EMAIL, "password": PASSWORD} + ) await assert_status(r, expected_response) if is_invitation_required and has_valid_invitation: @@ -158,5 +174,5 @@ async def test_registration_with_invitation(client, is_invitation_required, has_ assert not await db.get_confirmation(confirmation) -if __name__ == '__main__': - pytest.main([__file__, '--maxfail=1']) +if __name__ == "__main__": + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_reset_password.py b/services/web/server/tests/unit/with_dbs/test_reset_password.py index 954d51a1906..d324374d51f 100644 --- a/services/web/server/tests/unit/with_dbs/test_reset_password.py +++ b/services/web/server/tests/unit/with_dbs/test_reset_password.py @@ -15,7 +15,7 @@ from utils_assert import assert_status from utils_login import NewUser, parse_link, parse_test_marks -EMAIL, PASSWORD = 'tester@test.com', 'password' +EMAIL, PASSWORD = "tester@test.com", "password" @pytest.fixture @@ -24,11 +24,9 @@ def cfg(client): async def test_unknown_email(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() - rp = await client.post(reset_url, json={ - 'email': EMAIL, - }) + rp = await client.post(reset_url, json={"email": EMAIL,}) payload = await rp.text() assert rp.url_obj.path == reset_url.path @@ -39,12 +37,10 @@ async def test_unknown_email(client, capsys, cfg): async def test_banned_user(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() - async with NewUser({'status': UserStatus.BANNED.name}) as user: - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + async with NewUser({"status": UserStatus.BANNED.name}) as user: + rp = await client.post(reset_url, json={"email": user["email"],}) assert rp.url_obj.path == reset_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_EMAIL_SENT.format(**user)) @@ -54,12 +50,10 @@ async def test_banned_user(client, capsys, cfg): async def test_inactive_user(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() - async with NewUser({'status': UserStatus.CONFIRMATION_PENDING.name}) as user: - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + async with NewUser({"status": UserStatus.CONFIRMATION_PENDING.name}) as user: + rp = await client.post(reset_url, json={"email": user["email"],}) assert rp.url_obj.path == reset_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_EMAIL_SENT.format(**user)) @@ -69,16 +63,16 @@ async def test_inactive_user(client, capsys, cfg): async def test_too_often(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() cfg = client.app[APP_LOGIN_CONFIG] db = cfg.STORAGE async with NewUser() as user: - confirmation = await db.create_confirmation(user, ConfirmationAction.RESET_PASSWORD.name) - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + confirmation = await db.create_confirmation( + user, ConfirmationAction.RESET_PASSWORD.name + ) + rp = await client.post(reset_url, json={"email": user["email"],}) await db.delete_confirmation(confirmation) assert rp.url_obj.path == reset_url.path @@ -88,13 +82,10 @@ async def 
test_too_often(client, capsys, cfg): assert parse_test_marks(out)["reason"] == cfg.MSG_OFTEN_RESET_PASSWORD - async def test_reset_and_confirm(client, capsys, cfg): async with NewUser() as user: - reset_url = client.app.router['auth_reset_password'].url_for() - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + reset_url = client.app.router["auth_reset_password"].url_for() + rp = await client.post(reset_url, json={"email": user["email"],}) assert rp.url_obj.path == reset_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_EMAIL_SENT.format(**user)) @@ -105,15 +96,21 @@ async def test_reset_and_confirm(client, capsys, cfg): # emulates user click on email url rp = await client.get(confirmation_url) assert rp.status == 200 - assert rp.url_obj.path_qs == URL(cfg.LOGIN_REDIRECT).with_fragment("reset-password?code=%s" % code ).path_qs + assert ( + rp.url_obj.path_qs + == URL(cfg.LOGIN_REDIRECT) + .with_fragment("reset-password?code=%s" % code) + .path_qs + ) # api/specs/webserver/v0/components/schemas/auth.yaml#/ResetPasswordForm - reset_allowed_url = client.app.router['auth_reset_password_allowed'].url_for(code=code) - new_password = get_random_string(5,10) - rp = await client.post(reset_allowed_url, json={ - 'password': new_password, - 'confirm': new_password, - }) + reset_allowed_url = client.app.router["auth_reset_password_allowed"].url_for( + code=code + ) + new_password = get_random_string(5, 10) + rp = await client.post( + reset_allowed_url, json={"password": new_password, "confirm": new_password,} + ) payload = await rp.json() assert rp.status == 200, payload assert rp.url_obj.path == reset_allowed_url.path @@ -121,19 +118,18 @@ async def test_reset_and_confirm(client, capsys, cfg): # TODO: multiple flash messages # Try new password - logout_url = client.app.router['auth_logout'].url_for() + logout_url = client.app.router["auth_logout"].url_for() rp = await client.post(logout_url) assert rp.url_obj.path == logout_url.path await assert_status(rp, web.HTTPUnauthorized, "Unauthorized") - login_url = client.app.router['auth_login'].url_for() - rp = await client.post(login_url, json={ - 'email': user['email'], - 'password': new_password, - }) + login_url = client.app.router["auth_login"].url_for() + rp = await client.post( + login_url, json={"email": user["email"], "password": new_password,} + ) assert rp.url_obj.path == login_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_LOGGED_IN) -if __name__ == '__main__': - pytest.main([__file__, '--maxfail=1']) +if __name__ == "__main__": + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/test_resource_manager.py index fd3f25917b2..eb2551e52d2 100644 --- a/services/web/server/tests/unit/with_dbs/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/test_resource_manager.py @@ -22,8 +22,11 @@ from simcore_service_webserver.director import setup_director from simcore_service_webserver.login import setup_login from simcore_service_webserver.projects import setup_projects -from simcore_service_webserver.resource_manager import (config, registry, - setup_resource_manager) +from simcore_service_webserver.resource_manager import ( + config, + registry, + setup_resource_manager, +) from simcore_service_webserver.resource_manager.registry import get_registry from simcore_service_webserver.rest import setup_rest from simcore_service_webserver.security import setup_security @@ -40,6 +43,7 @@ 
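# NOTE: test_reset_and_confirm above walks the full password-reset round trip.
# Condensed into one sketch (routes and payloads copied from that test; the
# helper function itself is hypothetical):

async def reset_password_roundtrip(client, email: str, new_password: str, code: str):
    # 1. request a reset email
    reset_url = client.app.router["auth_reset_password"].url_for()
    await client.post(reset_url, json={"email": email})
    # 2. the emailed confirmation link redirects to the front-end fragment
    #    "reset-password?code=<code>", from which `code` is taken
    # 3. submit the new password against that code
    allowed_url = client.app.router["auth_reset_password_allowed"].url_for(code=code)
    await client.post(
        allowed_url, json={"password": new_password, "confirm": new_password}
    )
    # 4. the old session is invalidated; log in again with the new credentials
    login_url = client.app.router["auth_login"].url_for()
    await client.post(login_url, json={"email": email, "password": new_password})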
GARBAGE_COLLECTOR_INTERVAL = 1 SERVICE_DELETION_DELAY = 1 + @pytest.fixture def client(loop, aiohttp_client, app_cfg, postgres_service): cfg = deepcopy(app_cfg) @@ -49,7 +53,9 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): cfg["db"]["init_tables"] = True # inits postgres_service cfg["projects"]["enabled"] = True cfg["director"]["enabled"] = True - cfg[config.CONFIG_SECTION_NAME]["garbage_collection_interval_seconds"] = GARBAGE_COLLECTOR_INTERVAL # increase speed of garbage collection + cfg[config.CONFIG_SECTION_NAME][ + "garbage_collection_interval_seconds" + ] = GARBAGE_COLLECTOR_INTERVAL # increase speed of garbage collection # fake config app = create_safe_application(cfg) @@ -66,10 +72,12 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): setup_director(app) assert setup_resource_manager(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': cfg["main"]["port"], - 'host': cfg['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={"port": cfg["main"]["port"], "host": cfg["main"]["host"]}, + ) + ) @pytest.fixture() @@ -81,12 +89,13 @@ async def logged_user(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds=user_role != UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: print("-----> logged in user", user_role) yield user print("<----- logged out user", user_role) + @pytest.fixture() async def logged_user2(client, user_role: UserRole): """ adds a user in db and logs in with client @@ -96,74 +105,77 @@ async def logged_user2(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds=user_role != UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: print("-----> logged in user", user_role) yield user print("<----- logged out user", user_role) - - @pytest.fixture async def empty_user_project(client, empty_project, logged_user): project = empty_project() - async with NewProject( - project, - client.app, - user_id=logged_user["id"] - ) as project: + async with NewProject(project, client.app, user_id=logged_user["id"]) as project: print("-----> added project", project["name"]) yield project print("<----- removed project", project["name"]) + @pytest.fixture async def empty_user_project2(client, empty_project, logged_user): project = empty_project() - async with NewProject( - project, - client.app, - user_id=logged_user["id"] - ) as project: + async with NewProject(project, client.app, user_id=logged_user["id"]) as project: print("-----> added project", project["name"]) yield project print("<----- removed project", project["name"]) - - # ------------------------ UTILS ---------------------------------- def set_service_deletion_delay(delay: int, app: web.Application): - app[config.APP_CONFIG_KEY][config.CONFIG_SECTION_NAME]["resource_deletion_timeout_seconds"] = delay + app[config.APP_CONFIG_KEY][config.CONFIG_SECTION_NAME][ + "resource_deletion_timeout_seconds" + ] = delay + async def open_project(client, project_uuid: str, client_session_id: str) -> None: url = client.app.router["open_project"].url_for(project_id=project_uuid) resp = await client.post(url, json=client_session_id) await assert_status(resp, web.HTTPOk) + async def close_project(client, project_uuid: str, client_session_id: str) -> None: url = client.app.router["close_project"].url_for(project_id=project_uuid) resp = await client.post(url, json=client_session_id) await 
assert_status(resp, web.HTTPNoContent) + # ------------------------ TESTS ------------------------------- async def test_anonymous_websocket_connection(socketio_client, client_session_id): with pytest.raises(socketio.exceptions.ConnectionError): await socketio_client(client_session_id()) -@pytest.mark.parametrize("user_role", [ - # (UserRole.ANONYMOUS), - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_websocket_resource_management(client, logged_user, socketio_client, client_session_id): + +@pytest.mark.parametrize( + "user_role", + [ + # (UserRole.ANONYMOUS), + (UserRole.GUEST), + (UserRole.USER), + (UserRole.TESTER), + ], +) +async def test_websocket_resource_management( + client, logged_user, socketio_client, client_session_id +): app = client.server.app socket_registry = get_registry(app) cur_client_session_id = client_session_id() sio = await socketio_client(cur_client_session_id) sid = sio.sid - resource_key = {"user_id":str(logged_user["id"]), "client_session_id": cur_client_session_id} + resource_key = { + "user_id": str(logged_user["id"]), + "client_session_id": cur_client_session_id, + } assert await socket_registry.find_keys(("socket_id", sio.sid)) == [resource_key] assert sio.sid in await socket_registry.find_resources(resource_key, "socket_id") assert len(await socket_registry.find_resources(resource_key, "socket_id")) == 1 @@ -174,13 +186,19 @@ async def test_websocket_resource_management(client, logged_user, socketio_clien assert not sid in await socket_registry.find_resources(resource_key, "socket_id") assert not await socket_registry.find_resources(resource_key, "socket_id") -@pytest.mark.parametrize("user_role", [ - # (UserRole.ANONYMOUS), - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_websocket_multiple_connections(client, logged_user, socketio_client, client_session_id): + +@pytest.mark.parametrize( + "user_role", + [ + # (UserRole.ANONYMOUS), + (UserRole.GUEST), + (UserRole.USER), + (UserRole.TESTER), + ], +) +async def test_websocket_multiple_connections( + client, logged_user, socketio_client, client_session_id +): app = client.server.app socket_registry = get_registry(app) NUMBER_OF_SOCKETS = 5 @@ -189,10 +207,20 @@ async def test_websocket_multiple_connections(client, logged_user, socketio_clie for socket in range(NUMBER_OF_SOCKETS): cur_client_session_id = client_session_id() sio = await socketio_client(cur_client_session_id) - resource_key = {"user_id": str(logged_user["id"]), "client_session_id": cur_client_session_id} + resource_key = { + "user_id": str(logged_user["id"]), + "client_session_id": cur_client_session_id, + } assert await socket_registry.find_keys(("socket_id", sio.sid)) == [resource_key] - assert [sio.sid] == await socket_registry.find_resources(resource_key, "socket_id") - assert len(await socket_registry.find_resources({"user_id": str(logged_user["id"]), "client_session_id": "*"}, "socket_id")) == (socket+1) + assert [sio.sid] == await socket_registry.find_resources( + resource_key, "socket_id" + ) + assert len( + await socket_registry.find_resources( + {"user_id": str(logged_user["id"]), "client_session_id": "*"}, + "socket_id", + ) + ) == (socket + 1) clients.append(sio) # NOTE: the socket.io client needs the websockets package in order to upgrade to websocket transport @@ -202,18 +230,25 @@ async def test_websocket_multiple_connections(client, logged_user, socketio_clie await sio.disconnect() assert not sio.sid assert not await socket_registry.find_keys(("socket_id", sio.sid)) - 
assert not sid in await socket_registry.find_resources(resource_key, "socket_id") + assert not sid in await socket_registry.find_resources( + resource_key, "socket_id" + ) assert not await socket_registry.find_resources(resource_key, "socket_id") -@pytest.mark.parametrize("user_role,expected", [ - # (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_websocket_disconnected_after_logout(client, logged_user, socketio_client, client_session_id, expected, mocker): +@pytest.mark.parametrize( + "user_role,expected", + [ + # (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_websocket_disconnected_after_logout( + client, logged_user, socketio_client, client_session_id, expected, mocker +): app = client.server.app socket_registry = get_registry(app) @@ -221,24 +256,25 @@ async def test_websocket_disconnected_after_logout(client, logged_user, socketio cur_client_session_id1 = client_session_id() sio = await socketio_client(cur_client_session_id1) socket_logout_mock_callable = mocker.Mock() - sio.on('logout', handler=socket_logout_mock_callable) + sio.on("logout", handler=socket_logout_mock_callable) # connect second socket cur_client_session_id2 = client_session_id() sio2 = await socketio_client(cur_client_session_id2) socket_logout_mock_callable2 = mocker.Mock() - sio2.on('logout', handler=socket_logout_mock_callable2) + sio2.on("logout", handler=socket_logout_mock_callable2) # connect third socket cur_client_session_id3 = client_session_id() sio3 = await socketio_client(cur_client_session_id3) socket_logout_mock_callable3 = mocker.Mock() - sio3.on('logout', handler=socket_logout_mock_callable3) - + sio3.on("logout", handler=socket_logout_mock_callable3) # logout client with socket 2 - logout_url = client.app.router['auth_logout'].url_for() - r = await client.post(logout_url, json={"client_session_id": cur_client_session_id2}) + logout_url = client.app.router["auth_logout"].url_for() + r = await client.post( + logout_url, json={"client_session_id": cur_client_session_id2} + ) assert r.url_obj.path == logout_url.path await assert_status(r, expected) @@ -250,7 +286,7 @@ async def test_websocket_disconnected_after_logout(client, logged_user, socketio # the others should receive a logout message through their respective sockets await sleep(3) socket_logout_mock_callable.assert_called_once() - socket_logout_mock_callable2.assert_not_called() # note 2 should be not called ever + socket_logout_mock_callable2.assert_not_called() # note 2 should be not called ever socket_logout_mock_callable3.assert_called_once() await sleep(3) @@ -260,47 +296,71 @@ async def test_websocket_disconnected_after_logout(client, logged_user, socketio assert not sio3.sid -@pytest.mark.parametrize("user_role", [ - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_interactive_services_removed_after_logout(loop, client, logged_user, empty_user_project, mocked_director_api, mocked_dynamic_service, client_session_id, socketio_client): +@pytest.mark.parametrize( + "user_role", [(UserRole.GUEST), (UserRole.USER), (UserRole.TESTER),] +) +async def test_interactive_services_removed_after_logout( + loop, + client, + logged_user, + empty_user_project, + mocked_director_api, + mocked_dynamic_service, + client_session_id, + socketio_client, +): set_service_deletion_delay(SERVICE_DELETION_DELAY, 
client.server.app) # login - logged_user fixture # create empty study - empty_user_project fixture # create dynamic service - mocked_dynamic_service fixture - service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) + service = await mocked_dynamic_service( + logged_user["id"], empty_user_project["uuid"] + ) # create websocket client_session_id1 = client_session_id() sio = await socketio_client(client_session_id1) # open project in client 1 await open_project(client, empty_user_project["uuid"], client_session_id1) # logout - logout_url = client.app.router['auth_logout'].url_for() + logout_url = client.app.router["auth_logout"].url_for() r = await client.post(logout_url, json={"client_session_id": client_session_id1}) assert r.url_obj.path == logout_url.path await assert_status(r, web.HTTPOk) # ensure sufficient time is wasted here - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL+1) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL + 1) # assert dynamic service is removed calls = [call(client.server.app, service["service_uuid"])] mocked_director_api["stop_service"].assert_has_calls(calls) -@pytest.mark.parametrize("user_role, expected", [ - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs(loop, client, logged_user, expected, empty_user_project, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id): +@pytest.mark.parametrize( + "user_role, expected", + [ + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs( + loop, + client, + logged_user, + expected, + empty_user_project, + mocked_director_api, + mocked_dynamic_service, + socketio_client, + client_session_id, +): set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app) # login - logged_user fixture # create empty study - empty_user_project fixture # create dynamic service - mocked_dynamic_service fixture - service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) + service = await mocked_dynamic_service( + logged_user["id"], empty_user_project["uuid"] + ) # create first websocket client_session_id1 = client_session_id() sio = await socketio_client(client_session_id1) @@ -317,7 +377,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t await sio.disconnect() assert not sio.sid # ensure sufficient time is wasted here - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL) # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() # disconnect second websocket @@ -330,24 +390,38 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() # event after waiting some time - await sleep(SERVICE_DELETION_DELAY+1) + await sleep(SERVICE_DELETION_DELAY + 1) mocked_director_api["stop_service"].assert_not_called() # now really disconnect await sio2.disconnect() assert not sio2.sid # we need to wait for the service deletion delay - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL+1) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL + 1) # assert dynamic service is gone 
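# NOTE: the sleeps in these garbage-collection tests encode a worst-case bound:
# a service is only reaped once its deletion delay has fully elapsed AND the
# collector has ticked at least once afterwards. A hypothetical helper making
# that arithmetic explicit (constants as defined at the top of this module):

from asyncio import sleep


async def wait_for_garbage_collection(slack: int = 1) -> None:
    # deletion delay (1s) + one collector interval (1s) + slack (1s) ~= 3s here
    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL + slack)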
calls = [call(client.server.app, service["service_uuid"])] mocked_director_api["stop_service"].assert_has_calls(calls) -@pytest.mark.parametrize("user_role", [ - # (UserRole.ANONYMOUS), - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_interactive_services_removed_per_project(loop, client, logged_user, empty_user_project, empty_user_project2, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id): + +@pytest.mark.parametrize( + "user_role", + [ + # (UserRole.ANONYMOUS), + (UserRole.GUEST), + (UserRole.USER), + (UserRole.TESTER), + ], +) +async def test_interactive_services_removed_per_project( + loop, + client, + logged_user, + empty_user_project, + empty_user_project2, + mocked_director_api, + mocked_dynamic_service, + socketio_client, + client_session_id, +): set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app) # create server with delay set to DELAY # login - logged_user fixture @@ -356,9 +430,15 @@ async def test_interactive_services_removed_per_project(loop, client, logged_use # service1 in project1 = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) # service2 in project2 = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) # service3 in project2 = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) - service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) - service2 = await mocked_dynamic_service(logged_user["id"], empty_user_project2["uuid"]) - service3 = await mocked_dynamic_service(logged_user["id"], empty_user_project2["uuid"]) + service = await mocked_dynamic_service( + logged_user["id"], empty_user_project["uuid"] + ) + service2 = await mocked_dynamic_service( + logged_user["id"], empty_user_project2["uuid"] + ) + service3 = await mocked_dynamic_service( + logged_user["id"], empty_user_project2["uuid"] + ) # create websocket1 from tab1 client_session_id1 = client_session_id() sio1 = await socketio_client(client_session_id1) @@ -373,7 +453,7 @@ async def test_interactive_services_removed_per_project(loop, client, logged_use # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() # wait the defined delay - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL) # assert dynamic service 1 is removed calls = [call(client.server.app, service["service_uuid"])] mocked_director_api["stop_service"].assert_has_calls(calls) @@ -385,26 +465,44 @@ async def test_interactive_services_removed_per_project(loop, client, logged_use # assert dynamic services are still around mocked_director_api["stop_service"].assert_not_called() # wait the defined delay - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL) # assert dynamic service 2,3 is removed - calls = [call(client.server.app, service2["service_uuid"]), - call(client.server.app, service3["service_uuid"])] + calls = [ + call(client.server.app, service2["service_uuid"]), + call(client.server.app, service3["service_uuid"]), + ] mocked_director_api["stop_service"].assert_has_calls(calls) mocked_director_api["stop_service"].reset_mock() -@pytest.mark.parametrize("user_role", [ - # (UserRole.ANONYMOUS), - # (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_services_remain_after_closing_one_out_of_two_tabs(loop, client, logged_user, empty_user_project, 
empty_user_project2, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id): + +@pytest.mark.parametrize( + "user_role", + [ + # (UserRole.ANONYMOUS), + # (UserRole.GUEST), + (UserRole.USER), + (UserRole.TESTER), + ], +) +async def test_services_remain_after_closing_one_out_of_two_tabs( + loop, + client, + logged_user, + empty_user_project, + empty_user_project2, + mocked_director_api, + mocked_dynamic_service, + socketio_client, + client_session_id, +): set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app) # create server with delay set to DELAY # login - logged_user fixture # create empty study in project - empty_user_project fixture # service in project = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) - service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) + service = await mocked_dynamic_service( + logged_user["id"], empty_user_project["uuid"] + ) # open project in tab1 client_session_id1 = client_session_id() sio1 = await socketio_client(client_session_id1) @@ -416,13 +514,13 @@ async def test_services_remain_after_closing_one_out_of_two_tabs(loop, client, l # close project in tab1 await close_project(client, empty_user_project["uuid"], client_session_id1) # wait the defined delay - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL) # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() # close project in tab2 await close_project(client, empty_user_project["uuid"], client_session_id2) # wait the defined delay - await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL) - mocked_director_api["stop_service"].assert_has_calls([ - call(client.server.app, service["service_uuid"]) - ]) + await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL) + mocked_director_api["stop_service"].assert_has_calls( + [call(client.server.app, service["service_uuid"])] + ) diff --git a/services/web/server/tests/unit/with_dbs/test_storage.py b/services/web/server/tests/unit/with_dbs/test_storage.py index fe1a1d132ad..50652df1aff 100644 --- a/services/web/server/tests/unit/with_dbs/test_storage.py +++ b/services/web/server/tests/unit/with_dbs/test_storage.py @@ -33,9 +33,7 @@ async def _get_locs(request: web.Request): assert "user_id" in query assert query["user_id"], "Expected user id" - return web.json_response({ - 'data': [{"user_id": int(query["user_id"])}, ] - }) + return web.json_response({"data": [{"user_id": int(query["user_id"])},]}) async def _get_filemeta(request: web.Request): assert not request.has_body @@ -46,9 +44,7 @@ async def _get_filemeta(request: web.Request): assert query["user_id"], "Expected user id" - return web.json_response({ - 'data': [{"filemeta": 42}, ] - }) + return web.json_response({"data": [{"filemeta": 42},]}) async def _get_filtered_list(request: web.Request): assert not request.has_body @@ -59,9 +55,7 @@ async def _get_filtered_list(request: web.Request): assert query["user_id"], "Expected user id" assert query["uuid_filter"], "expected a filter" - return web.json_response({ - 'data': [{"uuid_filter": query["uuid_filter"]}, ] - }) + return web.json_response({"data": [{"uuid_filter": query["uuid_filter"]},]}) async def _get_datasets(request: web.Request): assert not request.has_body @@ -72,9 +66,9 @@ async def _get_datasets(request: web.Request): assert query["user_id"], "Expected user id" - return web.json_response({ - 'data': [{"dataset_id": 
"asdf", "display_name" : "bbb"}, ] - }) + return web.json_response( + {"data": [{"dataset_id": "asdf", "display_name": "bbb"},]} + ) async def _get_datasets_meta(request: web.Request): assert not request.has_body @@ -85,23 +79,31 @@ async def _get_datasets_meta(request: web.Request): assert query["user_id"], "Expected user id" - return web.json_response({ - 'data': [{"dataset_id": "asdf", "display_name" : "bbb"}, ] - }) - - storage_api_version = cfg['version'] - assert storage_api_version != API_VERSION, "backend service w/ different version as webserver entrypoint" - - app.router.add_get(f"/{storage_api_version}/locations" , _get_locs) - app.router.add_get(f"/{storage_api_version}/locations/0/files/{{file_id}}/metadata", _get_filemeta) - app.router.add_get(f"/{storage_api_version}/locations/0/files/metadata", _get_filtered_list) + return web.json_response( + {"data": [{"dataset_id": "asdf", "display_name": "bbb"},]} + ) + + storage_api_version = cfg["version"] + assert ( + storage_api_version != API_VERSION + ), "backend service w/ different version as webserver entrypoint" + + app.router.add_get(f"/{storage_api_version}/locations", _get_locs) + app.router.add_get( + f"/{storage_api_version}/locations/0/files/{{file_id}}/metadata", _get_filemeta + ) + app.router.add_get( + f"/{storage_api_version}/locations/0/files/metadata", _get_filtered_list + ) app.router.add_get(f"/{storage_api_version}/locations/0/datasets", _get_datasets) - app.router.add_get(f"/{storage_api_version}/locations/0/datasets/{{dataset_id}}/metadata", _get_datasets_meta) + app.router.add_get( + f"/{storage_api_version}/locations/0/datasets/{{dataset_id}}/metadata", + _get_datasets_meta, + ) - assert cfg['host']=='localhost' + assert cfg["host"] == "localhost" - - server = loop.run_until_complete(aiohttp_server(app, port= cfg['port'])) + server = loop.run_until_complete(aiohttp_server(app, port=cfg["port"])) return server @@ -112,23 +114,27 @@ async def logged_user(client, role: UserRole): NOTE: role fixture is defined as a parametrization below """ async with LoggedUser( - client, - {"role": role.name}, - check_if_succeeds = role!=UserRole.ANONYMOUS + client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS ) as user: yield user -#-------------------------------------------------------------------------- +# -------------------------------------------------------------------------- PREFIX = "/" + API_VERSION + "/storage" -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_get_storage_locations(client, storage_server, logged_user, role, expected): + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_get_storage_locations( + client, storage_server, logged_user, role, expected +): url = "/v0/storage/locations" assert url.startswith(PREFIX) @@ -137,21 +143,27 @@ async def test_get_storage_locations(client, storage_server, logged_user, role, if not error: assert len(data) == 1 - assert data[0]['user_id'] == logged_user['id'] - -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_get_datasets_metadata(client, storage_server, logged_user, role, 
expected): + assert data[0]["user_id"] == logged_user["id"] + + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_get_datasets_metadata( + client, storage_server, logged_user, role, expected +): url = "/v0/storage/locations/0/datasets" assert url.startswith(PREFIX) _url = client.app.router["get_datasets_metadata"].url_for(location_id="0") - assert url==str(_url) + assert url == str(_url) resp = await client.get(url) data, error = await assert_status(resp, expected) @@ -161,19 +173,26 @@ async def test_get_datasets_metadata(client, storage_server, logged_user, role, assert data[0]["dataset_id"] == "asdf" -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_get_files_metadata_dataset(client, storage_server, logged_user, role, expected): +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_get_files_metadata_dataset( + client, storage_server, logged_user, role, expected +): url = "/v0/storage/locations/0/datasets/N:asdfsdf/metadata" assert url.startswith(PREFIX) - _url = client.app.router["get_files_metadata_dataset"].url_for(location_id="0", dataset_id="N:asdfsdf") + _url = client.app.router["get_files_metadata_dataset"].url_for( + location_id="0", dataset_id="N:asdfsdf" + ) - assert url==str(_url) + assert url == str(_url) resp = await client.get(url) data, error = await assert_status(resp, expected) @@ -182,16 +201,20 @@ async def test_get_files_metadata_dataset(client, storage_server, logged_user, r assert len(data) == 1 assert data[0]["dataset_id"] == "asdf" -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) async def test_storage_file_meta(client, storage_server, logged_user, role, expected): # tests redirect of path with quotes in path file_id = "a/b/c/d/e/dat" - url = "/v0/storage/locations/0/files/{}/metadata".format(quote(file_id, safe='')) + url = "/v0/storage/locations/0/files/{}/metadata".format(quote(file_id, safe="")) assert url.startswith(PREFIX) @@ -200,19 +223,24 @@ async def test_storage_file_meta(client, storage_server, logged_user, role, expe if not error: assert len(data) == 1 - assert data[0]['filemeta'] == 42 - - -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) + assert data[0]["filemeta"] == 42 + + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) async def test_storage_list_filter(client, storage_server, logged_user, role, expected): # tests composition of 2 queries file_id = "a/b/c/d/e/dat" - url = 
"/v0/storage/locations/0/files/metadata?uuid_filter={}".format(quote(file_id, safe='')) + url = "/v0/storage/locations/0/files/metadata?uuid_filter={}".format( + quote(file_id, safe="") + ) assert url.startswith(PREFIX) @@ -221,4 +249,4 @@ async def test_storage_list_filter(client, storage_server, logged_user, role, ex if not error: assert len(data) == 1 - assert data[0]['uuid_filter'] == file_id + assert data[0]["uuid_filter"] == file_id diff --git a/services/web/server/tests/unit/with_dbs/test_users.py b/services/web/server/tests/unit/with_dbs/test_users.py index 4a57675655c..738092f756e 100644 --- a/services/web/server/tests/unit/with_dbs/test_users.py +++ b/services/web/server/tests/unit/with_dbs/test_users.py @@ -24,8 +24,11 @@ from simcore_service_webserver.users import setup_users from utils_assert import assert_status from utils_login import LoggedUser -from utils_tokens import (create_token_in_db, delete_all_tokens_from_db, - get_token_from_db) +from utils_tokens import ( + create_token_in_db, + delete_all_tokens_from_db, + get_token_from_db, +) API_VERSION = "v0" @@ -38,7 +41,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): assert cfg["rest"]["version"] == API_VERSION - cfg["db"]["init_tables"] = True # inits postgres_service + cfg["db"]["init_tables"] = True # inits postgres_service # fake config app = create_safe_application(cfg) @@ -50,10 +53,9 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): setup_login(app) setup_users(app) - client = loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': port, - 'host': 'localhost' - })) + client = loop.run_until_complete( + aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"}) + ) return client @@ -63,6 +65,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): # https://github.com/pytest-dev/pytest-asyncio/issues/76 # + @pytest.fixture async def logged_user(client, role: UserRole): """ adds a user in db and logs in with client @@ -70,9 +73,7 @@ async def logged_user(client, role: UserRole): NOTE: role fixture is defined as a parametrization below """ async with LoggedUser( - client, - {"role": role.name}, - check_if_succeeds = role!=UserRole.ANONYMOUS + client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS ) as user: yield user @@ -90,7 +91,7 @@ async def fake_tokens(logged_user, tokens_db): from faker.providers import lorem fake = faker.Factory.create() - fake.seed(4567) # Always the same fakes + fake.seed(4567) # Always the same fakes fake.add_provider(lorem) all_tokens = [] @@ -100,28 +101,33 @@ async def fake_tokens(logged_user, tokens_db): for _ in repeat(None, 5): # TODO: add tokens from other users data = { - 'service': fake.word(ext_word_list=None), - 'token_key': fake.md5(raw_output=False), - 'token_secret': fake.md5(raw_output=False) + "service": fake.word(ext_word_list=None), + "token_key": fake.md5(raw_output=False), + "token_secret": fake.md5(raw_output=False), } - row = await create_token_in_db( tokens_db, - user_id = logged_user['id'], - token_service = data['service'], - token_data = data + row = await create_token_in_db( + tokens_db, + user_id=logged_user["id"], + token_service=data["service"], + token_data=data, ) all_tokens.append(data) return all_tokens -#-------------------------------------------------------------------------- +# -------------------------------------------------------------------------- PREFIX = "/" + API_VERSION + "/me" -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, 
web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) async def test_get_profile(logged_user, client, role, expected): url = client.app.router["get_my_profile"].url_for() assert str(url) == "/v0/me" @@ -130,18 +136,22 @@ async def test_get_profile(logged_user, client, role, expected): data, error = await assert_status(resp, expected) if not error: - assert data['login'] == logged_user["email"] - assert data['gravatar_id'] - assert data['first_name'] == logged_user["name"] - assert data['last_name'] == "" - assert data['role'] == role.name.capitalize() - -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPNoContent), -]) + assert data["login"] == logged_user["email"] + assert data["gravatar_id"] + assert data["first_name"] == logged_user["name"] + assert data["last_name"] == "" + assert data["role"] == role.name.capitalize() + + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPNoContent), + ], +) async def test_update_profile(logged_user, client, role, expected): url = client.app.router["update_my_profile"].url_for() assert str(url) == "/v0/me" @@ -153,51 +163,55 @@ async def test_update_profile(logged_user, client, role, expected): resp = await client.get(url) data, _ = await assert_status(resp, web.HTTPOk) - assert data['first_name'] == logged_user["name"] - assert data['last_name'] == "Foo" - assert data['role'] == role.name.capitalize() - + assert data["first_name"] == logged_user["name"] + assert data["last_name"] == "Foo" + assert data["role"] == role.name.capitalize() # Test CRUD on tokens -------------------------------------------- # TODO: template for CRUD testing? 
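A sketch of what such a template could look like (not part of these patches: it assumes pytest, aiohttp's web and the UserRole enum are imported as at the top of this module, and the helper name standard_role_response is made up here):

    def standard_role_response(guest=web.HTTPForbidden, ok=web.HTTPOk):
        # Each CRUD test below repeats the same role -> expected-status table;
        # only the GUEST status and the happy-path status vary per operation.
        return pytest.mark.parametrize(
            "role,expected",
            [
                (UserRole.ANONYMOUS, web.HTTPUnauthorized),
                (UserRole.GUEST, guest),
                (UserRole.USER, ok),
                (UserRole.TESTER, ok),
            ],
        )

Applied as, e.g., @standard_role_response(ok=web.HTTPCreated) on test_create_token below.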
# TODO: create parametrize fixture with resource_name -RESOURCE_NAME = 'tokens' +RESOURCE_NAME = "tokens" PREFIX = "/" + API_VERSION + "/me/" + RESOURCE_NAME - -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPCreated), - (UserRole.TESTER, web.HTTPCreated), -]) +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPCreated), + (UserRole.TESTER, web.HTTPCreated), + ], +) async def test_create_token(client, logged_user, tokens_db, role, expected): url = client.app.router["create_tokens"].url_for() - assert '/v0/me/tokens' == str(url) + assert "/v0/me/tokens" == str(url) token = { - 'service': "blackfynn", - 'token_key': '4k9lyzBTS', - 'token_secret': 'my secret' + "service": "blackfynn", + "token_key": "4k9lyzBTS", + "token_secret": "my secret", } resp = await client.post(url, json=token) data, error = await assert_status(resp, expected) if not error: db_token = await get_token_from_db(tokens_db, token_data=token) - assert db_token['token_data'] == token - assert db_token['user_id'] == logged_user["id"] - - -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) + assert db_token["token_data"] == token + assert db_token["user_id"] == logged_user["id"] + + +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) async def test_read_token(client, logged_user, tokens_db, fake_tokens, role, expected): # list all url = client.app.router["list_tokens"].url_for() @@ -208,7 +222,7 @@ async def test_read_token(client, logged_user, tokens_db, fake_tokens, role, exp if not error: expected_token = random.choice(fake_tokens) - sid = expected_token['service'] + sid = expected_token["service"] # get one url = client.app.router["get_token"].url_for(service=sid) @@ -220,44 +234,52 @@ async def test_read_token(client, logged_user, tokens_db, fake_tokens, role, exp assert data == expected_token, "list and read item are both read operations" -@pytest.mark.parametrize("role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPNoContent), -]) -async def test_update_token(client, logged_user, tokens_db, fake_tokens, role, expected): +@pytest.mark.parametrize( + "role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPNoContent), + ], +) +async def test_update_token( + client, logged_user, tokens_db, fake_tokens, role, expected +): selected = random.choice(fake_tokens) - sid = selected['service'] + sid = selected["service"] url = client.app.router["get_token"].url_for(service=sid) assert "/v0/me/tokens/%s" % sid == str(url) - resp = await client.put(url, json={ - 'token_secret': 'some completely new secret' - }) + resp = await client.put(url, json={"token_secret": "some completely new secret"}) data, error = await assert_status(resp, expected) if not error: # check in db token_in_db = await get_token_from_db(tokens_db, token_service=sid) - assert token_in_db['token_data']['token_secret'] == 'some 
completely new secret'
-        assert token_in_db['token_data']['token_secret'] != selected['token_secret']
+        assert token_in_db["token_data"]["token_secret"] == "some completely new secret"
+        assert token_in_db["token_data"]["token_secret"] != selected["token_secret"]
 
-        selected['token_secret'] = 'some completely new secret'
-        assert token_in_db['token_data'] == selected
+        selected["token_secret"] = "some completely new secret"
+        assert token_in_db["token_data"] == selected
 
 
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPNoContent),
-])
-async def test_delete_token(client, logged_user, tokens_db, fake_tokens, role, expected):
-    sid = fake_tokens[0]['service']
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPNoContent),
+        (UserRole.TESTER, web.HTTPNoContent),
+    ],
+)
+async def test_delete_token(
+    client, logged_user, tokens_db, fake_tokens, role, expected
+):
+    sid = fake_tokens[0]["service"]
 
     url = client.app.router["delete_token"].url_for(service=sid)
     assert "/v0/me/tokens/%s" % sid == str(url)
@@ -272,6 +294,7 @@ async def test_delete_token(client, logged_user, tokens_db, fake_tokens, role, e
 
 ## BUG FIXES #######################################################
 
+
 @pytest.fixture
 def mock_failing_connection(mocker) -> MagicMock:
     """
@@ -280,16 +303,22 @@ def mock_failing_connection(mocker) -> MagicMock:
     """
     # See http://initd.org/psycopg/docs/module.html
     conn_execute = mocker.patch.object(SAConnection, "execute")
-    conn_execute.side_effect=OperationalError("MOCK: server closed the connection unexpectedly")
+    conn_execute.side_effect = OperationalError(
+        "MOCK: server closed the connection unexpectedly"
+    )
     return conn_execute
 
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.USER, web.HTTPServiceUnavailable),
-])
-async def test_get_profile_with_failing_db_connection(logged_user, client,
+
+@pytest.mark.parametrize(
+    "role,expected", [(UserRole.USER, web.HTTPServiceUnavailable),]
+)
+async def test_get_profile_with_failing_db_connection(
+    logged_user,
+    client,
     mock_failing_connection: MagicMock,
     role: UserRole,
-    expected: web.HTTPException):
+    expected: web.HTTPException,
+):
     """
     Reproduces issue https://github.com/ITISFoundation/osparc-simcore/pull/1160
 
@@ -305,6 +334,8 @@ async def test_get_profile_with_failing_db_connection(logged_user, client,
     resp = await client.get(url)
 
     NUM_RETRY = 3
-    assert mock_failing_connection.call_count == NUM_RETRY, "Expected mock failure raised in AuthorizationPolicy.authorized_userid after severals"
+    assert (
+        mock_failing_connection.call_count == NUM_RETRY
+    ), "Expected mock failure raised in AuthorizationPolicy.authorized_userid after several retries"
 
     data, error = await assert_status(resp, expected)
diff --git a/tests/e2e/Makefile b/tests/e2e/Makefile
index 95326767b0e..9d0e8683a24 100644
--- a/tests/e2e/Makefile
+++ b/tests/e2e/Makefile
@@ -20,11 +20,23 @@ else
 	@awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "%-20s %s\n", $$1, $$2}' $(MAKEFILE_LIST)
 endif
 
+
 .PHONY: install
 install: ## install testing framework
 	# install puppeteer
 	@npm install
 	@pip install -r requirements/requirements.txt
+	# installing postgres-db
+	@pushd "$(abspath $(CURDIR)/../../packages/postgres-database)"; pip install -r requirements/prod.txt; popd
+
+
+.PHONY: pg-db-tables
+pg-db-tables: ## 
inits pg database tables + # Initializing tables + sc-pg discover -u scu -p adminadmin -d simcoredb + sc-pg upgrade + sc-pg clean + registry-up: ## deploys the insecure docker registry in the simcore network docker stack deploy -c docker-compose.yml registry diff --git a/tests/e2e/requirements/Makefile b/tests/e2e/requirements/Makefile index 8e65c3d6de4..d7832d2ec84 100644 --- a/tests/e2e/requirements/Makefile +++ b/tests/e2e/requirements/Makefile @@ -4,7 +4,7 @@ ROOT_DIR = $(realpath $(CURDIR)/../../../) VENV_DIR ?= $(realpath $(ROOT_DIR)/.venv) -%.txt: %.in +requirements.txt: requirements.in # pip compiling $< @$(VENV_DIR)/bin/pip-compile --output-file $@ $< diff --git a/tests/e2e/requirements/requirements.txt b/tests/e2e/requirements/requirements.txt index d7a43e29178..d7caf7825c5 100644 --- a/tests/e2e/requirements/requirements.txt +++ b/tests/e2e/requirements/requirements.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile +# pip-compile --output-file=requirements.txt requirements.in # certifi==2019.11.28 # via requests chardet==3.0.4 # via requests -docker==4.1.0 -idna==2.8 # via requests +docker==4.2.0 +idna==2.9 # via requests pyyaml==5.3 -requests==2.22.0 # via docker -six==1.13.0 # via docker, tenacity, websocket-client -tenacity==6.0.0 -urllib3==1.25.7 # via requests +requests==2.23.0 # via docker +six==1.14.0 # via docker, tenacity, websocket-client +tenacity==6.1.0 +urllib3==1.25.8 # via requests websocket-client==0.57.0 # via docker diff --git a/tests/e2e/setup_env_insecure_registry b/tests/e2e/scripts/setup_env_insecure_registry.bash old mode 100755 new mode 100644 similarity index 96% rename from tests/e2e/setup_env_insecure_registry rename to tests/e2e/scripts/setup_env_insecure_registry.bash index f4f89b28830..129e58bc1a4 --- a/tests/e2e/setup_env_insecure_registry +++ b/tests/e2e/scripts/setup_env_insecure_registry.bash @@ -9,7 +9,6 @@ make .env # disable email verification echo WEBSERVER_LOGIN_REGISTRATION_INVITATION_REQUIRED=0 >>.env echo WEBSERVER_LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0 >>.env -echo WEBSERVER_DB_INITTABLES=1 >>.env # set max number of CPUs sidecar echo SERVICES_MAX_NANO_CPUS=2000000000 >> .env diff --git a/tests/e2e/tutorials/tutorialBase.js b/tests/e2e/tutorials/tutorialBase.js index d3744801a68..272493f7cf0 100644 --- a/tests/e2e/tutorials/tutorialBase.js +++ b/tests/e2e/tutorials/tutorialBase.js @@ -55,13 +55,39 @@ class TutorialBase { async login() { this.__responsesQueue.addResponseListener("projects?type=template"); + this.__responsesQueue.addResponseListener("catalog/dags"); + this.__responsesQueue.addResponseListener("services"); await auto.logIn(this.__page, this.__user, this.__pass); try { - await this.__responsesQueue.waitUntilResponse("projects?type=template"); + const resp = await this.__responsesQueue.waitUntilResponse("projects?type=template"); + const templates = resp["data"]; + console.log("Templates received", templates.length); + templates.forEach(template => { + console.log(" - ", template.name); + }); } catch(err) { console.error("Templates could not be fetched", err); } + try { + const resp = await this.__responsesQueue.waitUntilResponse("catalog/dags"); + const dags = resp["data"]; + console.log("DAGs received:", dags.length); + dags.forEach(dag => { + console.log(" - ", dag.name); + }); + } + catch(err) { + console.error("DAGs could not be fetched", err); + } + try { + const resp = await this.__responsesQueue.waitUntilResponse("services"); + const services = resp["data"]; + 
console.log("Services received:", services.length); + } + catch(err) { + console.error("Services could not be fetched", err); + } } async openTemplate(waitFor = 1000) { diff --git a/tests/e2e/utils/responsesQueue.js b/tests/e2e/utils/responsesQueue.js index 1366f6c0085..8f161468ab2 100644 --- a/tests/e2e/utils/responsesQueue.js +++ b/tests/e2e/utils/responsesQueue.js @@ -4,19 +4,20 @@ class ResponsesQueue { constructor(page) { this.__page = page; this.__reqQueue = []; - this.__respQueue = []; + this.__respPendingQueue = []; + this.__respReceivedQueue = {}; } addResponseListener(url) { const page = this.__page; const reqQueue = this.__reqQueue; - const respQueue = this.__respQueue; + const respPendingQueue = this.__respPendingQueue; reqQueue.push(url); - respQueue.push(url); + respPendingQueue.push(url); console.log("-- Expected response added to queue", url); page.on("request", function callback(req) { if (req.url().includes(url)) { - console.log((new Date).toUTCString(), "-- Queued request sent", req.url()); + console.log((new Date).toUTCString(), "-- Queued request sent", req.method(), req.url()); page.removeListener("request", callback); const index = reqQueue.indexOf(url); if (index > -1) { @@ -24,14 +25,18 @@ class ResponsesQueue { } } }); + const that = this; page.on("response", function callback(resp) { if (resp.url().includes(url)) { - console.log((new Date).toUTCString(), "-- Queued response received", resp.url()); - page.removeListener("response", callback); - const index = respQueue.indexOf(url); - if (index > -1) { - respQueue.splice(index, 1); - } + console.log((new Date).toUTCString(), "-- Queued response received", resp.url(), ":"); + resp.json().then(data => { + that.__respReceivedQueue[url] = data; + page.removeListener("response", callback); + const index = respPendingQueue.indexOf(url); + if (index > -1) { + respPendingQueue.splice(index, 1); + } + }); } }); } @@ -41,7 +46,7 @@ class ResponsesQueue { } isResponseInQueue(url) { - return this.__respQueue.includes(url); + return this.__respPendingQueue.includes(url); } async waitUntilResponse(url, timeout = 10000) { @@ -55,6 +60,16 @@ class ResponsesQueue { if (sleptFor >= timeout) { throw("-- Timeout reached." 
+ new Date().toUTCString()); } + // console.log("waitUntilResponse", url); + // console.log(Object.keys(this.__respReceivedQueue)); + if (Object.prototype.hasOwnProperty.call(this.__respReceivedQueue, url)) { + const resp = this.__respReceivedQueue[url]; + if (resp && "error" in resp && resp["error"] !== null) { + throw("-- Error in response", resp["error"]); + } + delete this.__respReceivedQueue[url]; + return resp; + } } } diff --git a/tests/e2e/utils/utils.js b/tests/e2e/utils/utils.js index 7bdf0797f24..f1c852f0dc0 100644 --- a/tests/e2e/utils/utils.js +++ b/tests/e2e/utils/utils.js @@ -8,7 +8,8 @@ function getUserAndPass(args) { userPass.user = args[1]; userPass.pass = args[2]; userPass.newUser = false; - } else { + } + else { const rand = __getRandUserAndPass(); userPass.user = rand.user; userPass.pass = rand.pass; diff --git a/tests/swarm-deploy/Makefile b/tests/swarm-deploy/Makefile index 4b402d3f865..bad6aefc0de 100644 --- a/tests/swarm-deploy/Makefile +++ b/tests/swarm-deploy/Makefile @@ -1,10 +1,13 @@ -.DEFAULT_GOAL := help +# +# Targets for DEVELOPMENT for system tests +# +include ../../scripts/common.Makefile ROOT_DIR = $(abspath $(CURDIR)/../../) VENV_DIR ?= $(abspath $(ROOT_DIR)/.venv) -%.txt: %.in +requirements.txt: requirements.in # pip compiling $< @$(VENV_DIR)/bin/pip-compile --output-file $@ $< @@ -19,12 +22,3 @@ install: $(VENV_DIR) requirements.txt ## installs dependencies tests: ## runs all tests [DEV] # running unit tests @$(VENV_DIR)/bin/pytest -vv -x --ff --pdb $(CURDIR) - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for tests/swarm-deploy:" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/tests/swarm-deploy/requirements.txt b/tests/swarm-deploy/requirements.txt index cfa0279f2d2..8369f9c0e01 100644 --- a/tests/swarm-deploy/requirements.txt +++ b/tests/swarm-deploy/requirements.txt @@ -9,32 +9,32 @@ async-timeout==3.0.1 # via aiohttp attrs==19.3.0 # via aiohttp, pytest certifi==2019.11.28 # via requests chardet==3.0.4 # via aiohttp, requests -coverage==4.5.1 # via -r requirements.in (line 1), pytest-cov -docker==4.2.0 # via -r requirements.in (line 11) +coverage==4.5.1 +docker==4.2.0 idna-ssl==1.1.0 # via aiohttp idna==2.9 # via idna-ssl, requests, yarl importlib-metadata==1.5.0 # via pluggy, pytest more-itertools==8.2.0 # via pytest multidict==4.7.5 # via aiohttp, yarl -packaging==20.1 # via pytest, pytest-sugar +packaging==20.3 # via pytest, pytest-sugar pluggy==0.13.1 # via pytest py==1.8.1 # via pytest pyparsing==2.4.6 # via packaging -pytest-aiohttp==0.3.0 # via -r requirements.in (line 4) -pytest-cov==2.8.1 # via -r requirements.in (line 5) -pytest-instafail==0.4.1.post0 # via -r requirements.in (line 6) -pytest-mock==2.0.0 # via -r requirements.in (line 7) -pytest-runner==5.2 # via -r requirements.in (line 8) -pytest-sugar==0.9.2 # via -r requirements.in (line 9) -pytest==5.3.5 # via -r requirements.in (line 3), pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar -pyyaml==5.3 # via -r requirements.in (line 13) +pytest-aiohttp==0.3.0 +pytest-cov==2.8.1 +pytest-instafail==0.4.1.post0 +pytest-mock==2.0.0 +pytest-runner==5.2 +pytest-sugar==0.9.2 +pytest==5.3.5 +pyyaml==5.3 requests==2.23.0 # via docker six==1.14.0 # via docker, packaging, tenacity, websocket-client -tenacity==6.1.0 # via -r requirements.in (line 12) 
+tenacity==6.1.0
 termcolor==1.1.0          # via pytest-sugar
 typing-extensions==3.7.4.1  # via aiohttp
 urllib3==1.25.8           # via requests
 wcwidth==0.1.8            # via pytest
 websocket-client==0.57.0  # via docker
 yarl==1.4.2               # via aiohttp
-zipp==3.0.0               # via importlib-metadata
+zipp==3.1.0               # via importlib-metadata
diff --git a/tests/swarm-deploy/test_service_images.py b/tests/swarm-deploy/test_service_images.py
new file mode 100644
index 00000000000..dd5d97c62de
--- /dev/null
+++ b/tests/swarm-deploy/test_service_images.py
@@ -0,0 +1,26 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+import subprocess
+from typing import Dict
+import pytest
+
+
+# search ujson in all _base.txt and add here all services that contain it
+@pytest.mark.parametrize("service", [
+    'director',
+    'webserver',
+    'storage',
+    'catalog'
+])
+def test_ujson_installation(service:str, osparc_deploy: Dict):
+    # tests a failing installation that went undetected
+    # and was fixed in PR https://github.com/ITISFoundation/osparc-simcore/pull/1353
+    image_name = osparc_deploy['simcore']['services'][service]['image']
+
+    assert subprocess.run(
+        f'docker run -t --rm {image_name} python -c "import ujson; print(ujson.__version__)"',
+        shell=True,
+        check=True,
+    )
diff --git a/tests/system-load/Makefile b/tests/system-load/Makefile
deleted file mode 100644
index 631f1bf2143..00000000000
--- a/tests/system-load/Makefile
+++ /dev/null
@@ -1,37 +0,0 @@
-
-.PHONY: install .check-venv-active
-
-.check-venv-active: # prevents from installing in system python
-	# checking if active virtual environment
-	@python3 -c "import sys; assert sys.base_prefix!=sys.prefix"
-
-install: .check-venv-active
-	pip install -r requirements.txt
-
-
-.PHONY: up-locust down
-NUM_CLIENTS = 1000
-HATCH_RATE = 2
-
-up-locust: .check-venv-active
-	# Starting http://localhost:8089/
-	locust --csv-full-history --locustfile=locust_files/basic.py
-
-down:
-	pkill locust
-
-
-.PHONY: tables
-PG_PUBLISHED_PORT := $(shell docker inspect simcore_postgres --format "{{(index .Endpoint.Ports 0).PublishedPort}}")
-
-define inject-data-in-db =
-	@PGPASSWORD=adminadmin psql --host localhost \
-		--port $(PG_PUBLISHED_PORT) \
-		--user scu \
-		--dbname simcoredb \
-		--command "\copy $(1) from 'data/$(2)' csv header;";
-endef
-
-tables: ## injects test projects and user in databass
-	$(call inject-data-in-db,users,users.csv)
-	$(call inject-data-in-db,projects,projects.csv)
diff --git a/tests/system-load/ManualTests.md b/tests/system-load/ManualTests.md
deleted file mode 100644
index 094cb1b3323..00000000000
--- a/tests/system-load/ManualTests.md
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-- [study 1](http://127.0.0.1:9081/study/f5bb0778-524b-11ea-819c-02420a00070b)
-- [study 2](http://127.0.0.1:9081/study/194bb264-a717-11e9-9dff-02420aff2767)
-- [study 3](http://127.0.0.1:9081/study/ab6a1254-524a-11ea-b940-02420a00070b)
-- [study 4](http://127.0.0.1:9081/study/c0ab87ca-524a-11ea-a711-02420a00070b)
-- [study 5](http://127.0.0.1:9081/study/8cfbae2c-524b-11ea-9f65-02420a00070b)
-- [study 6](http://127.0.0.1:9081/study/6c5fa7ba-524b-11ea-a21e-02420a00070b)
-- [study 7](http://127.0.0.1:9081/study/0b478e80-524b-11ea-b243-02420a00070b)
-- [study 8](http://127.0.0.1:9081/study/003aaf4a-524a-11ea-b061-02420a00070b)
-- [study 9](http://127.0.0.1:9081/study/33eb80e2-524c-11ea-a311-02420a00070b)
diff --git a/tests/system-load/data/projects.csv b/tests/system-load/data/projects.csv
deleted file mode 100644
index 9e4a274e9d9..00000000000
--- 
a/tests/system-load/data/projects.csv +++ /dev/null @@ -1,10 +0,0 @@ -id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published -19,TEMPLATE,f5bb0778-524b-11ea-819c-02420a00070b,[ISAN] CC Rabbit,"","",maiz@foo.com,2020-02-18 12:41:18.692,2020-02-21 09:58:45.498,"{""001dd1f0-e3f5-4695-8e26-e3dfd82dda8e"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""outputs"": {""outFile"": {""store"": 0, ""dataset"": ""f5bb0778-524b-11ea-819c-02420a00070b"", ""path"": ""f5bb0778-524b-11ea-819c-02420a00070b/001dd1f0-e3f5-4695-8e26-e3dfd82dda8e/initial_WTstates_Rabbit.txt"", ""label"": ""initial_WTstates_Rabbit.txt""}}, ""progress"": 100, ""position"": {""x"": 31, ""y"": 27}}, ""27d897b6-52ae-4aa9-afb3-12fbc8d2891e"": {""key"": ""simcore/services/comp/rabbit-ss-0d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Rabbit SS 0D cardiac model"", ""inputs"": {""Na"": 0, ""GKr"": 1, ""TotalSimulationTime"": 300, ""TargetHeartRatePhase1"": 60, ""TargetHeartRatePhase2"": 150, ""TargetHeartRatePhase3"": 60, ""cAMKII"": ""WT"", ""tissue_size_tw"": 165, ""tissue_size_tl"": 165, ""Homogeneity"": ""homogeneous"", ""initialWTStates"": {""nodeUuid"": ""001dd1f0-e3f5-4695-8e26-e3dfd82dda8e"", ""output"": ""outFile""}, ""num_threads"": 2}, ""inputNodes"": [""001dd1f0-e3f5-4695-8e26-e3dfd82dda8e""], ""thumbnail"": """", ""position"": {""x"": 305, ""y"": 26}}, ""6fe7f438-630b-429c-8d06-c036c2779cb2"": {""key"": ""simcore/services/comp/rabbit-ss-1d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Rabbit SS 1D cardiac model"", ""inputs"": {""Na"": 0, ""GKr"": 1, ""TotalSimulationTime"": 300, ""TargetHeartRatePhase1"": 60, ""TargetHeartRatePhase2"": 150, ""TargetHeartRatePhase3"": 60, ""cAMKII"": ""WT"", ""tissue_size_tw"": 165, ""tissue_size_tl"": 165, ""Homogeneity"": ""homogeneous"", ""initialWTStates"": {""nodeUuid"": ""001dd1f0-e3f5-4695-8e26-e3dfd82dda8e"", ""output"": ""outFile""}, ""num_threads"": 2}, ""inputNodes"": [""001dd1f0-e3f5-4695-8e26-e3dfd82dda8e""], ""thumbnail"": """", ""position"": {""x"": 307, ""y"": 197}}, ""4f7f01dc-825d-4e42-99e3-78697d4de731"": {""key"": ""simcore/services/comp/rabbit-ss-2d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Rabbit SS 2D cardiac model"", ""inputs"": {""Na"": 0, ""GKr"": 1, ""TotalSimulationTime"": 10, ""TargetHeartRatePhase1"": 60, ""TargetHeartRatePhase2"": 150, ""TargetHeartRatePhase3"": 60, ""cAMKII"": ""WT"", ""tissue_size_tw"": 65, ""tissue_size_tl"": 65, ""Homogeneity"": ""homogeneous"", ""input_from_1d"": {""nodeUuid"": ""6fe7f438-630b-429c-8d06-c036c2779cb2"", ""output"": ""output_4""}, ""num_threads"": 2}, ""inputNodes"": [""6fe7f438-630b-429c-8d06-c036c2779cb2""], ""thumbnail"": """", ""position"": {""x"": 510, ""y"": 320}}, ""7b103fe1-5650-4c96-add1-53927a30f7bb"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""3.0.4"", ""label"": ""0D cardiac model viewer"", ""inputs"": {""vm1Hz"": {""nodeUuid"": ""27d897b6-52ae-4aa9-afb3-12fbc8d2891e"", ""output"": ""vm1Hz""}}, ""inputNodes"": [""27d897b6-52ae-4aa9-afb3-12fbc8d2891e""], ""thumbnail"": """", ""position"": {""x"": 730, ""y"": 26}}, ""e192edf5-bb7f-45f4-a47d-8970bc784cf8"": {""key"": ""simcore/services/dynamic/cc-1d-viewer"", ""version"": ""3.0.3"", ""label"": ""1D cardiac model viewer"", ""inputs"": {""ECGs"": {""nodeUuid"": ""6fe7f438-630b-429c-8d06-c036c2779cb2"", ""output"": ""output_1""}, ""APs"": {""nodeUuid"": 
""6fe7f438-630b-429c-8d06-c036c2779cb2"", ""output"": ""output_2""}}, ""inputNodes"": [""6fe7f438-630b-429c-8d06-c036c2779cb2""], ""thumbnail"": """", ""position"": {""x"": 733, ""y"": 199}}, ""52f7f296-9697-4c63-bfed-cd36694afd67"": {""key"": ""simcore/services/dynamic/cc-2d-viewer"", ""version"": ""3.0.3"", ""label"": ""2D cardiac model viewer"", ""inputs"": {""ap"": {""nodeUuid"": ""4f7f01dc-825d-4e42-99e3-78697d4de731"", ""output"": ""output_1""}}, ""inputNodes"": [""4f7f01dc-825d-4e42-99e3-78697d4de731""], ""thumbnail"": """", ""position"": {""x"": 741, ""y"": 320}}}",1 -11,TEMPLATE,194bb264-a717-11e9-9dff-02420aff2767,ISAN: osparc-opencor,"","",crespo@foo.com,2019-07-15 15:42:06.208,2019-07-15 15:42:06.208,"{""f631a142-d3b6-435d-abfb-8ad4acb91a70"": {""key"": ""simcore/services/comp/osparc-opencor"", ""version"": ""0.3.0"", ""label"": ""osparc-opencor"", ""inputs"": {""stimulation_mode"": ""{{stimulation_mode}}"", ""stimulation_level"": ""{{stimulation_level}}""}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 587, ""y"": 279}}, ""744c9209-0450-4272-8357-4a08cf7f8458"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.10.2"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""f631a142-d3b6-435d-abfb-8ad4acb91a70"", ""output"": ""membrane_potential_csv""}}, ""inputNodes"": [""f631a142-d3b6-435d-abfb-8ad4acb91a70""], ""outputNode"": false, ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 933, ""y"": 228}}}",1 -12,TEMPLATE,ab6a1254-524a-11ea-b940-02420a00070b,[ISAN] opencor,"","",maiz@foo.com,2020-02-18 12:32:04.513,2020-02-18 12:32:38.546,"{""ab1869c1-1fd6-40e7-b73e-5b71c4f6bf03"": {""key"": ""simcore/services/comp/osparc-opencor"", ""version"": ""0.3.0"", ""label"": ""osparc-opencor"", ""inputs"": {""stimulation_mode"": 1, ""stimulation_level"": 0.5}, ""inputNodes"": [], ""thumbnail"": """", ""position"": {""x"": 291, ""y"": 267}}, ""82d426a6-94ef-4a79-b575-623cccf062b3"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.10.4"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""ab1869c1-1fd6-40e7-b73e-5b71c4f6bf03"", ""output"": ""membrane_potential_csv""}}, ""inputNodes"": [""ab1869c1-1fd6-40e7-b73e-5b71c4f6bf03""], ""thumbnail"": """", ""position"": {""x"": 598, ""y"": 281}}}",1 -13,TEMPLATE,c0ab87ca-524a-11ea-a711-02420a00070b,[ISAN] 3D anatomical,"","",maiz@foo.com,2020-02-18 12:32:40.174,2020-02-18 12:34:40.84,"{""a008fbe3-d4cf-42d9-8ca1-ec37d8a26dea"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""outputs"": {""outFile"": {""store"": 0, ""dataset"": ""c0ab87ca-524a-11ea-a711-02420a00070b"", ""path"": ""c0ab87ca-524a-11ea-a711-02420a00070b/a008fbe3-d4cf-42d9-8ca1-ec37d8a26dea/Jeduk_New.zip"", ""label"": ""Jeduk_New.zip""}}, ""progress"": 100, ""position"": {""x"": 58, ""y"": 93}}, ""e52c222c-4afc-4fa4-98d8-6b8e51f0b9d7"": {""key"": ""simcore/services/dynamic/3d-viewer-gpu"", ""version"": ""2.11.0"", ""label"": ""3d-viewer-gpu"", ""inputs"": {""A"": {""nodeUuid"": ""a008fbe3-d4cf-42d9-8ca1-ec37d8a26dea"", ""output"": ""outFile""}}, ""inputNodes"": [""a008fbe3-d4cf-42d9-8ca1-ec37d8a26dea""], ""thumbnail"": """", ""position"": {""x"": 350, ""y"": 93}}}",1 -14,TEMPLATE,8cfbae2c-524b-11ea-9f65-02420a00070b,[ISAN] Bornstein,"","",maiz@foo.com,2020-02-18 12:38:22.954,2020-02-18 
12:41:10.522,"{""4b021656-b2fc-4559-aa5a-54f58678520e"": {""key"": ""simcore/services/dynamic/bornstein-dash"", ""version"": ""1.0.2"", ""label"": ""Bornstein-Dash"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""position"": {""x"": 128, ""y"": 82}}}",1 -15,TEMPLATE,6c5fa7ba-524b-11ea-a21e-02420a00070b,[ISAN] Mattward,"","",maiz@foo.com,2020-02-18 12:37:28.245,2020-02-18 12:38:21.714,"{""25b1896a-4ea6-4fd9-9987-493354f3b0a9"": {""key"": ""simcore/services/dynamic/mattward-dash"", ""version"": ""1.0.2"", ""label"": ""MattWard-Dash"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""position"": {""x"": 124, ""y"": 89}}}",1 -16,TEMPLATE,0b478e80-524b-11ea-b243-02420a00070b,[ISAN] 3D EM,"","",maiz@foo.com,2020-02-18 12:34:45.348,2020-02-18 12:37:26.402,"{""cd0f60f7-b0dc-4e2a-b6fc-7343c94a7165"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""outputs"": {""outFile"": {""store"": 0, ""dataset"": ""0b478e80-524b-11ea-b243-02420a00070b"", ""path"": ""0b478e80-524b-11ea-b243-02420a00070b/cd0f60f7-b0dc-4e2a-b6fc-7343c94a7165/EM_02mm.vtk"", ""label"": ""EM_02mm.vtk""}}, ""progress"": 100, ""position"": {""x"": 83, ""y"": 72}}, ""86fd8042-4ff0-443f-9c85-576418055de6"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker_2"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""outputs"": {""outFile"": {""store"": 0, ""dataset"": ""0b478e80-524b-11ea-b243-02420a00070b"", ""path"": ""0b478e80-524b-11ea-b243-02420a00070b/86fd8042-4ff0-443f-9c85-576418055de6/Neuron_MovieActionPotentialPropagation.zip"", ""label"": ""Neuron_MovieActionPotentialPropagation.zip""}}, ""progress"": 100, ""position"": {""x"": 83, ""y"": 215}}, ""30272b4e-7d59-4bb8-9988-513aea9671eb"": {""key"": ""simcore/services/dynamic/3d-viewer-gpu"", ""version"": ""2.11.0"", ""label"": ""3d-viewer-gpu"", ""inputs"": {""A"": {""nodeUuid"": ""cd0f60f7-b0dc-4e2a-b6fc-7343c94a7165"", ""output"": ""outFile""}, ""B"": {""nodeUuid"": ""86fd8042-4ff0-443f-9c85-576418055de6"", ""output"": ""outFile""}}, ""inputNodes"": [""cd0f60f7-b0dc-4e2a-b6fc-7343c94a7165"", ""86fd8042-4ff0-443f-9c85-576418055de6""], ""thumbnail"": """", ""position"": {""x"": 393, ""y"": 156}}}",1 -17,TEMPLATE,003aaf4a-524a-11ea-b061-02420a00070b,[ISAN] 2D plot,"","",maiz@foo.com,2020-02-18 12:27:17.312,2020-02-18 12:32:01.739,"{""e9798b9b-c354-4eae-ba93-106128fb79fa"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""outputs"": {""outFile"": {""store"": 0, ""dataset"": ""003aaf4a-524a-11ea-b061-02420a00070b"", ""path"": ""003aaf4a-524a-11ea-b061-02420a00070b/e9798b9b-c354-4eae-ba93-106128fb79fa/RNAdat.csv"", ""label"": ""RNAdat.csv""}}, ""progress"": 100, ""position"": {""x"": 95, ""y"": 95}}, ""bbff4cbb-1797-487b-ac58-089952ef278e"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.10.4"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""e9798b9b-c354-4eae-ba93-106128fb79fa"", ""output"": ""outFile""}}, ""inputNodes"": [""e9798b9b-c354-4eae-ba93-106128fb79fa""], ""thumbnail"": """", ""position"": {""x"": 371, ""y"": 95}}}",1 -18,TEMPLATE,33eb80e2-524c-11ea-a311-02420a00070b,[ISAN] CC Human,"","",maiz@foo.com,2020-02-18 12:43:03.028,2020-02-21 09:59:02.357,"{""037d93cf-0734-48a6-81b6-2dde2bf3f24c"": {""key"": 
""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""thumbnail"": """", ""outputs"": {""outFile"": {""store"": 0, ""dataset"": ""33eb80e2-524c-11ea-a311-02420a00070b"", ""path"": ""33eb80e2-524c-11ea-a311-02420a00070b/037d93cf-0734-48a6-81b6-2dde2bf3f24c/initial_WTStates_Human.txt"", ""label"": ""initial_WTStates_Human.txt""}}, ""progress"": 100, ""position"": {""x"": 23, ""y"": 44}}, ""bbcd45f9-5798-402a-859b-de070221a915"": {""key"": ""simcore/services/comp/human-gb-0d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Human GB 0D cardiac model"", ""inputs"": {""Na"": 0, ""GKr"": 1, ""TotalSimulationTime"": 300, ""TargetHeartRatePhase1"": 60, ""TargetHeartRatePhase2"": 150, ""TargetHeartRatePhase3"": 60, ""cAMKII"": ""WT"", ""tissue_size_tw"": 165, ""tissue_size_tl"": 165, ""Homogeneity"": ""homogeneous"", ""initialWTStates"": {""nodeUuid"": ""037d93cf-0734-48a6-81b6-2dde2bf3f24c"", ""output"": ""outFile""}, ""num_threads"": 2}, ""inputNodes"": [""037d93cf-0734-48a6-81b6-2dde2bf3f24c""], ""thumbnail"": """", ""position"": {""x"": 259, ""y"": 13}}, ""8cfbf674-a9e1-4e6c-98e3-8aa382c067c6"": {""key"": ""simcore/services/comp/human-gb-1d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Human GB 1D cardiac model"", ""inputs"": {""Na"": 0, ""GKr"": 1, ""TotalSimulationTime"": 300, ""TargetHeartRatePhase1"": 60, ""TargetHeartRatePhase2"": 150, ""TargetHeartRatePhase3"": 60, ""cAMKII"": ""WT"", ""tissue_size_tw"": 165, ""tissue_size_tl"": 165, ""Homogeneity"": ""homogeneous"", ""initialWTStates"": {""nodeUuid"": ""037d93cf-0734-48a6-81b6-2dde2bf3f24c"", ""output"": ""outFile""}, ""num_threads"": 2}, ""inputNodes"": [""037d93cf-0734-48a6-81b6-2dde2bf3f24c""], ""thumbnail"": """", ""position"": {""x"": 261, ""y"": 171}}, ""0e30337a-21e0-4909-b3d3-7d3ffc68c9b6"": {""key"": ""simcore/services/comp/human-gb-2d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Human GB 2D cardiac model"", ""inputs"": {""Na"": 0, ""GKr"": 1, ""TotalSimulationTime"": 10, ""TargetHeartRatePhase1"": 60, ""TargetHeartRatePhase2"": 150, ""TargetHeartRatePhase3"": 60, ""cAMKII"": ""WT"", ""tissue_size_tw"": 165, ""tissue_size_tl"": 165, ""Homogeneity"": ""homogeneous"", ""input_from_1d"": {""nodeUuid"": ""8cfbf674-a9e1-4e6c-98e3-8aa382c067c6"", ""output"": ""output_3""}, ""num_threads"": 2}, ""inputNodes"": [""8cfbf674-a9e1-4e6c-98e3-8aa382c067c6""], ""thumbnail"": """", ""position"": {""x"": 462, ""y"": 287}}, ""dafd373b-f8e0-4daa-8a87-b0113e2a95f1"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""3.0.4"", ""label"": ""0D cardiac model viewer"", ""inputs"": {""vm1Hz"": {""nodeUuid"": ""bbcd45f9-5798-402a-859b-de070221a915"", ""output"": ""vm1Hz""}}, ""inputNodes"": [""bbcd45f9-5798-402a-859b-de070221a915""], ""thumbnail"": """", ""position"": {""x"": 678, ""y"": 13}}, ""66610243-a8ce-42c9-bcef-a0c85ea79d1a"": {""key"": ""simcore/services/dynamic/cc-1d-viewer"", ""version"": ""3.0.3"", ""label"": ""1D cardiac model viewer"", ""inputs"": {""ECGs"": {""nodeUuid"": ""8cfbf674-a9e1-4e6c-98e3-8aa382c067c6"", ""output"": ""output_1""}, ""APs"": {""nodeUuid"": ""8cfbf674-a9e1-4e6c-98e3-8aa382c067c6"", ""output"": ""output_2""}}, ""inputNodes"": [""8cfbf674-a9e1-4e6c-98e3-8aa382c067c6""], ""thumbnail"": """", ""position"": {""x"": 680, ""y"": 170}}, ""25dd1664-363f-4156-a3a4-b5d91eff7b01"": {""key"": ""simcore/services/dynamic/cc-2d-viewer"", ""version"": ""3.0.3"", ""label"": ""2D cardiac model viewer"", ""inputs"": 
{""ap"": {""nodeUuid"": ""0e30337a-21e0-4909-b3d3-7d3ffc68c9b6"", ""output"": ""output_1""}}, ""inputNodes"": [""0e30337a-21e0-4909-b3d3-7d3ffc68c9b6""], ""thumbnail"": """", ""position"": {""x"": 689, ""y"": 287}}}",1 diff --git a/tests/system-load/data/users.csv b/tests/system-load/data/users.csv deleted file mode 100644 index 22fca19d757..00000000000 --- a/tests/system-load/data/users.csv +++ /dev/null @@ -1,2 +0,0 @@ -id,name,email,password_hash,status,role,created_at,created_ip -1,test,test@test.com,$5$rounds=1000$hz2IU23TTu5P9d4o$gtDerxvvx12U8nDOxFbYOaGtheNr/WnzDl3XN7BKHWC,ACTIVE,USER,2020-02-21 15:59:15.413612,127.0.0.1 diff --git a/tests/system-load/locust_files/basic.py b/tests/system-load/locust_files/basic.py deleted file mode 100644 index ae63ecb3cdb..00000000000 --- a/tests/system-load/locust_files/basic.py +++ /dev/null @@ -1,48 +0,0 @@ -import os - -from locust import HttpLocust, TaskSet, between, task -import uuid as uuidlib - - -class UserBehaviour(TaskSet): - def on_start(self): - """ on_start is called when a Locust start before any task is scheduled """ - self._login() - self._client_session_id = uuidlib.uuid4() # pylint: disable=attribute-defined-outside-init - - def on_stop(self): - """ on_stop is called when the TaskSet is stopping """ - self._logout() - - def _login(self): - self.client.post( - "/v0/auth/login", - json={ - "email": os.environ.get("TEST_USER", "test@test.com"), - "password": os.environ.get("TEST_PASSWORD", "test"), - }, - ) - - def _logout(self): - self.client.post( - "/v0/auth/logout", json={"client_session_id": str(self._client_session_id) } - ) - - @property - def short_id(self) -> str: - return str(self._client_session_id)[:4] - - @task(1) - def get_me(self): - print(f"{self.short_id} get_me") - self.client.get("/v0/me") - - @task(2) - def list_projects(self): - print(f"{self.short_id} list_projects") - self.client.get("/v0/projects") - - -class WebsiteUser(HttpLocust): - task_set = UserBehaviour - wait_time = between(5, 9) diff --git a/tests/system-load/locust_files/published.py b/tests/system-load/locust_files/published.py deleted file mode 100644 index a80e17efa8b..00000000000 --- a/tests/system-load/locust_files/published.py +++ /dev/null @@ -1,57 +0,0 @@ -import csv -import random -import sys -import time -from pathlib import Path -from typing import Dict, List -from uuid import uuid4 - -from locust import HttpLocust, TaskSet, between, task -from yarl import URL - -# pylint: disable=attribute-defined-outside-init - -current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -data_dir = current_dir / ".."/"data" - -def read_projects() -> List[Dict]: - with open(data_dir / "projects.csv", 'rt') as fh: - reader = csv.reader(fh) - keys = next(reader) - return [ dict( zip(keys, values) ) for values in reader ] - -def get_url(project: Dict) -> URL: - uuid = project['uuid'] - url = origin.with_path(f"/study/{uuid}") - return url - -origin = URL("http://127.0.0.1:9081") -projects = read_projects() - -print(f"Loaded {len(projects)} projects for test...") -print(f"Target host: {origin}") - - -class UserBehaviour(TaskSet): - def on_start(self): - """ on_start is called when a Locust start before any task is scheduled """ - self._id = str(uuid4()) - self._prj = random.choice(projects) - print(f"starting {self._id[:4]} -> {self._prj['name']}") - - def on_stop(self): - """ on_stop is called when the TaskSet is stopping """ - print(f"stoping {self._id[:4]}") - - @task(1) - def run_project(self): - url = get_url(self._prj) - 
print(f"Getting {url}") - resp = self.client.get(str(url)) - print(resp) - # TODO: shall run project - - -class WebsiteUser(HttpLocust): - task_set = UserBehaviour - wait_time = between(5, 9) diff --git a/tests/system-load/requirements.txt b/tests/system-load/requirements.txt deleted file mode 100644 index 9a4e58c1d66..00000000000 --- a/tests/system-load/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -locust -yarl From d415e3504de91ac6e7ae56cc0ee2900849260799 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 16:41:59 +0100 Subject: [PATCH 11/74] tabs to the top --- services/web/client/source/class/osparc/desktop/Dashboard.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/desktop/Dashboard.js b/services/web/client/source/class/osparc/desktop/Dashboard.js index cf0f587cea7..7fa2b63e097 100644 --- a/services/web/client/source/class/osparc/desktop/Dashboard.js +++ b/services/web/client/source/class/osparc/desktop/Dashboard.js @@ -37,7 +37,7 @@ qx.Class.define("osparc.desktop.Dashboard", { construct: function() { this.base(arguments); - this.setBarPosition("left"); + this.setBarPosition("top"); osparc.wrapper.JsonDiffPatch.getInstance().init(); osparc.wrapper.JsonTreeViewer.getInstance().init(); From 189b2faa3897c6ef2f41cc98778a192a935a7445 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 16:42:08 +0100 Subject: [PATCH 12/74] minor refactoring --- .../class/osparc/desktop/StudyBrowser.js | 29 +++++++++++++++---- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/desktop/StudyBrowser.js index 9fe7fcd63b2..d727e849365 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowser.js @@ -75,6 +75,7 @@ qx.Class.define("osparc.desktop.StudyBrowser", { this.__editPane.removeAll(); iframe.dispose(); this.__createStudiesLayout(); + this.__createEditStudyLayout(); this.__reloadStudies(); this.__attachEventHandlers(); const loadStudyId = osparc.store.Store.getInstance().getCurrentStudyId(); @@ -210,7 +211,15 @@ qx.Class.define("osparc.desktop.StudyBrowser", { __createStudiesLayout: function() { const studyFilters = this.__studyFilters = new osparc.component.filter.group.StudyFilterGroup("studyBrowser"); + const userStudyLayout = this.__createUserStudiesLayout(); + const tempStudyLayout = this.__createTemplateStudiesLayout(); + this.__studiesPane.add(studyFilters); + this.__studiesPane.add(userStudyLayout); + this.__studiesPane.add(tempStudyLayout); + }, + + __createNewStudyButton: function() { const newStudyBtn = new qx.ui.form.Button(this.tr("Create new study"), "@FontAwesome5Solid/plus-circle/18").set({ appearance: "xl-button", allowGrowX: false, @@ -218,6 +227,11 @@ qx.Class.define("osparc.desktop.StudyBrowser", { }); osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); newStudyBtn.addListener("execute", () => this.__createStudyBtnClkd()); + return newStudyBtn; + }, + + __createUserStudiesLayout: function() { + const newStudyBtn = this.__createNewStudyButton(); const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const studiesTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); @@ -234,7 +248,11 @@ qx.Class.define("osparc.desktop.StudyBrowser", { userStudyLayout.add(studiesTitleContainer); userStudyLayout.add(newStudyBtn); 
userStudyLayout.add(userStudyList); + return userStudyLayout; + }, + __createTemplateStudiesLayout: function() { + const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const templateDeleteButton = this.__templateDeleteButton = this.__createDeleteButton(); const tempStudyLabel = new qx.ui.basic.Label(this.tr("Template Studies")).set({ @@ -248,13 +266,12 @@ qx.Class.define("osparc.desktop.StudyBrowser", { }); tempStudyLayout.add(templateTitleContainer); tempStudyLayout.add(tempStudyList); + return tempStudyLayout; + }, - this.__editStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); - - this.__studiesPane.add(studyFilters); - this.__studiesPane.add(userStudyLayout); - this.__studiesPane.add(tempStudyLayout); - this.__editPane.add(this.__editStudyLayout); + __createEditStudyLayout: function() { + const editStudyLayout = this.__editStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + this.__editPane.add(editStudyLayout); }, __reloadStudies: function() { From 000c37485efee3e63fc18142d7906f3e058033c4 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 16:56:27 +0100 Subject: [PATCH 13/74] minor --- .../source/class/osparc/desktop/StudyBrowserListItem.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index da18b8771fc..77b3943bc68 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -32,7 +32,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { construct: function(menu) { this.base(arguments); this.set({ - width: 210 + width: this.self().ITEM_WIDTH }); // create a date format like "Oct. 
19, 2018 11:31 AM" @@ -65,6 +65,10 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { this.addListener("changeValue", this.__onToggleChange, this); }, + statics: { + ITEM_WIDTH: 210 + }, + events: { /** (Fired by {@link qx.ui.form.List}) */ "action": "qx.event.type.Event" From d3fe6f85854d575aa5d4bfddd19a4070a6423cd1 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 16:56:53 +0100 Subject: [PATCH 14/74] templates first --- .../source/class/osparc/desktop/StudyBrowser.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/desktop/StudyBrowser.js index d727e849365..a735907e2c2 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowser.js @@ -211,12 +211,12 @@ qx.Class.define("osparc.desktop.StudyBrowser", { __createStudiesLayout: function() { const studyFilters = this.__studyFilters = new osparc.component.filter.group.StudyFilterGroup("studyBrowser"); - const userStudyLayout = this.__createUserStudiesLayout(); const tempStudyLayout = this.__createTemplateStudiesLayout(); + const userStudyLayout = this.__createUserStudiesLayout(); this.__studiesPane.add(studyFilters); - this.__studiesPane.add(userStudyLayout); this.__studiesPane.add(tempStudyLayout); + this.__studiesPane.add(userStudyLayout); }, __createNewStudyButton: function() { @@ -231,12 +231,10 @@ qx.Class.define("osparc.desktop.StudyBrowser", { }, __createUserStudiesLayout: function() { - const newStudyBtn = this.__createNewStudyButton(); - const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const studiesTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const studiesDeleteButton = this.__studiesDeleteButton = this.__createDeleteButton(); - const myStudyLabel = new qx.ui.basic.Label(this.tr("My Studies")).set({ + const myStudyLabel = new qx.ui.basic.Label(this.tr("Recent studies")).set({ font: navBarLabelFont }); studiesTitleContainer.add(myStudyLabel); @@ -246,16 +244,17 @@ qx.Class.define("osparc.desktop.StudyBrowser", { marginTop: 20 }); userStudyLayout.add(studiesTitleContainer); - userStudyLayout.add(newStudyBtn); userStudyLayout.add(userStudyList); return userStudyLayout; }, __createTemplateStudiesLayout: function() { + const newStudyBtn = this.__createNewStudyButton(); + const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const templateDeleteButton = this.__templateDeleteButton = this.__createDeleteButton(); - const tempStudyLabel = new qx.ui.basic.Label(this.tr("Template Studies")).set({ + const tempStudyLabel = new qx.ui.basic.Label(this.tr("New studies")).set({ font: navBarLabelFont }); templateTitleContainer.add(tempStudyLabel); @@ -265,6 +264,7 @@ qx.Class.define("osparc.desktop.StudyBrowser", { marginTop: 20 }); tempStudyLayout.add(templateTitleContainer); + tempStudyLayout.add(newStudyBtn); tempStudyLayout.add(tempStudyList); return tempStudyLayout; }, @@ -449,7 +449,7 @@ qx.Class.define("osparc.desktop.StudyBrowser", { }, __createStudyListLayout: function() { - return new osparc.component.form.ToggleButtonContainer(new qx.ui.layout.Flow(8, 8)); + return new osparc.component.form.ToggleButtonContainer(new qx.ui.layout.Flow(12, 12)); }, __createStudyItem: function(study, isTemplate) { From 
554f3b9a5b56033be44741dd72a7988cb6fc9bf5 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 17:34:19 +0100 Subject: [PATCH 15/74] minor --- .../client/source/class/osparc/desktop/StudyBrowserListItem.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js index 77b3943bc68..87a357ca679 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js @@ -55,7 +55,7 @@ qx.Class.define("osparc.desktop.StudyBrowserListItem", { left: 0 }); - if (menu !== null) { + if (menu !== undefined) { this.setMenu(menu); } From ec5c552e255e3c99a148d80a0a368d0aebab06e0 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 17:42:13 +0100 Subject: [PATCH 16/74] narrower and centered dashboard --- .../source/class/osparc/desktop/MainPage.js | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/MainPage.js b/services/web/client/source/class/osparc/desktop/MainPage.js index f564a7cc81f..224378f250a 100644 --- a/services/web/client/source/class/osparc/desktop/MainPage.js +++ b/services/web/client/source/class/osparc/desktop/MainPage.js @@ -56,6 +56,7 @@ qx.Class.define("osparc.desktop.MainPage", { __navBar: null, __prjStack: null, __dashboard: null, + __dashboardLayout: null, __studyEditor: null, __createNavigationBar: function() { @@ -84,20 +85,32 @@ qx.Class.define("osparc.desktop.MainPage", { }, __createMainView: function() { - let prjStack = new qx.ui.container.Stack(); + const prjStack = new qx.ui.container.Stack(); - let dashboard = this.__dashboard = new osparc.desktop.Dashboard(); + const dashboard = this.__dashboard = new osparc.desktop.Dashboard().set({ + maxWidth: 1200 + }); dashboard.getStudyBrowser().addListener("startStudy", e => { const studyEditor = e.getData(); this.__startStudyEditor(studyEditor); }, this); - prjStack.add(dashboard); + + const dashboardLayout = this.__dashboardLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); + dashboardLayout.add(new qx.ui.core.Widget(), { + flex: 1 + }); + dashboardLayout.add(dashboard); + dashboardLayout.add(new qx.ui.core.Widget(), { + flex: 1 + }); + + prjStack.add(dashboardLayout); return prjStack; }, __showDashboard: function() { - this.__prjStack.setSelection([this.__dashboard]); + this.__prjStack.setSelection([this.__dashboardLayout]); this.__dashboard.getStudyBrowser().reloadUserStudies(); this.__navBar.setPathButtons([]); if (this.__studyEditor) { From 35b48187253e9def3f2cb2bbb9c4877a4cbeac14 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 16 Mar 2020 17:45:13 +0100 Subject: [PATCH 17/74] minor --- services/web/client/source/class/osparc/desktop/MainPage.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/desktop/MainPage.js b/services/web/client/source/class/osparc/desktop/MainPage.js index 224378f250a..8897b8edddd 100644 --- a/services/web/client/source/class/osparc/desktop/MainPage.js +++ b/services/web/client/source/class/osparc/desktop/MainPage.js @@ -88,7 +88,7 @@ qx.Class.define("osparc.desktop.MainPage", { const prjStack = new qx.ui.container.Stack(); const dashboard = this.__dashboard = new osparc.desktop.Dashboard().set({ - maxWidth: 1200 + width: 1200 }); dashboard.getStudyBrowser().addListener("startStudy", e => 
{ const studyEditor = e.getData(); From f58338d13822be358adeda80358b6d5eb98c68b6 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 09:34:55 +0100 Subject: [PATCH 18/74] moving files --- .../class/osparc/{desktop => dashboard}/Dashboard.js | 10 +++++----- .../class/osparc/{desktop => dashboard}/DataBrowser.js | 6 ++---- .../osparc/{desktop => dashboard}/ServiceBrowser.js | 6 +++--- .../{desktop => dashboard}/ServiceBrowserListItem.js | 4 ++-- .../osparc/{desktop => dashboard}/StudyBrowser.js | 10 +++++----- .../{desktop => dashboard}/StudyBrowserListItem.js | 2 +- .../web/client/source/class/osparc/desktop/MainPage.js | 2 +- 7 files changed, 19 insertions(+), 21 deletions(-) rename services/web/client/source/class/osparc/{desktop => dashboard}/Dashboard.js (87%) rename services/web/client/source/class/osparc/{desktop => dashboard}/DataBrowser.js (98%) rename services/web/client/source/class/osparc/{desktop => dashboard}/ServiceBrowser.js (98%) rename services/web/client/source/class/osparc/{desktop => dashboard}/ServiceBrowserListItem.js (97%) rename services/web/client/source/class/osparc/{desktop => dashboard}/StudyBrowser.js (98%) rename services/web/client/source/class/osparc/{desktop => dashboard}/StudyBrowserListItem.js (99%) diff --git a/services/web/client/source/class/osparc/desktop/Dashboard.js b/services/web/client/source/class/osparc/dashboard/Dashboard.js similarity index 87% rename from services/web/client/source/class/osparc/desktop/Dashboard.js rename to services/web/client/source/class/osparc/dashboard/Dashboard.js index 7fa2b63e097..7f39755b6ce 100644 --- a/services/web/client/source/class/osparc/desktop/Dashboard.js +++ b/services/web/client/source/class/osparc/dashboard/Dashboard.js @@ -26,12 +26,12 @@ * Here is a little example of how to use the widget. * *
- *   let dashboard = new osparc.desktop.Dashboard();
+ *   let dashboard = new osparc.dashboard.Dashboard();
  *   this.getRoot().add(dashboard);
  * 
*/ -qx.Class.define("osparc.desktop.Dashboard", { +qx.Class.define("osparc.dashboard.Dashboard", { extend: qx.ui.tabview.TabView, construct: function() { @@ -98,17 +98,17 @@ qx.Class.define("osparc.desktop.Dashboard", { }, __createStudiesView: function() { - const studiesView = this.__prjBrowser = new osparc.desktop.StudyBrowser(); + const studiesView = this.__prjBrowser = new osparc.dashboard.StudyBrowser(); return studiesView; }, __createServicesLayout: function() { - const servicesView = this.__serviceBrowser = new osparc.desktop.ServiceBrowser(); + const servicesView = this.__serviceBrowser = new osparc.dashboard.ServiceBrowser(); return servicesView; }, __createDataManagerLayout: function() { - const dataManagerView = this.__dataManager = new osparc.desktop.DataBrowser(); + const dataManagerView = this.__dataManager = new osparc.dashboard.DataBrowser(); return dataManagerView; } } diff --git a/services/web/client/source/class/osparc/desktop/DataBrowser.js b/services/web/client/source/class/osparc/dashboard/DataBrowser.js similarity index 98% rename from services/web/client/source/class/osparc/desktop/DataBrowser.js rename to services/web/client/source/class/osparc/dashboard/DataBrowser.js index 1d1fe1d269d..d69b0be9f5e 100644 --- a/services/web/client/source/class/osparc/desktop/DataBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/DataBrowser.js @@ -25,14 +25,12 @@ * Here is a little example of how to use the widget. * *
- *   let dataManager = new osparc.desktop.DataBrowser();
+ *   let dataManager = new osparc.dashboard.DataBrowser();
  *   this.getRoot().add(dataManager);
  * 
*/ -/* global document */ - -qx.Class.define("osparc.desktop.DataBrowser", { +qx.Class.define("osparc.dashboard.DataBrowser", { extend: qx.ui.core.Widget, construct: function() { diff --git a/services/web/client/source/class/osparc/desktop/ServiceBrowser.js b/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js similarity index 98% rename from services/web/client/source/class/osparc/desktop/ServiceBrowser.js rename to services/web/client/source/class/osparc/dashboard/ServiceBrowser.js index 2c60abd069d..ff3893edaf1 100644 --- a/services/web/client/source/class/osparc/desktop/ServiceBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js @@ -31,12 +31,12 @@ * Here is a little example of how to use the widget. * *
- *   let servicesView = this.__serviceBrowser = new osparc.desktop.ServiceBrowser();
+ *   let servicesView = this.__serviceBrowser = new osparc.dashboard.ServiceBrowser();
  *   this.getRoot().add(servicesView);
  * 
*/ -qx.Class.define("osparc.desktop.ServiceBrowser", { +qx.Class.define("osparc.dashboard.ServiceBrowser", { extend: qx.ui.core.Widget, construct: function() { @@ -150,7 +150,7 @@ qx.Class.define("osparc.desktop.ServiceBrowser", { const servCtrl = new qx.data.controller.List(latestServicesModel, servicesUIList, "name"); servCtrl.setDelegate({ createItem: () => { - const item = new osparc.desktop.ServiceBrowserListItem(); + const item = new osparc.dashboard.ServiceBrowserListItem(); item.subscribeToFilterGroup("serviceBrowser"); item.addListener("tap", e => { servicesUIList.setSelection([item]); diff --git a/services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js similarity index 97% rename from services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js rename to services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js index 38f192ce30b..b33b28974dd 100644 --- a/services/web/client/source/class/osparc/desktop/ServiceBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js @@ -28,7 +28,7 @@ * *
  *   tree.setDelegate({
- *     createItem: () => new osparc.desktop.ServiceBrowserListItem(),
+ *     createItem: () => new osparc.dashboard.ServiceBrowserListItem(),
  *     bindItem: (c, item, id) => {
  *       c.bindProperty("key", "model", null, item, id);
  *       c.bindProperty("name", "title", null, item, id);
@@ -41,7 +41,7 @@
  * 
*/ -qx.Class.define("osparc.desktop.ServiceBrowserListItem", { +qx.Class.define("osparc.dashboard.ServiceBrowserListItem", { extend: qx.ui.core.Widget, implement : [qx.ui.form.IModel, osparc.component.filter.IFilterable], include : [qx.ui.form.MModelProperty, osparc.component.filter.MFilterable], diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js similarity index 98% rename from services/web/client/source/class/osparc/desktop/StudyBrowser.js rename to services/web/client/source/class/osparc/dashboard/StudyBrowser.js index a735907e2c2..813cad3b331 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -30,12 +30,12 @@ * Here is a little example of how to use the widget. * *
- *   let prjBrowser = this.__serviceBrowser = new osparc.desktop.StudyBrowser();
+ *   let prjBrowser = this.__serviceBrowser = new osparc.dashboard.StudyBrowser();
  *   this.getRoot().add(prjBrowser);
  * 
*/ -qx.Class.define("osparc.desktop.StudyBrowser", { +qx.Class.define("osparc.dashboard.StudyBrowser", { extend: qx.ui.core.Widget, construct: function() { @@ -432,7 +432,7 @@ qx.Class.define("osparc.desktop.StudyBrowser", { __setStudyList: function(userStudyList) { this.__userStudies = userStudyList; this.__userStudyContainer.removeAll(); - osparc.desktop.StudyBrowser.sortStudyList(userStudyList); + this.self().sortStudyList(userStudyList); for (let i=0; i study.tags.includes(tag.id)) : []; - const item = new osparc.desktop.StudyBrowserListItem(menu).set({ + const item = new osparc.dashboard.StudyBrowserListItem(menu).set({ uuid: study.uuid, studyTitle: study.name, icon: study.thumbnail || "@FontAwesome5Solid/flask/50", diff --git a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js similarity index 99% rename from services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js rename to services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 87a357ca679..442404af58c 100644 --- a/services/web/client/source/class/osparc/desktop/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -24,7 +24,7 @@ * It consists of a thumbnail and creator and last change as caption */ -qx.Class.define("osparc.desktop.StudyBrowserListItem", { +qx.Class.define("osparc.dashboard.StudyBrowserListItem", { extend: qx.ui.form.ToggleButton, implement : [qx.ui.form.IModel, osparc.component.filter.IFilterable], include : [qx.ui.form.MModelProperty, osparc.component.filter.MFilterable], diff --git a/services/web/client/source/class/osparc/desktop/MainPage.js b/services/web/client/source/class/osparc/desktop/MainPage.js index 8897b8edddd..1b36f46ffdd 100644 --- a/services/web/client/source/class/osparc/desktop/MainPage.js +++ b/services/web/client/source/class/osparc/desktop/MainPage.js @@ -87,7 +87,7 @@ qx.Class.define("osparc.desktop.MainPage", { __createMainView: function() { const prjStack = new qx.ui.container.Stack(); - const dashboard = this.__dashboard = new osparc.desktop.Dashboard().set({ + const dashboard = this.__dashboard = new osparc.dashboard.Dashboard().set({ width: 1200 }); dashboard.getStudyBrowser().addListener("startStudy", e => { From 96fc2e53c0141c237b59477b816c860c2e49ff00 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 11:22:04 +0100 Subject: [PATCH 19/74] New Study button follows StudyBrowserList style --- .../class/osparc/dashboard/StudyBrowser.js | 7 +- .../osparc/dashboard/StudyBrowserListBase.js | 132 ++++++++++++++++++ .../osparc/dashboard/StudyBrowserListItem.js | 98 ++----------- .../osparc/dashboard/StudyBrowserListNew.js | 97 +++++++++++++ 4 files changed, 239 insertions(+), 95 deletions(-) create mode 100644 services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js create mode 100644 services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 813cad3b331..082695ce803 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -220,11 +220,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createNewStudyButton: function() { - const newStudyBtn = new qx.ui.form.Button(this.tr("Create new study"), 
"@FontAwesome5Solid/plus-circle/18").set({ - appearance: "xl-button", - allowGrowX: false, - width: 210 - }); + const newStudyBtn = new osparc.dashboard.StudyBrowserListNew(); + newStudyBtn.subscribeToFilterGroup("studyBrowser"); osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); newStudyBtn.addListener("execute", () => this.__createStudyBtnClkd()); return newStudyBtn; diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js new file mode 100644 index 00000000000..42e5ebdd7ef --- /dev/null +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -0,0 +1,132 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2018 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + * Tobias Oetiker (oetiker) + +************************************************************************ */ + +/* eslint "qx-rules/no-refs-in-members": "warn" */ + +/** + * Widget used mainly by StudyBrowser for displaying Studies + * + * It consists of a thumbnail and creator and last change as caption + */ + +qx.Class.define("osparc.dashboard.StudyBrowserListBase", { + extend: qx.ui.form.ToggleButton, + implement : [qx.ui.form.IModel, osparc.component.filter.IFilterable], + include : [qx.ui.form.MModelProperty, osparc.component.filter.MFilterable], + type: "abstract", + + construct: function() { + this.base(arguments); + this.set({ + width: this.self().ITEM_WIDTH, + allowGrowX: false + }); + + this._setLayout(new qx.ui.layout.Canvas()); + + let mainLayout = this._mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ + alignY: "middle" + })); + this._add(mainLayout, { + top: 0, + right: 0, + bottom: 0, + left: 0 + }); + + this.addListener("changeValue", this._onToggleChange, this); + + this.addListener("pointerover", this._onPointerOver, this); + this.addListener("pointerout", this._onPointerOut, this); + }, + + statics: { + ITEM_WIDTH: 210 + }, + + events: { + /** (Fired by {@link qx.ui.form.List}) */ + "action": "qx.event.type.Event" + }, + + properties: { + appearance: { + refine : true, + init : "pb-listitem" + } + }, + + members: { // eslint-disable-line qx-rules/no-refs-in-members + _forwardStates: { + focused : true, + hovered : true, + selected : true, + dragover : true + }, + + _mainLayout: null, + + /** + * Event handler for the pointer over event. + */ + _onPointerOver: function() { + this.addState("hovered"); + }, + + /** + * Event handler for the pointer out event. + */ + _onPointerOut : function() { + this.removeState("hovered"); + }, + + /** + * Event handler for filtering events. 
+ */ + _filter: function() { + this.exclude(); + }, + + _unfilter: function() { + this.show(); + }, + + _onToggleChange: function(e) { + throw new Error("Abstract method called!"); + }, + + _shouldApplyFilter: function(data) { + throw new Error("Abstract method called!"); + }, + + _shouldReactToFilter: function(data) { + if (data.text && data.text.length > 1) { + return true; + } + if (data.tags && data.tags.length) { + return true; + } + return false; + } + }, + + destruct : function() { + this.removeListener("pointerover", this._onPointerOver, this); + this.removeListener("pointerout", this._onPointerOut, this); + } +}); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 442404af58c..89644d7e0f7 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -25,15 +25,10 @@ */ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { - extend: qx.ui.form.ToggleButton, - implement : [qx.ui.form.IModel, osparc.component.filter.IFilterable], - include : [qx.ui.form.MModelProperty, osparc.component.filter.MFilterable], + extend: osparc.dashboard.StudyBrowserListBase, construct: function(menu) { this.base(arguments); - this.set({ - width: this.self().ITEM_WIDTH - }); // create a date format like "Oct. 19, 2018 11:31 AM" this.__dateFormat = new qx.util.format.DateFormat( @@ -43,43 +38,12 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { qx.locale.Date.getTimeFormat("short") ); - this._setLayout(new qx.ui.layout.Canvas()); - - let mainLayout = this.__mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ - alignY: "middle" - })); - this._add(mainLayout, { - top: 0, - right: 0, - bottom: 0, - left: 0 - }); - if (menu !== undefined) { this.setMenu(menu); } - - this.addListener("pointerover", this._onPointerOver, this); - this.addListener("pointerout", this._onPointerOut, this); - - this.addListener("changeValue", this.__onToggleChange, this); - }, - - statics: { - ITEM_WIDTH: 210 - }, - - events: { - /** (Fired by {@link qx.ui.form.List}) */ - "action": "qx.event.type.Event" }, properties: { - appearance: { - refine : true, - init : "pb-listitem" - }, - /** The menu instance to show when tapping on the button */ menu: { check : "qx.ui.menu.Menu", @@ -117,19 +81,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } }, - members: { // eslint-disable-line qx-rules/no-refs-in-members + members: { __dateFormat: null, __timeFormat: null, - _forwardStates: { - focused : true, - hovered : true, - selected : true, - dragover : true - }, - - __mainLayout: null, - // overridden _createChildControlImpl: function(id) { let control; @@ -165,7 +120,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_title"); - this.__mainLayout.addAt(control, 0); + this._mainLayout.addAt(control, 0); break; case "creator": control = new qx.ui.basic.Label(this.getCreator()).set({ @@ -174,7 +129,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_creator"); - this.__mainLayout.addAt(control, 1); + this._mainLayout.addAt(control, 1); break; case "lastChangeDate": control = new qx.ui.basic.Label().set({ @@ -183,7 +138,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", 
{ anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_lastChangeDate"); -        this.__mainLayout.addAt(control, 2); +        this._mainLayout.addAt(control, 2); break; case "icon": control = new qx.ui.basic.Image(this.getIcon()).set({ @@ -193,11 +148,11 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { allowStretchY: true, height: 120 }); -        this.__mainLayout.addAt(control, 3); +        this._mainLayout.addAt(control, 3); break; case "tags": control = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 3)); -        this.__mainLayout.addAt(control, 4); +        this._mainLayout.addAt(control, 4); break; } @@ -261,35 +216,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } }, -    __onToggleChange: function(e) { -      this.getChildControl("tick-selected").setVisibility(e.getData() ? "visible" : "excluded"); -    }, - -    /** -     * Event handler for the pointer over event. -     */ -    _onPointerOver: function() { -      this.addState("hovered"); -    }, - -    /** -     * Event handler for the pointer out event. -     */ -    _onPointerOut : function() { -      this.removeState("hovered"); -    }, - -    /** -     * Event handler for filtering events. -     */ -    _filter: function() { -      this.exclude(); -    }, - -    _unfilter: function() { -      this.show(); -    }, - _shouldApplyFilter: function(data) { if (data.text) { const checks = [ @@ -307,16 +237,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } } return false; -    }, - -    _shouldReactToFilter: function(data) { -      if (data.text && data.text.length > 1) { -        return true; -      } -      if (data.tags && data.tags.length) { -        return true; -      } -      return false; } }, @@ -325,7 +245,5 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { this.__dateFormat = null; this.__timeFormat.dispose(); this.__timeFormat = null; -    this.removeListener("pointerover", this._onPointerOver, this); -    this.removeListener("pointerout", this._onPointerOut, this); } }); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js new file mode 100644 index 00000000000..1e80c206858 --- /dev/null +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js @@ -0,0 +1,97 @@ +/* ************************************************************************ + +   osparc - the simcore frontend + +   https://osparc.io + +   Copyright: +     2018 IT'IS Foundation, https://itis.swiss + +   License: +     MIT: https://opensource.org/licenses/MIT + +   Authors: +     * Odei Maiz (odeimaiz) +     * Tobias Oetiker (oetiker) + +************************************************************************ */ + +/* eslint "qx-rules/no-refs-in-members": "warn" */ + +/** + * Widget used mainly by StudyBrowser for displaying Studies + * + * It consists of a thumbnail and creator and last change as caption + */ + +qx.Class.define("osparc.dashboard.StudyBrowserListNew", { +  extend: osparc.dashboard.StudyBrowserListBase, + +  construct: function() { +    this.base(arguments); + +    this.__buildLayout(); +  }, + +  members: { +    // overridden +    _createChildControlImpl: function(id) { +      let control; +      switch (id) { +        case "studyTitle": +          control = new qx.ui.basic.Label(this.tr("Create New Study")).set({ +            margin: [5, 0], +            font: "title-14", +            anonymous: true +          }); +          osparc.utils.Utils.setIdToWidget(control, "studyBrowserListNew_title"); +          this._mainLayout.addAt(control, 0); +          break; +        case "studyDescription": +          control = new qx.ui.basic.Label(this.tr("Start with an empty study")).set({ +            rich: true, +            allowGrowY: false, + 
anonymous: true +          }); +          this._mainLayout.addAt(control, 1); +          break; +        case "icon": +          control = new qx.ui.basic.Image("@FontAwesome5Solid/plus-circle/64").set({ +            anonymous: true, +            scale: true, +            allowStretchX: true, +            allowStretchY: true, +            alignY: "middle", +            height: 145 +          }); +          this._mainLayout.addAt(control, 2); +          break; +      } + +      return control || this.base(arguments, id); +    }, + +    __buildLayout: function() { +      this.getChildControl("studyTitle"); +      this.getChildControl("studyDescription"); +      let icon = this.getChildControl("icon"); +      icon.set({ +        paddingTop: icon.getSource() && icon.getSource().match(/^@/) ? 30 : 0 +      }); +    }, + +    _onToggleChange: function(e) { +      this.setValue(false); +    }, + +    _shouldApplyFilter: function(data) { +      if (data.text) { +        return true; +      } +      if (data.tags && data.tags.length) { +        return true; +      } +      return false; +    } +  } +}); From 6a54359ea276bf8144e944a8f84b7e7ea57c862f Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 11:28:27 +0100 Subject: [PATCH 20/74] New Study Button part of the templates list --- .../web/client/source/class/osparc/dashboard/StudyBrowser.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 082695ce803..b40c269c9ef 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -246,8 +246,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createTemplateStudiesLayout: function() { -      const newStudyBtn = this.__createNewStudyButton(); - const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const templateDeleteButton = this.__templateDeleteButton = this.__createDeleteButton(); @@ -261,7 +259,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { marginTop: 20 }); tempStudyLayout.add(templateTitleContainer); -      tempStudyLayout.add(newStudyBtn); tempStudyLayout.add(tempStudyList); return tempStudyLayout; }, @@ -439,6 +436,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __setTemplateList: function(tempStudyList) { this.__templateStudies = tempStudyList; this.__templateStudyContainer.removeAll(); +      this.__templateStudyContainer.add(this.__createNewStudyButton()); this.self().sortStudyList(tempStudyList); for (let i=0; i Date: Tue, 17 Mar 2020 13:09:23 +0100 Subject: [PATCH 21/74] __deleteStudy -> __deleteStudies --- .../source/class/osparc/dashboard/StudyBrowser.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index b40c269c9ef..03dfe8a3e69 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -304,7 +304,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { win.open(); win.addListener("close", () => { if (win.getConfirmed()) { -          this.__deleteStudy(selection.map(button => this.__getStudyData(button.getUuid(), isTemplate)), isTemplate); +          this.__deleteStudies(selection.map(button => this.__getStudyData(button.getUuid(), isTemplate)), isTemplate); } }, this); }, this); @@ -593,17 +593,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { deleteButton.setEnabled(isCurrentUserOwner && 
(!isTemplate || canDeleteTemplate)); }, - __deleteStudy: function(studyData, isTemplate = false) { - Promise.all(studyData.map(study => { + __deleteStudies: function(studiesData, areTemplates = false) { + Promise.all(studiesData.map(study => { const params = { url: { projectId: study.uuid } }; - return osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "delete", params, study.uuid); + return osparc.data.Resources.fetch(areTemplates ? "templates" : "studies", "delete", params, study.uuid); })) .then(() => { - if (isTemplate) { + if (areTemplates) { this.reloadTemplateStudies(); } else { this.reloadUserStudies(); From 155355d38747c1aa126a8e5200d826cfe90ee522 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 13:10:16 +0100 Subject: [PATCH 22/74] Edit Study pops up in a new window --- .../class/osparc/dashboard/StudyBrowser.js | 48 +++++-------------- 1 file changed, 11 insertions(+), 37 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 03dfe8a3e69..f601fff03f9 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -43,25 +43,15 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._setLayout(new qx.ui.layout.HBox()); - this.__studiesPane = new qx.ui.container.Composite(new qx.ui.layout.VBox()); - this.__editPane = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ - appearance: "sidepanel", - width: 570, - allowGrowX: false, - visibility: "excluded", - padding: [0, 15] - }); + this.__studiesLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()); const scrollStudies = new qx.ui.container.Scroll(); - scrollStudies.add(this.__studiesPane); + scrollStudies.add(this.__studiesLayout); this._add(scrollStudies, { flex: 1 }); - const scrollEditStudy = new qx.ui.container.Scroll(); - scrollEditStudy.add(this.__editPane); - this._add(scrollEditStudy); let iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); - this.__studiesPane.add(iframe, { + this.__studiesLayout.add(iframe, { flex: 1 }); @@ -71,11 +61,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { userTimer.addListener("interval", () => { if (this.__userReady) { userTimer.stop(); - this.__studiesPane.removeAll(); - this.__editPane.removeAll(); + this.__studiesLayout.removeAll(); iframe.dispose(); this.__createStudiesLayout(); - this.__createEditStudyLayout(); this.__reloadStudies(); this.__attachEventHandlers(); const loadStudyId = osparc.store.Store.getInstance().getCurrentStudyId(); @@ -136,9 +124,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __studyFilters: null, __userStudyContainer: null, __templateStudyContainer: null, - __editStudyLayout: null, - __studiesPane: null, - __editPane: null, + __studiesLayout: null, __userStudies: null, __templateStudies: null, __templateDeleteButton: null, @@ -214,9 +200,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const tempStudyLayout = this.__createTemplateStudiesLayout(); const userStudyLayout = this.__createUserStudiesLayout(); - this.__studiesPane.add(studyFilters); - this.__studiesPane.add(tempStudyLayout); - this.__studiesPane.add(userStudyLayout); + this.__studiesLayout.add(studyFilters); + this.__studiesLayout.add(tempStudyLayout); + this.__studiesLayout.add(userStudyLayout); }, __createNewStudyButton: function() { @@ -263,11 +249,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return 
tempStudyLayout; }, -    __createEditStudyLayout: function() { -      const editStudyLayout = this.__editStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); -      this.__editPane.add(editStudyLayout); -    }, - __reloadStudies: function() { const params = { url: { @@ -535,9 +516,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (this.__templateStudyContainer) { this.__templateStudyContainer.resetSelection(); } -      if (this.__editStudyLayout) { -        this.__editPane.exclude(); -      } if (this.__studiesDeleteButton) { this.__studiesDeleteButton.exclude(); } @@ -546,13 +524,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } return; } -      const studyData = this.__getStudyData(studyId, isTemplate); -      this.__createForm(studyData, isTemplate); -      this.__editPane.setVisibility("visible"); }, -    __createForm: function(studyData, isTemplate) { -      this.__editStudyLayout.removeAll(); +    __createStudyDetailsEditor: function(studyData, isTemplate) { const studyDetails = new osparc.component.metadata.StudyDetailsEditor(studyData, isTemplate); studyDetails.addListener("closed", () => this.__itemSelected(null), this); studyDetails.addListener("updatedStudy", e => this.reloadUserStudies(e.getData()), this); @@ -572,9 +546,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }); -      this.__editStudyLayout.add(studyDetails); - this.__updateDeleteButtons(studyData, isTemplate); + + return studyDetails; }, __updateDeleteButtons: function(studyData, isTemplate) { From 0dfe44e5986bf2fd6110518ea57e3bd4c7fce253 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 13:10:37 +0100 Subject: [PATCH 23/74] feature access through button menu --- .../class/osparc/dashboard/StudyBrowser.js | 32 +++++++++++++++++-- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index f601fff03f9..42a6395012d 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -429,7 +429,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createStudyItem: function(study, isTemplate) { -      const menu = this.__getStudyItemMenu(); +      const menu = this.__getStudyItemMenu(study, isTemplate); const tags = study.tags ? 
@@ -457,13 +457,39 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return item; }, - __getStudyItemMenu: function() { + __getStudyItemMenu: function(studyData, isTemplate) { const menu = new qx.ui.menu.Menu().set({ position: "bottom-right" }); - const moreInfoButton = new qx.ui.menu.Button(this.tr("More info")); + const moreInfoButton = new qx.ui.menu.Button(this.tr("Edit info")); + moreInfoButton.addListener("execute", () => { + const studyDetailsEditor = this.__createStudyDetailsEditor(studyData, isTemplate); + const win = new osparc.ui.window.Dialog(this.tr("Study Details Editor")).set({ + contentPadding: 10, + resizable: true, + maxWidth: 800, + width: 400, + height: 400 + }); + studyDetailsEditor.addListener("openedStudy", () => { + win.close(); + }); + win.add(studyDetailsEditor); + win.open(); + win.center(); + }, this); const selectButton = new qx.ui.menu.Button(this.tr("Select")); const deleteButton = new qx.ui.menu.Button(this.tr("Delete")); + deleteButton.addListener("execute", () => { + const win = this.__createConfirmWindow(false); + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + this.__deleteStudies([studyData], isTemplate); + } + }, this); + }, this); menu.add(moreInfoButton); menu.add(selectButton); menu.add(deleteButton); From c9450b45b55f131c3357ef665b132dfc615383ea Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 13:50:59 +0100 Subject: [PATCH 24/74] saveAsTemplate Button added --- .../class/osparc/dashboard/StudyBrowser.js | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 42a6395012d..ab1f972d244 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -461,7 +461,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const menu = new qx.ui.menu.Menu().set({ position: "bottom-right" }); - const moreInfoButton = new qx.ui.menu.Button(this.tr("Edit info")); + + const selectButton = new qx.ui.menu.Button(this.tr("Select")); + menu.add(selectButton); + + const moreInfoButton = new qx.ui.menu.Button(this.tr("More info")); moreInfoButton.addListener("execute", () => { const studyDetailsEditor = this.__createStudyDetailsEditor(studyData, isTemplate); const win = new osparc.ui.window.Dialog(this.tr("Study Details Editor")).set({ @@ -478,7 +482,20 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { win.open(); win.center(); }, this); - const selectButton = new qx.ui.menu.Button(this.tr("Select")); + menu.add(moreInfoButton); + + const isCurrentUserOwner = studyData.prjOwner === osparc.auth.Data.getInstance().getEmail(); + const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create"); + if (isCurrentUserOwner && !isTemplate && canCreateTemplate) { + const saveAsTemplateButton = new qx.ui.menu.Button(this.tr("Save as template")); + saveAsTemplateButton.addListener("execute", e => { + // this.__saveAsTemplate(); + }, this); + menu.add(saveAsTemplateButton); + } + + menu.addSeparator(); + const deleteButton = new qx.ui.menu.Button(this.tr("Delete")); deleteButton.addListener("execute", () => { const win = this.__createConfirmWindow(false); @@ -490,8 +507,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }, this); }, this); - menu.add(moreInfoButton); - menu.add(selectButton); menu.add(deleteButton); 
return menu; }, From 16ba81f84a48ed935bb958b40ef4cd9fcd65e352 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 18:48:19 +0100 Subject: [PATCH 25/74] minor --- .../client/source/class/osparc/dashboard/StudyBrowser.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index ab1f972d244..91770f614ad 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -149,7 +149,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.data.Resources.get("studies") .then(studies => { this.__setStudyList(studies); - this.__itemSelected(study ? study.uuid : null, false); + this.__itemSelected(study ? study.uuid : null); }) .catch(err => { console.error(err); @@ -167,7 +167,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.data.Resources.get("templates") .then(templates => { this.__setTemplateList(templates); - this.__itemSelected(template ? template.uuid : null, true); + this.__itemSelected(template ? template.uuid : null); }) .catch(err => { console.error(err); @@ -525,7 +525,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } else { this.__templateStudyContainer.resetSelection(); } - this.__itemSelected(item.getUuid(), isTemplate); + this.__itemSelected(item.getUuid()); } else if (isTemplate) { this.__itemSelected(null); this.__templateDeleteButton.exclude(); @@ -549,7 +549,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }, - __itemSelected: function(studyId, isTemplate = false) { + __itemSelected: function(studyId) { if (studyId === null) { if (this.__userStudyContainer) { this.__userStudyContainer.resetSelection(); From 2f6d86fb1389f520e8247937b8cb83528872a122 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 18:49:13 +0100 Subject: [PATCH 26/74] setMenu --- .../source/class/osparc/dashboard/StudyBrowser.js | 11 +++++++---- .../class/osparc/dashboard/StudyBrowserListItem.js | 10 +--------- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 91770f614ad..1467f78ab97 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -429,8 +429,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createStudyItem: function(study, isTemplate) { - const menu = this.__getStudyItemMenu(study, isTemplate); - const tags = study.tags ? osparc.store.Store.getInstance().getTags().filter(tag => study.tags.includes(tag.id)) : @@ -443,7 +441,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { lastChangeDate: study.lastChangeDate ? 
new Date(study.lastChangeDate) : null, tags }); - + const menu = this.__getStudyItemMenu(item, study, isTemplate); + item.setMenu(menu); item.subscribeToFilterGroup("studyBrowser"); item.addListener("execute", () => { @@ -457,12 +456,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return item; }, - __getStudyItemMenu: function(studyData, isTemplate) { + __getStudyItemMenu: function(item, studyData, isTemplate) { const menu = new qx.ui.menu.Menu().set({ position: "bottom-right" }); const selectButton = new qx.ui.menu.Button(this.tr("Select")); + selectButton.addListener("execute", () => { + item.toggleValue(); + this.__itemPreSelected(item, isTemplate); + }, this); menu.add(selectButton); const moreInfoButton = new qx.ui.menu.Button(this.tr("More info")); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 89644d7e0f7..a3e0a75d781 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { extend: osparc.dashboard.StudyBrowserListBase, - construct: function(menu) { + construct: function() { this.base(arguments); // create a date format like "Oct. 19, 2018 11:31 AM" @@ -37,10 +37,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { this.__timeFormat = new qx.util.format.DateFormat( qx.locale.Date.getTimeFormat("short") ); - - if (menu !== undefined) { - this.setMenu(menu); - } }, properties: { @@ -216,10 +212,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } }, - _onToggleChange: function(e) { - this.getChildControl("tick-selected").setVisibility(e.getData() ? "visible" : "excluded"); - }, - _shouldApplyFilter: function(data) { if (data.text) { const checks = [ From 72e3369aa5e1e59fecaa33c62d9f693e5e4e1b7d Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 17 Mar 2020 20:34:30 +0100 Subject: [PATCH 27/74] Selection logic improved --- .../class/osparc/dashboard/StudyBrowser.js | 100 ++++++++++-------- .../osparc/dashboard/StudyBrowserListBase.js | 6 -- .../osparc/dashboard/StudyBrowserListItem.js | 15 ++- 3 files changed, 68 insertions(+), 53 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 1467f78ab97..f3dce92bf23 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -144,12 +144,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { * Function that asks the backend for the list of studies belonging to the user * and sets it */ - reloadUserStudies: function(study) { + reloadUserStudies: function() { if (osparc.data.Permissions.getInstance().canDo("studies.user.read")) { osparc.data.Resources.get("studies") .then(studies => { this.__setStudyList(studies); - this.__itemSelected(study ? 
study.uuid : null); + this.__itemSelected(null); }) .catch(err => { console.error(err); @@ -162,12 +162,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { /** * Function that asks the backend for the list of template studies and sets it */ - reloadTemplateStudies: function(template) { + reloadTemplateStudies: function() { if (osparc.data.Permissions.getInstance().canDo("studies.templates.read")) { osparc.data.Resources.get("templates") .then(templates => { this.__setTemplateList(templates); - this.__itemSelected(template ? template.uuid : null); + this.__itemSelected(null); }) .catch(err => { console.error(err); @@ -200,6 +200,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const tempStudyLayout = this.__createTemplateStudiesLayout(); const userStudyLayout = this.__createUserStudiesLayout(); + this.__userStudyContainer.addListener("changeSelection", () => { + this.__updateDeleteStudiesButton(); + }, this); + this.__templateStudyContainer.addListener("changeSelection", () => { + this.__updateDeleteTemplatesButton(); + }, this); + this.__studiesLayout.add(studyFilters); this.__studiesLayout.add(tempStudyLayout); this.__studiesLayout.add(userStudyLayout); @@ -449,10 +456,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__itemClicked(item, isTemplate); }, this); - item.addListener("dbltap", () => { - this.__itemDblClicked(item, isTemplate); - }); - return item; }, @@ -463,8 +466,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const selectButton = new qx.ui.menu.Button(this.tr("Select")); selectButton.addListener("execute", () => { - item.toggleValue(); - this.__itemPreSelected(item, isTemplate); + item.setValue(true); + this.__itemMultiSelected(item, isTemplate); }, this); menu.add(selectButton); @@ -520,6 +523,26 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __itemClicked: function(item, isTemplate) { + const selected = item.getValue(); + const studyData = this.__getStudyData(item.getUuid(), isTemplate); + if (isTemplate) { + const selection = this.__templateStudyContainer.getSelection(); + if (selection.length) { + this.__itemMultiSelected(item, isTemplate); + } else if (selected) { + this.__createStudyBtnClkd(studyData); + } + } else { + const selection = this.__userStudyContainer.getSelection(); + if (selection.length > 1) { + this.__itemMultiSelected(item, isTemplate); + } else if (selected) { + this.__startStudy(studyData); + } + } + }, + + __itemMultiSelected: function(item, isTemplate) { // Selection logic if (item.getValue()) { if (isTemplate) { @@ -531,27 +554,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__itemSelected(item.getUuid()); } else if (isTemplate) { this.__itemSelected(null); - this.__templateDeleteButton.exclude(); } else { const selection = this.__userStudyContainer.getSelection(); if (selection.length) { this.__itemSelected(selection[0].getUuid()); } else { - this.__studiesDeleteButton.exclude(); this.__itemSelected(null); } } }, - __itemDblClicked: function(item, isTemplate) { - const studyData = this.__getStudyData(item.getUuid(), isTemplate); - if (isTemplate) { - this.__createStudyBtnClkd(studyData); - } else { - this.__startStudy(studyData); - } - }, - __itemSelected: function(studyId) { if (studyId === null) { if (this.__userStudyContainer) { @@ -560,21 +572,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (this.__templateStudyContainer) { this.__templateStudyContainer.resetSelection(); } - if (this.__studiesDeleteButton) { - this.__studiesDeleteButton.exclude(); - } - if 
(this.__templateDeleteButton) { - this.__templateDeleteButton.exclude(); - } - return; } }, __createStudyDetailsEditor: function(studyData, isTemplate) { const studyDetails = new osparc.component.metadata.StudyDetailsEditor(studyData, isTemplate); studyDetails.addListener("closed", () => this.__itemSelected(null), this); - studyDetails.addListener("updatedStudy", e => this.reloadUserStudies(e.getData()), this); - studyDetails.addListener("updatedTemplate", e => this.reloadTemplateStudies(e.getData()), this); + studyDetails.addListener("updatedStudy", () => this.reloadUserStudies(), this); + studyDetails.addListener("updatedTemplate", () => this.reloadTemplateStudies(), this); studyDetails.addListener("openedStudy", () => { if (isTemplate) { this.__createStudyBtnClkd(studyData); @@ -590,25 +595,34 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }); - this.__updateDeleteButtons(studyData, isTemplate); - return studyDetails; }, - __updateDeleteButtons: function(studyData, isTemplate) { + __updateDeleteStudiesButton: function() { + const nSelected = this.__userStudyContainer.getSelection().length; + if (nSelected) { + this.__studiesDeleteButton.setLabel(nSelected > 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete")); + this.__studiesDeleteButton.setVisibility("visible"); + } else { + this.__studiesDeleteButton.setVisibility("excluded"); + } + }, + + __updateDeleteTemplatesButton: function() { + const templateSelection = this.__templateStudyContainer.getSelection(); const canDeleteTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.delete"); - const isCurrentUserOwner = studyData.prjOwner === osparc.auth.Data.getInstance().getEmail(); - let deleteButton = this.__studiesDeleteButton; - if (isTemplate) { - this.__studiesDeleteButton.exclude(); - deleteButton = this.__templateDeleteButton; + let allMine = Boolean(templateSelection.length) && canDeleteTemplate; + for (let i=0; i 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete")); + this.__templateDeleteButton.setVisibility("visible"); } else { - this.__templateDeleteButton.exclude(); - const nSelected = this.__userStudyContainer.getSelection().length; - this.__studiesDeleteButton.setLabel(nSelected > 1 ? 
this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete")); + this.__templateDeleteButton.setVisibility("excluded"); } - deleteButton.show(); - deleteButton.setEnabled(isCurrentUserOwner && (!isTemplate || canDeleteTemplate)); }, __deleteStudies: function(studiesData, areTemplates = false) { diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index 42e5ebdd7ef..d7fa586ee70 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -49,8 +49,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { left: 0 }); - this.addListener("changeValue", this._onToggleChange, this); - this.addListener("pointerover", this._onPointerOver, this); this.addListener("pointerout", this._onPointerOut, this); }, @@ -106,10 +104,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { this.show(); }, - _onToggleChange: function(e) { - throw new Error("Abstract method called!"); - }, - _shouldApplyFilter: function(data) { throw new Error("Abstract method called!"); }, diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index a3e0a75d781..6f917c775b1 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -37,10 +37,16 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { this.__timeFormat = new qx.util.format.DateFormat( qx.locale.Date.getTimeFormat("short") ); + + const tickIcon = this.getChildControl("tick-selected"); + this.bind("value", tickIcon, "visibility", { + converter: function(value) { + return value ? "visible" : "excluded"; + } + }); }, properties: { - /** The menu instance to show when tapping on the button */ menu: { check : "qx.ui.menu.Menu", nullable : true, @@ -90,7 +96,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { width: 30, height: 30, icon: "@FontAwesome5Solid/ellipsis-v/16", - focusable: false + focusable: false, + zIndex: 20 }); this._add(control, { top: 0, @@ -102,7 +109,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { width: 30, height: 30, icon: "@FontAwesome5Solid/check-circle/16", - focusable: false + focusable: false, + zIndex: 21 }); this._add(control, { top: 0, @@ -163,7 +171,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { menuButton.setVisibility(value ? 
"visible" : "excluded"); }, - // overridden _applyUuid: function(value, old) { osparc.utils.Utils.setIdToWidget(this, "studyBrowserListItem_"+value); }, From d4891b676330652648d850839d7ad760f830e5ea Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 09:49:54 +0100 Subject: [PATCH 28/74] minor --- .../client/source/class/osparc/dashboard/StudyBrowser.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index f3dce92bf23..6325a55a708 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -285,14 +285,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.utils.Utils.setIdToWidget(deleteButton, "deleteStudiesBtn"); deleteButton.addListener("execute", e => { const thisButton = e.getTarget(); - const isTemplate = this.__templateDeleteButton === thisButton; - const selection = isTemplate ? this.__templateStudyContainer.getSelection() : this.__userStudyContainer.getSelection(); + const areTemplates = this.__templateDeleteButton === thisButton; + const selection = areTemplates ? this.__templateStudyContainer.getSelection() : this.__userStudyContainer.getSelection(); const win = this.__createConfirmWindow(selection.length > 1); win.center(); win.open(); win.addListener("close", () => { if (win.getConfirmed()) { - this.__deleteStudies(selection.map(button => this.__getStudyData(button.getUuid(), isTemplate)), isTemplate); + this.__deleteStudies(selection.map(button => this.__getStudyData(button.getUuid(), areTemplates)), areTemplates); } }, this); }, this); From 8de658998b1e2453cc6613a4f7f842fa9f30a9df Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 09:57:43 +0100 Subject: [PATCH 29/74] minor --- .../source/class/osparc/dashboard/StudyBrowser.js | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 6325a55a708..47a97a9a66d 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -474,12 +474,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const moreInfoButton = new qx.ui.menu.Button(this.tr("More info")); moreInfoButton.addListener("execute", () => { const studyDetailsEditor = this.__createStudyDetailsEditor(studyData, isTemplate); - const win = new osparc.ui.window.Dialog(this.tr("Study Details Editor")).set({ - contentPadding: 10, + const win = new qx.ui.window.Window(this.tr("Study Details Editor")).set({ + autoDestroy: true, + layout: new qx.ui.layout.VBox(), + appearance: "service-window", + showMinimize: false, + showMaximize: false, resizable: true, - maxWidth: 800, + contentPadding: 10, width: 400, - height: 400 + height: 400, + modal: true }); studyDetailsEditor.addListener("openedStudy", () => { win.close(); From 8700a3fa8bff8626a9acc88e54bc268ca28cd304 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:11:27 +0100 Subject: [PATCH 30/74] minor --- .../class/osparc/dashboard/StudyBrowser.js | 43 +++++++++++-------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 
47a97a9a66d..4ef0472c267 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -68,22 +68,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__attachEventHandlers(); const loadStudyId = osparc.store.Store.getInstance().getCurrentStudyId(); if (loadStudyId) { - const params = { - url: { - projectId: loadStudyId - } - }; - osparc.data.Resources.getOne("studies", params) - .then(studyData => { - this.__startStudy(studyData); - }) - .catch(err => { - if (osparc.data.Permissions.getInstance().getRole() === "Guest") { - // If guest fails to load study, log him out - osparc.auth.Manager.getInstance().logout(); - } - console.error(err); - }); + this.__getStudyAndStart(loadStudyId); } } }, this); @@ -257,6 +242,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __reloadStudies: function() { + this.__fetchActiveStudy(); + this.reloadUserStudies(); + this.reloadTemplateStudies(); + }, + + __fetchActiveStudy: function() { const params = { url: { tabId: osparc.utils.Utils.getClientSessionID() @@ -273,9 +264,25 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .catch(err => { console.error(err); }); + }, - this.reloadUserStudies(); - this.reloadTemplateStudies(); + __getStudyAndStart: function(loadStudyId) { + const params = { + url: { + projectId: loadStudyId + } + }; + osparc.data.Resources.getOne("studies", params) + .then(studyData => { + this.__startStudy(studyData); + }) + .catch(err => { + if (osparc.data.Permissions.getInstance().getRole() === "Guest") { + // If guest fails to load study, log him out + osparc.auth.Manager.getInstance().logout(); + } + console.error(err); + }); }, __createDeleteButton: function() { From f483c2eb7ca5be9d7da9735772009db9be3a1365 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:14:13 +0100 Subject: [PATCH 31/74] minor --- .../class/osparc/dashboard/StudyBrowser.js | 68 ++++++++++--------- 1 file changed, 36 insertions(+), 32 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 4ef0472c267..8cd2c0cab87 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -163,16 +163,35 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __initResources: function() { - if (osparc.data.Permissions.getInstance().canDo("study.tag")) { - osparc.data.Resources.get("tags") - .catch(console.error) - .finally(() => this.__userReady = true); - } else { - this.__userReady = true; - } + this.__getTags(); this.__getServicesPreload(); }, + __reloadStudies: function() { + this.__fetchActiveStudy(); + this.reloadUserStudies(); + this.reloadTemplateStudies(); + }, + + __fetchActiveStudy: function() { + const params = { + url: { + tabId: osparc.utils.Utils.getClientSessionID() + } + }; + osparc.data.Resources.fetch("studies", "getActive", params) + .then(studyData => { + if (studyData) { + this.__startStudy(studyData); + } else { + osparc.store.Store.getInstance().setCurrentStudyId(null); + } + }) + .catch(err => { + console.error(err); + }); + }, + __getServicesPreload: function() { let store = osparc.store.Store.getInstance(); store.addListener("servicesRegistered", e => { @@ -180,6 +199,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, this); }, + __getTags: function() { + if (osparc.data.Permissions.getInstance().canDo("study.tag")) { 
+ osparc.data.Resources.get("tags") + .catch(console.error) + .finally(() => this.__userReady = true); + } else { + this.__userReady = true; + } + }, + __createStudiesLayout: function() { const studyFilters = this.__studyFilters = new osparc.component.filter.group.StudyFilterGroup("studyBrowser"); const tempStudyLayout = this.__createTemplateStudiesLayout(); @@ -241,31 +270,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return tempStudyLayout; }, - __reloadStudies: function() { - this.__fetchActiveStudy(); - this.reloadUserStudies(); - this.reloadTemplateStudies(); - }, - - __fetchActiveStudy: function() { - const params = { - url: { - tabId: osparc.utils.Utils.getClientSessionID() - } - }; - osparc.data.Resources.fetch("studies", "getActive", params) - .then(studyData => { - if (studyData) { - this.__startStudy(studyData); - } else { - osparc.store.Store.getInstance().setCurrentStudyId(null); - } - }) - .catch(err => { - console.error(err); - }); - }, - __getStudyAndStart: function(loadStudyId) { const params = { url: { From e4e1fcf282ada8090e13c0d198baa84932531a4f Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:16:47 +0100 Subject: [PATCH 32/74] minor --- .../source/class/osparc/dashboard/Dashboard.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/Dashboard.js b/services/web/client/source/class/osparc/dashboard/Dashboard.js index 7f39755b6ce..694870ed32e 100644 --- a/services/web/client/source/class/osparc/dashboard/Dashboard.js +++ b/services/web/client/source/class/osparc/dashboard/Dashboard.js @@ -71,9 +71,9 @@ qx.Class.define("osparc.dashboard.Dashboard", { __createMainViewLayout: function() { [ - [this.tr("Studies"), this.__createStudiesView], - [this.tr("Services"), this.__createServicesLayout], - [this.tr("Data"), this.__createDataManagerLayout] + [this.tr("Studies"), this.__createStudyBrowser], + [this.tr("Services"), this.__createServiceBrowser], + [this.tr("Data"), this.__createDataBrowser] ].forEach(tuple => { const tabPage = new qx.ui.tabview.Page(tuple[0]).set({ appearance: "dashboard-page" @@ -97,17 +97,17 @@ qx.Class.define("osparc.dashboard.Dashboard", { }, this); }, - __createStudiesView: function() { + __createStudyBrowser: function() { const studiesView = this.__prjBrowser = new osparc.dashboard.StudyBrowser(); return studiesView; }, - __createServicesLayout: function() { + __createServiceBrowser: function() { const servicesView = this.__serviceBrowser = new osparc.dashboard.ServiceBrowser(); return servicesView; }, - __createDataManagerLayout: function() { + __createDataBrowser: function() { const dataManagerView = this.__dataManager = new osparc.dashboard.DataBrowser(); return dataManagerView; } From 754c2b0356157f08c2e5f1b88373cb21fb622c8e Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:34:22 +0100 Subject: [PATCH 33/74] initResources improved --- .../class/osparc/dashboard/StudyBrowser.js | 55 ++++++++----------- 1 file changed, 24 insertions(+), 31 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 8cd2c0cab87..52365ec6f87 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -50,31 +50,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { flex: 1 }); - let iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); 
+ const iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); this.__studiesLayout.add(iframe, { flex: 1 }); - this.__userReady = false; - const interval = 500; - let userTimer = new qx.event.Timer(interval); - userTimer.addListener("interval", () => { - if (this.__userReady) { - userTimer.stop(); - this.__studiesLayout.removeAll(); - iframe.dispose(); - this.__createStudiesLayout(); - this.__reloadStudies(); - this.__attachEventHandlers(); - const loadStudyId = osparc.store.Store.getInstance().getCurrentStudyId(); - if (loadStudyId) { - this.__getStudyAndStart(loadStudyId); - } - } - }, this); - userTimer.start(); - - this.__initResources(); + this.__initResources(iframe); }, events: { @@ -104,7 +85,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, members: { - __userReady: null, __servicesReady: null, __studyFilters: null, __userStudyContainer: null, @@ -162,8 +142,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }, - __initResources: function() { - this.__getTags(); + __initResources: function(iframe) { + this.__getTags() + .then(() => { + this.__studiesLayout.removeAll(); + iframe.dispose(); + this.__createStudiesLayout(); + this.__reloadStudies(); + this.__attachEventHandlers(); + const loadStudyId = osparc.store.Store.getInstance().getCurrentStudyId(); + if (loadStudyId) { + this.__getStudyAndStart(loadStudyId); + } + }); this.__getServicesPreload(); }, @@ -200,13 +191,15 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __getTags: function() { - if (osparc.data.Permissions.getInstance().canDo("study.tag")) { - osparc.data.Resources.get("tags") - .catch(console.error) - .finally(() => this.__userReady = true); - } else { - this.__userReady = true; - } + return new Promise((resolve, reject) => { + if (osparc.data.Permissions.getInstance().canDo("study.tag")) { + osparc.data.Resources.get("tags") + .catch(console.error) + .finally(() => resolve()); + } else { + resolve(); + } + }); }, __createStudiesLayout: function() { From 0e0656bb0f1211a3be056c0e3ea13fb12fa846f2 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:38:45 +0100 Subject: [PATCH 34/74] More promises --- .../class/osparc/dashboard/StudyBrowser.js | 42 ++++++------------- 1 file changed, 12 insertions(+), 30 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 52365ec6f87..e2eb00ed855 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -85,7 +85,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, members: { - __servicesReady: null, __studyFilters: null, __userStudyContainer: null, __templateStudyContainer: null, @@ -155,7 +154,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__getStudyAndStart(loadStudyId); } }); - this.__getServicesPreload(); }, __reloadStudies: function() { @@ -183,13 +181,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); }, - __getServicesPreload: function() { - let store = osparc.store.Store.getInstance(); - store.addListener("servicesRegistered", e => { - this.__servicesReady = true; - }, this); - }, - __getTags: function() { return new Promise((resolve, reject) => { if (osparc.data.Permissions.getInstance().canDo("study.tag")) { @@ -361,28 +352,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __startStudy: function(studyData) { - if (this.__servicesReady === null) { - this.__showChildren(false); 
- let iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Services")); - this._add(iframe, { - flex: 1 - }); + this.__showChildren(false); + let iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Services")); + this._add(iframe, { + flex: 1 + }); - const interval = 500; - let servicesTimer = new qx.event.Timer(interval); - servicesTimer.addListener("interval", () => { - if (this.__servicesReady) { - servicesTimer.stop(); - this._remove(iframe); - iframe.dispose(); - this.__showChildren(true); - this.__loadStudy(studyData); - } - }, this); - servicesTimer.start(); - } else { - this.__loadStudy(studyData); - } + osparc.store.Store.getInstance().getServices(false) + .then(() => { + this._remove(iframe); + iframe.dispose(); + this.__showChildren(true); + this.__loadStudy(studyData); + }); }, __loadStudy: function(studyData) { From 9a67294baef5af1aaa4e8c9ac8ebc7a3ab6766a3 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:41:00 +0100 Subject: [PATCH 35/74] minor --- .../class/osparc/dashboard/StudyBrowser.js | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index e2eb00ed855..17cfdab92c7 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -50,12 +50,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { flex: 1 }); - const iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); - this.__studiesLayout.add(iframe, { - flex: 1 - }); - - this.__initResources(iframe); + this.__initResources(); }, events: { @@ -85,14 +80,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, members: { + __studiesLayout: null, __studyFilters: null, __userStudyContainer: null, __templateStudyContainer: null, - __studiesLayout: null, __userStudies: null, __templateStudies: null, - __templateDeleteButton: null, __studiesDeleteButton: null, + __templateDeleteButton: null, /** * Function that resets the selected item @@ -141,7 +136,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }, - __initResources: function(iframe) { + __initResources: function() { + const iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); + this.__studiesLayout.add(iframe, { + flex: 1 + }); + this.__getTags() .then(() => { this.__studiesLayout.removeAll(); @@ -157,12 +157,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __reloadStudies: function() { - this.__fetchActiveStudy(); + this.__getActiveStudy(); this.reloadUserStudies(); this.reloadTemplateStudies(); }, - __fetchActiveStudy: function() { + __getActiveStudy: function() { const params = { url: { tabId: osparc.utils.Utils.getClientSessionID() From 45b500912d25ea9a9beccd3d34a610b3604c5389 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:42:29 +0100 Subject: [PATCH 36/74] minor --- .../class/osparc/dashboard/StudyBrowser.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 17cfdab92c7..cf86b8f5ade 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -43,9 +43,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._setLayout(new qx.ui.layout.HBox()); - 
this.__studiesLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()); + this.__studyBrowserLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()); const scrollStudies = new qx.ui.container.Scroll(); - scrollStudies.add(this.__studiesLayout); + scrollStudies.add(this.__studyBrowserLayout); this._add(scrollStudies, { flex: 1 }); @@ -80,7 +80,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, members: { - __studiesLayout: null, + __studyBrowserLayout: null, __studyFilters: null, __userStudyContainer: null, __templateStudyContainer: null, @@ -138,13 +138,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __initResources: function() { const iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); - this.__studiesLayout.add(iframe, { + this.__studyBrowserLayout.add(iframe, { flex: 1 }); this.__getTags() .then(() => { - this.__studiesLayout.removeAll(); + this.__studyBrowserLayout.removeAll(); iframe.dispose(); this.__createStudiesLayout(); this.__reloadStudies(); @@ -205,9 +205,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__updateDeleteTemplatesButton(); }, this); - this.__studiesLayout.add(studyFilters); - this.__studiesLayout.add(tempStudyLayout); - this.__studiesLayout.add(userStudyLayout); + this.__studyBrowserLayout.add(studyFilters); + this.__studyBrowserLayout.add(tempStudyLayout); + this.__studyBrowserLayout.add(userStudyLayout); }, __createNewStudyButton: function() { From 9ffa1ba220831a82591178e363cadff73c027f18 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 10:51:59 +0100 Subject: [PATCH 37/74] refactoring --- .../class/osparc/dashboard/StudyBrowser.js | 51 +++++++++++++------ 1 file changed, 36 insertions(+), 15 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index cf86b8f5ade..300d7c01e0f 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -450,13 +450,37 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { position: "bottom-right" }); + const selectButton = this.__getSelectMenuButton(item, isTemplate); + menu.add(selectButton); + + const moreInfoButton = this.__getMoreInfoMenuButton(studyData, isTemplate); + menu.add(moreInfoButton); + + const isCurrentUserOwner = studyData.prjOwner === osparc.auth.Data.getInstance().getEmail(); + const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create"); + if (isCurrentUserOwner && !isTemplate && canCreateTemplate) { + const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(); + menu.add(saveAsTemplateButton); + } + + menu.addSeparator(); + + const deleteButton = this.__getDeleteStudyMenuButton(studyData, isTemplate); + menu.add(deleteButton); + + return menu; + }, + + __getSelectMenuButton: function(item, isTemplate) { const selectButton = new qx.ui.menu.Button(this.tr("Select")); selectButton.addListener("execute", () => { item.setValue(true); this.__itemMultiSelected(item, isTemplate); }, this); - menu.add(selectButton); + return selectButton; + }, + __getMoreInfoMenuButton: function(studyData, isTemplate) { const moreInfoButton = new qx.ui.menu.Button(this.tr("More info")); moreInfoButton.addListener("execute", () => { const studyDetailsEditor = this.__createStudyDetailsEditor(studyData, isTemplate); @@ -479,20 +503,18 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { win.open(); win.center(); }, this); - 
menu.add(moreInfoButton); - - const isCurrentUserOwner = studyData.prjOwner === osparc.auth.Data.getInstance().getEmail(); - const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create"); - if (isCurrentUserOwner && !isTemplate && canCreateTemplate) { - const saveAsTemplateButton = new qx.ui.menu.Button(this.tr("Save as template")); - saveAsTemplateButton.addListener("execute", e => { - // this.__saveAsTemplate(); - }, this); - menu.add(saveAsTemplateButton); - } + return moreInfoButton; + }, - menu.addSeparator(); + __getSaveAsTemplateMenuButton: function() { + const saveAsTemplateButton = new qx.ui.menu.Button(this.tr("Save as template")); + saveAsTemplateButton.addListener("execute", e => { + // this.__saveAsTemplate(); + }, this); + return saveAsTemplateButton; + }, + __getDeleteStudyMenuButton: function(studyData, isTemplate) { const deleteButton = new qx.ui.menu.Button(this.tr("Delete")); deleteButton.addListener("execute", () => { const win = this.__createConfirmWindow(false); @@ -504,8 +526,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }, this); }, this); - menu.add(deleteButton); - return menu; + return deleteButton; }, __getStudyData: function(id, isTemplate) { From 514aec69b00701726539d3b7e888ede0085456b5 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 11:21:08 +0100 Subject: [PATCH 38/74] SaveAsTemplate from Menu working --- .../class/osparc/dashboard/StudyBrowser.js | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 300d7c01e0f..4d69f5d541e 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -459,7 +459,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const isCurrentUserOwner = studyData.prjOwner === osparc.auth.Data.getInstance().getEmail(); const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create"); if (isCurrentUserOwner && !isTemplate && canCreateTemplate) { - const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(); + const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(studyData); menu.add(saveAsTemplateButton); } @@ -506,10 +506,26 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return moreInfoButton; }, - __getSaveAsTemplateMenuButton: function() { + __getSaveAsTemplateMenuButton: function(studyData) { const saveAsTemplateButton = new qx.ui.menu.Button(this.tr("Save as template")); saveAsTemplateButton.addListener("execute", e => { - // this.__saveAsTemplate(); + const params = { + url: { + "study_url": studyData.uuid + }, + data: studyData + }; + osparc.data.Resources.fetch("templates", "postToTemplate", params) + .then(() => { + const msg = this.tr("Successfully Saved as template"); + osparc.component.message.FlashMessenger.getInstance().logAs(msg, "INFO"); + this.reloadTemplateStudies(); + }) + .catch(err => { + const msg = this.tr("Failed Saving as template"); + osparc.component.message.FlashMessenger.getInstance().logAs(msg, "ERROR"); + console.error(err); + }); }, this); return saveAsTemplateButton; }, From a72b5a46670ecb1bdc93ecad3dafd6b626a726d4 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 11:41:49 +0100 Subject: [PATCH 39/74] minor --- .../class/osparc/dashboard/StudyBrowser.js | 43 +++++++++---------- 1 file changed, 20 insertions(+), 23 
deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 4d69f5d541e..33df9d9adb3 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -86,8 +86,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __templateStudyContainer: null, __userStudies: null, __templateStudies: null, - __studiesDeleteButton: null, - __templateDeleteButton: null, /** * Function that resets the selected item @@ -198,13 +196,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const tempStudyLayout = this.__createTemplateStudiesLayout(); const userStudyLayout = this.__createUserStudiesLayout(); - this.__userStudyContainer.addListener("changeSelection", () => { - this.__updateDeleteStudiesButton(); - }, this); - this.__templateStudyContainer.addListener("changeSelection", () => { - this.__updateDeleteTemplatesButton(); - }, this); - this.__studyBrowserLayout.add(studyFilters); this.__studyBrowserLayout.add(tempStudyLayout); this.__studyBrowserLayout.add(userStudyLayout); @@ -221,36 +212,42 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __createUserStudiesLayout: function() { const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const studiesTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const studiesDeleteButton = this.__studiesDeleteButton = this.__createDeleteButton(); + const studiesDeleteButton = this.__createDeleteButton(); const myStudyLabel = new qx.ui.basic.Label(this.tr("Recent studies")).set({ font: navBarLabelFont }); studiesTitleContainer.add(myStudyLabel); studiesTitleContainer.add(studiesDeleteButton); - const userStudyList = this.__userStudyContainer = this.__createUserStudyList(); + const userStudyContainer = this.__userStudyContainer = this.__createUserStudyList(); + userStudyContainer.addListener("changeSelection", () => { + this.__updateDeleteStudiesButton(studiesDeleteButton); + }, this); const userStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ marginTop: 20 }); userStudyLayout.add(studiesTitleContainer); - userStudyLayout.add(userStudyList); + userStudyLayout.add(userStudyContainer); return userStudyLayout; }, __createTemplateStudiesLayout: function() { const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const templateDeleteButton = this.__templateDeleteButton = this.__createDeleteButton(); + const templateDeleteButton = this.__createDeleteButton(); const tempStudyLabel = new qx.ui.basic.Label(this.tr("New studies")).set({ font: navBarLabelFont }); templateTitleContainer.add(tempStudyLabel); templateTitleContainer.add(templateDeleteButton); - const tempStudyList = this.__templateStudyContainer = this.__createTemplateStudyList(); + const templateStudyContainer = this.__templateStudyContainer = this.__createTemplateStudyList(); + this.__templateStudyContainer.addListener("changeSelection", () => { + this.__updateDeleteTemplatesButton(templateDeleteButton); + }, this); const tempStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ marginTop: 20 }); tempStudyLayout.add(templateTitleContainer); - tempStudyLayout.add(tempStudyList); + tempStudyLayout.add(templateStudyContainer); return tempStudyLayout; }, @@ -626,17 +623,17 @@ 
qx.Class.define("osparc.dashboard.StudyBrowser", {
       return studyDetails;
     },
 
-    __updateDeleteStudiesButton: function() {
+    __updateDeleteStudiesButton: function(studiesDeleteButton) {
       const nSelected = this.__userStudyContainer.getSelection().length;
       if (nSelected) {
-        this.__studiesDeleteButton.setLabel(nSelected > 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete"));
-        this.__studiesDeleteButton.setVisibility("visible");
+        studiesDeleteButton.setLabel(nSelected > 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete"));
+        studiesDeleteButton.setVisibility("visible");
       } else {
-        this.__studiesDeleteButton.setVisibility("excluded");
+        studiesDeleteButton.setVisibility("excluded");
       }
     },
 
-    __updateDeleteTemplatesButton: function() {
+    __updateDeleteTemplatesButton: function(templateDeleteButton) {
       const templateSelection = this.__templateStudyContainer.getSelection();
       const canDeleteTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.delete");
       let allMine = Boolean(templateSelection.length) && canDeleteTemplate;
@@ -646,10 +643,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
       }
       if (allMine) {
         const nSelected = templateSelection.length;
-        this.__templateDeleteButton.setLabel(nSelected > 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete"));
-        this.__templateDeleteButton.setVisibility("visible");
+        templateDeleteButton.setLabel(nSelected > 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete"));
+        templateDeleteButton.setVisibility("visible");
       } else {
-        this.__templateDeleteButton.setVisibility("excluded");
+        templateDeleteButton.setVisibility("excluded");
       }
     },

From a35fd0d5c2081b5f1957f62dfe652c6e5343e6d8 Mon Sep 17 00:00:00 2001
From: odeimaiz
Date: Wed, 18 Mar 2020 13:05:23 +0100
Subject: [PATCH 40/74] simplified multiSelection logic

---
 .../class/osparc/dashboard/StudyBrowser.js    | 46 +++++++++----------
 1 file changed, 22 insertions(+), 24 deletions(-)

diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
index 33df9d9adb3..2400b40f805 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -86,6 +86,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
     __templateStudyContainer: null,
     __userStudies: null,
     __templateStudies: null,
+    __newStudyBtn: null,
 
     /**
      * Function that resets the selected item
@@ -202,7 +203,7 @@ 
qx.Class.define("osparc.dashboard.StudyBrowser", {
     __itemClicked: function(item, isTemplate) {
       const selected = item.getValue();
       const studyData = this.__getStudyData(item.getUuid(), isTemplate);
-      if (isTemplate) {
-        const selection = this.__templateStudyContainer.getSelection();
-        if (selection.length) {
-          this.__itemMultiSelected(item, isTemplate);
-        } else if (selected) {
-          this.__createStudyBtnClkd(studyData);
-        }
-      } else {
-        const selection = this.__userStudyContainer.getSelection();
-        if (selection.length > 1) {
-          this.__itemMultiSelected(item, isTemplate);
-        } else if (selected) {
-          this.__startStudy(studyData);
-        }
+      const studyContainer = isTemplate ? this.__templateStudyContainer : this.__userStudyContainer;
+
+      const selection = studyContainer.getSelection();
+      if (selection.length > 1) {
+        this.__itemMultiSelected(item, isTemplate);
+      } else if (selected) {
+        isTemplate ? this.__createStudyBtnClkd(studyData) : this.__startStudy(studyData);
       }
     },
 
     __itemMultiSelected: function(item, isTemplate) {
       // Selection logic
       if (item.getValue()) {
-        if (isTemplate) {
-          this.__userStudyContainer.resetSelection();
-          this.__templateStudyContainer.selectOne(item);
-        } else {
-          this.__templateStudyContainer.resetSelection();
-        }
         this.__itemSelected(item.getUuid());
       } else if (isTemplate) {
-        this.__itemSelected(null);
+        const selection = this.__templateStudyContainer.getSelection();
+        if (selection.length) {
+          this.__itemSelected(selection[0].getUuid());
+        } else {
+          this.__itemSelected(null);
+        }
       } else {
         const selection = this.__userStudyContainer.getSelection();
         if (selection.length) {
@@ -638,8 +632,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
       const canDeleteTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.delete");
       let allMine = Boolean(templateSelection.length) && canDeleteTemplate;
       for (let i=0; i<templateSelection.length; i++) {

From: odeimaiz
Date: Wed, 18 Mar 2020 13:10:49 +0100
Subject: [PATCH 41/74] delete multiple templates working

---
 .../source/class/osparc/dashboard/StudyBrowser.js | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
index 2400b40f805..8350ded8c84 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -213,7 +213,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
     __createUserStudiesLayout: function() {
       const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]);
       const studiesTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10));
-      const studiesDeleteButton = this.__createDeleteButton();
+      const studiesDeleteButton = this.__createDeleteButton(false);
       const myStudyLabel = new qx.ui.basic.Label(this.tr("Recent studies")).set({
         font: navBarLabelFont
       });
@@ -234,7 +234,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
     __createTemplateStudiesLayout: function() {
      const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]);
      const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10));
-     const templateDeleteButton = this.__createDeleteButton();
+     const templateDeleteButton = this.__createDeleteButton(true);
      const tempStudyLabel = new qx.ui.basic.Label(this.tr("New studies")).set({
        font: navBarLabelFont
      });
@@ -272,14 +272,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
       return tempStudyLayout;
     },
 
-    __createDeleteButton: 
function() { + __createDeleteButton: function(areTemplates) { const deleteButton = new qx.ui.form.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/14").set({ visibility: "excluded" }); osparc.utils.Utils.setIdToWidget(deleteButton, "deleteStudiesBtn"); - deleteButton.addListener("execute", e => { - const thisButton = e.getTarget(); - const areTemplates = this.__templateDeleteButton === thisButton; + deleteButton.addListener("execute", () => { const selection = areTemplates ? this.__templateStudyContainer.getSelection() : this.__userStudyContainer.getSelection(); const win = this.__createConfirmWindow(selection.length > 1); win.center(); From 11a6fb2e7db51e27e3baa1e4da22f175f9d5543d Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 13:37:25 +0100 Subject: [PATCH 42/74] tick - untick multiSelection logic working --- .../class/osparc/dashboard/StudyBrowser.js | 12 +++++++- .../osparc/dashboard/StudyBrowserListItem.js | 30 ++++++++++++++++--- 2 files changed, 37 insertions(+), 5 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 8350ded8c84..fcb2cdde9bc 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -221,6 +221,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { studiesTitleContainer.add(studiesDeleteButton); const userStudyContainer = this.__userStudyContainer = this.__createUserStudyList(); userStudyContainer.addListener("changeSelection", () => { + const nSelected = this.__userStudyContainer.getSelection().length; + this.__userStudyContainer.getChildren().forEach(userStudyItem => { + userStudyItem.multiSelection(nSelected); + }); this.__updateDeleteStudiesButton(studiesDeleteButton); }, this); const userStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ @@ -242,7 +246,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { templateTitleContainer.add(templateDeleteButton); const templateStudyContainer = this.__templateStudyContainer = this.__createTemplateStudyList(); this.__templateStudyContainer.addListener("changeSelection", () => { - this.__newStudyBtn.setEnabled(!this.__templateStudyContainer.getSelection().length); + const nSelected = this.__templateStudyContainer.getSelection().length; + this.__newStudyBtn.setEnabled(!nSelected); + this.__templateStudyContainer.getChildren().forEach(templateStudyItem => { + if (templateStudyItem instanceof osparc.dashboard.StudyBrowserListItem) { + templateStudyItem.multiSelection(nSelected); + } + }); this.__updateDeleteTemplatesButton(templateDeleteButton); }, this); const tempStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 6f917c775b1..e8a64324b09 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -83,10 +83,19 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } }, + statics: { + menuButtonZIndex: 20 + }, + members: { __dateFormat: null, __timeFormat: null, + multiSelection: function(on) { + const untickIcon = this.getChildControl("tick-unselected"); + untickIcon.setVisibility(on ? 
"visible" : "excluded"); + }, + // overridden _createChildControlImpl: function(id) { let control; @@ -96,8 +105,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { width: 30, height: 30, icon: "@FontAwesome5Solid/ellipsis-v/16", - focusable: false, - zIndex: 20 + zIndex: this.self().menuButtonZIndex, + focusable: false }); this._add(control, { top: 0, @@ -109,8 +118,21 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { width: 30, height: 30, icon: "@FontAwesome5Solid/check-circle/16", - focusable: false, - zIndex: 21 + zIndex: this.self().menuButtonZIndex + 2, + focusable: false + }); + this._add(control, { + top: 0, + right: 0 + }); + break; + case "tick-unselected": + control = new qx.ui.form.MenuButton().set({ + width: 30, + height: 30, + icon: "@FontAwesome5Solid/circle/16", + zIndex: this.self().menuButtonZIndex + 1, + focusable: false }); this._add(control, { top: 0, From d8a1ea79c98b6f52d01903a580ccbc1ab0061659 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 13:41:12 +0100 Subject: [PATCH 43/74] minor --- .../class/osparc/dashboard/StudyBrowserListItem.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index e8a64324b09..044ec08349f 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -113,12 +113,12 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { right: 0 }); break; - case "tick-selected": + case "tick-unselected": control = new qx.ui.form.MenuButton().set({ width: 30, height: 30, - icon: "@FontAwesome5Solid/check-circle/16", - zIndex: this.self().menuButtonZIndex + 2, + icon: "@FontAwesome5Solid/circle/16", + zIndex: this.self().menuButtonZIndex + 1, focusable: false }); this._add(control, { @@ -126,12 +126,12 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { right: 0 }); break; - case "tick-unselected": + case "tick-selected": control = new qx.ui.form.MenuButton().set({ width: 30, height: 30, - icon: "@FontAwesome5Solid/circle/16", - zIndex: this.self().menuButtonZIndex + 1, + icon: "@FontAwesome5Solid/check-circle/16", + zIndex: this.self().menuButtonZIndex + 2, focusable: false }); this._add(control, { From d766a264c6e8bc01342310f4d96d08f45866224b Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 14:08:01 +0100 Subject: [PATCH 44/74] studyTitle moved to base class --- .../osparc/dashboard/StudyBrowserListBase.js | 36 +++++++++++++++++++ .../osparc/dashboard/StudyBrowserListItem.js | 36 +++++-------------- .../osparc/dashboard/StudyBrowserListNew.js | 11 +----- 3 files changed, 45 insertions(+), 38 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index d7fa586ee70..16f1de7b059 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -66,6 +66,12 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { appearance: { refine : true, init : "pb-listitem" + }, + + studyTitle: { + check: "String", + apply : "_applyStudyTitle", + nullable : true } }, @@ -79,6 +85,36 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { _mainLayout: null, + _createChildControlImpl: 
function(id) { + let control; + switch (id) { + case "studyTitle": + control = new qx.ui.basic.Label(this.getStudyTitle()).set({ + margin: [5, 0], + font: "title-14", + anonymous: true + }); + osparc.utils.Utils.setIdToWidget(control, "studyBrowserListNew_title"); + this._mainLayout.addAt(control, 0); + break; + } + + return control || this.base(arguments, id); + }, + + _applyStudyTitle: function(value, old) { + let label = this.getChildControl("studyTitle"); + label.setValue(value); + }, + + _applyIcon: function(value, old) { + let icon = this.getChildControl("icon"); + icon.set({ + source: value, + paddingTop: value && value.match(/^@/) ? 30 : 0 + }); + }, + /** * Event handler for the pointer over event. */ diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 044ec08349f..bb1a38c46bf 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -59,12 +59,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { apply : "_applyUuid" }, - studyTitle: { - check: "String", - apply : "_applyStudyTitle", - nullable : true - }, - creator: { check: "String", apply : "_applyCreator", @@ -139,15 +133,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { right: 0 }); break; - case "studyTitle": - control = new qx.ui.basic.Label(this.getStudyTitle()).set({ - margin: [5, 0], - font: "title-14", - anonymous: true - }); - osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_title"); - this._mainLayout.addAt(control, 0); - break; case "creator": control = new qx.ui.basic.Label(this.getCreator()).set({ rich: true, @@ -197,19 +182,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { osparc.utils.Utils.setIdToWidget(this, "studyBrowserListItem_"+value); }, - _applyIcon: function(value, old) { - let icon = this.getChildControl("icon"); - icon.set({ - source: value, - paddingTop: value && value.match(/^@/) ? 30 : 0 - }); - }, - - _applyStudyTitle: function(value, old) { - let label = this.getChildControl("studyTitle"); - label.setValue(value); - }, - _applyCreator: function(value, old) { let label = this.getChildControl("creator"); label.setValue(value); @@ -233,6 +205,14 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } }, + _applyIcon: function(value, old) { + let icon = this.getChildControl("icon"); + icon.set({ + source: value, + paddingTop: value && value.match(/^@/) ? 
30 : 0 + }); + }, + _applyTags: function(tags) { if (osparc.data.Permissions.getInstance().canDo("study.tag")) { const tagsContainer = this.getChildControl("tags"); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js index 1e80c206858..405507e4f55 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js @@ -38,15 +38,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListNew", { _createChildControlImpl: function(id) { let control; switch (id) { - case "studyTitle": - control = new qx.ui.basic.Label(this.tr("Create New Study")).set({ - margin: [5, 0], - font: "title-14", - anonymous: true - }); - osparc.utils.Utils.setIdToWidget(control, "studyBrowserListNew_title"); - this._mainLayout.addAt(control, 0); - break; case "studyDescription": control = new qx.ui.basic.Label(this.tr("Start with a empty study")).set({ rich: true, @@ -72,7 +63,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListNew", { }, __buildLayout: function() { - this.getChildControl("studyTitle"); + this.setStudyTitle(this.tr("Create New Study")); this.getChildControl("studyDescription"); let icon = this.getChildControl("icon"); icon.set({ From cc610f1ed36d17ea468849e2975fcc87279fbde6 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 14:40:37 +0100 Subject: [PATCH 45/74] minor --- .../client/source/class/osparc/dashboard/StudyBrowserListBase.js | 1 - 1 file changed, 1 deletion(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index 16f1de7b059..0650df4666a 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -98,7 +98,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { this._mainLayout.addAt(control, 0); break; } - return control || this.base(arguments, id); }, From b6bd45fdc5c8a37845c65e9de26a459d71eba22e Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 14:46:21 +0100 Subject: [PATCH 46/74] minor --- .../source/class/osparc/dashboard/StudyBrowser.js | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index fcb2cdde9bc..f99656cfdad 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -213,12 +213,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __createUserStudiesLayout: function() { const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const studiesTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const studiesDeleteButton = this.__createDeleteButton(false); const myStudyLabel = new qx.ui.basic.Label(this.tr("Recent studies")).set({ font: navBarLabelFont }); studiesTitleContainer.add(myStudyLabel); + const studiesDeleteButton = this.__createDeleteButton(false); studiesTitleContainer.add(studiesDeleteButton); + userStudyLayout.add(studiesTitleContainer); + const userStudyContainer = this.__userStudyContainer = this.__createUserStudyList(); userStudyContainer.addListener("changeSelection", () => { const nSelected = 
this.__userStudyContainer.getSelection().length; @@ -230,20 +232,22 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const userStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ marginTop: 20 }); - userStudyLayout.add(studiesTitleContainer); userStudyLayout.add(userStudyContainer); + return userStudyLayout; }, __createTemplateStudiesLayout: function() { const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const templateDeleteButton = this.__createDeleteButton(true); const tempStudyLabel = new qx.ui.basic.Label(this.tr("New studies")).set({ font: navBarLabelFont }); templateTitleContainer.add(tempStudyLabel); + const templateDeleteButton = this.__createDeleteButton(true); templateTitleContainer.add(templateDeleteButton); + tempStudyLayout.add(templateTitleContainer); + const templateStudyContainer = this.__templateStudyContainer = this.__createTemplateStudyList(); this.__templateStudyContainer.addListener("changeSelection", () => { const nSelected = this.__templateStudyContainer.getSelection().length; @@ -258,8 +262,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const tempStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ marginTop: 20 }); - tempStudyLayout.add(templateTitleContainer); tempStudyLayout.add(templateStudyContainer); + return tempStudyLayout; }, From b4762827cee33e851c9bfa6f39d0b2ebf6cf5f3e Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 14:58:52 +0100 Subject: [PATCH 47/74] minor fix --- .../source/class/osparc/dashboard/StudyBrowser.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index f99656cfdad..04a0c358140 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -211,6 +211,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createUserStudiesLayout: function() { + const userStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + marginTop: 20 + }); + const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const studiesTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const myStudyLabel = new qx.ui.basic.Label(this.tr("Recent studies")).set({ @@ -229,15 +233,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); this.__updateDeleteStudiesButton(studiesDeleteButton); }, this); - const userStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ - marginTop: 20 - }); userStudyLayout.add(userStudyContainer); return userStudyLayout; }, __createTemplateStudiesLayout: function() { + const tempStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + marginTop: 20 + }); + const navBarLabelFont = qx.bom.Font.fromConfig(osparc.theme.Font.fonts["nav-bar-label"]); const templateTitleContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const tempStudyLabel = new qx.ui.basic.Label(this.tr("New studies")).set({ @@ -259,9 +264,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); this.__updateDeleteTemplatesButton(templateDeleteButton); }, this); - const tempStudyLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ - marginTop: 20 - }); 
tempStudyLayout.add(templateStudyContainer); return tempStudyLayout; From 3a757757ff404ba0eabe1b804848d784d51cefbb Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 17:25:33 +0100 Subject: [PATCH 48/74] close studyDetailsEditor window when finished editing --- .../source/class/osparc/dashboard/StudyBrowser.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 04a0c358140..a67f5e8280a 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -509,8 +509,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { height: 400, modal: true }); - studyDetailsEditor.addListener("openedStudy", () => { - win.close(); + [ + "updatedStudy", + "updatedTemplate", + "openedStudy" + ].forEach(event => { + studyDetailsEditor.addListener(event, () => { + win.close(); + }); }); win.add(studyDetailsEditor); win.open(); From e4a782a98fc32d00efdcba43901a559d6e7a40d4 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 17:26:02 +0100 Subject: [PATCH 49/74] allow multiline study title --- .../client/source/class/osparc/dashboard/StudyBrowserListBase.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index 0650df4666a..668589f8f8c 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -92,6 +92,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { control = new qx.ui.basic.Label(this.getStudyTitle()).set({ margin: [5, 0], font: "title-14", + rich : true, anonymous: true }); osparc.utils.Utils.setIdToWidget(control, "studyBrowserListNew_title"); From 589e24095975ab58188af8edaad04b9586e6db83 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 18:27:57 +0100 Subject: [PATCH 50/74] ticks are Images (visibility logic improved) --- .../osparc/dashboard/StudyBrowserListItem.js | 41 ++++++++----------- 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index bb1a38c46bf..ba14c14aab5 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -38,11 +38,14 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { qx.locale.Date.getTimeFormat("short") ); - const tickIcon = this.getChildControl("tick-selected"); - this.bind("value", tickIcon, "visibility", { - converter: function(value) { - return value ? "visible" : "excluded"; - } + this.addListener("changeValue", e => { + const val = this.getValue(); + + const tick = this.getChildControl("tick-selected"); + tick.setVisibility(val ? "visible" : "excluded"); + + const untick = this.getChildControl("tick-unselected"); + untick.setVisibility(val ? "excluded" : "visible"); }); }, @@ -86,8 +89,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { __timeFormat: null, multiSelection: function(on) { - const untickIcon = this.getChildControl("tick-unselected"); - untickIcon.setVisibility(on ? 
"visible" : "excluded"); + const menuButton = this.getChildControl("menu-button"); + menuButton.setVisibility(on ? "excluded" : "visible"); }, // overridden @@ -108,29 +111,21 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { }); break; case "tick-unselected": - control = new qx.ui.form.MenuButton().set({ - width: 30, - height: 30, - icon: "@FontAwesome5Solid/circle/16", - zIndex: this.self().menuButtonZIndex + 1, - focusable: false + control = new qx.ui.basic.Image("@FontAwesome5Solid/circle/16").set({ + zIndex: this.self().menuButtonZIndex -1 }); this._add(control, { - top: 0, - right: 0 + top: 6, + right: 6 }); break; case "tick-selected": - control = new qx.ui.form.MenuButton().set({ - width: 30, - height: 30, - icon: "@FontAwesome5Solid/check-circle/16", - zIndex: this.self().menuButtonZIndex + 2, - focusable: false + control = new qx.ui.basic.Image("@FontAwesome5Solid/check-circle/16").set({ + zIndex: this.self().menuButtonZIndex -1 }); this._add(control, { - top: 0, - right: 0 + top: 6, + right: 6 }); break; case "creator": From 49ef9de72f422290fea202d994cefd9deaad6c26 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 18:48:08 +0100 Subject: [PATCH 51/74] fixed size for study cards --- .../source/class/osparc/dashboard/StudyBrowserListBase.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index 668589f8f8c..e596bfd7b1b 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -34,6 +34,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { this.base(arguments); this.set({ width: this.self().ITEM_WIDTH, + height: this.self().ITEM_HEIGHT, allowGrowX: false }); @@ -54,7 +55,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { }, statics: { - ITEM_WIDTH: 210 + ITEM_WIDTH: 200, + ITEM_HEIGHT: 200 }, events: { From aacb3c6a6d12274ff8d72a89a6b560204c822172 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 19:30:07 +0100 Subject: [PATCH 52/74] icon moved to base class, fixed heights removed --- .../osparc/dashboard/StudyBrowserListBase.js | 19 ++++++++++++++++--- .../osparc/dashboard/StudyBrowserListItem.js | 18 ------------------ .../osparc/dashboard/StudyBrowserListNew.js | 16 +--------------- 3 files changed, 17 insertions(+), 36 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index e596bfd7b1b..56b46fdf1fc 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -40,9 +40,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { this._setLayout(new qx.ui.layout.Canvas()); - let mainLayout = this._mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ - alignY: "middle" - })); + const mainLayout = this._mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); this._add(mainLayout, { top: 0, right: 0, @@ -100,6 +98,21 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { osparc.utils.Utils.setIdToWidget(control, "studyBrowserListNew_title"); this._mainLayout.addAt(control, 0); break; + case "icon": + control = new qx.ui.basic.Image().set({ + anonymous: true, + scale: 
true, + allowStretchX: true, + allowStretchY: true, + alignX: "center", + alignY: "middle", + allowGrowX: true, + allowGrowY: true + }); + this._mainLayout.addAt(control, 3, { + flex: 1 + }); + break; } return control || this.base(arguments, id); }, diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index ba14c14aab5..1539ebaad72 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -146,16 +146,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_lastChangeDate"); this._mainLayout.addAt(control, 2); break; - case "icon": - control = new qx.ui.basic.Image(this.getIcon()).set({ - anonymous: true, - scale: true, - allowStretchX: true, - allowStretchY: true, - height: 120 - }); - this._mainLayout.addAt(control, 3); - break; case "tags": control = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 3)); this._mainLayout.addAt(control, 4); @@ -200,14 +190,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } }, - _applyIcon: function(value, old) { - let icon = this.getChildControl("icon"); - icon.set({ - source: value, - paddingTop: value && value.match(/^@/) ? 30 : 0 - }); - }, - _applyTags: function(tags) { if (osparc.data.Permissions.getInstance().canDo("study.tag")) { const tagsContainer = this.getChildControl("tags"); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js index 405507e4f55..88f6840bda5 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js @@ -46,17 +46,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListNew", { }); this._mainLayout.addAt(control, 1); break; - case "icon": - control = new qx.ui.basic.Image("@FontAwesome5Solid/plus-circle/64").set({ - anonymous: true, - scale: true, - allowStretchX: true, - allowStretchY: true, - alignY: "middle", - height: 145 - }); - this._mainLayout.addAt(control, 2); - break; } return control || this.base(arguments, id); @@ -65,10 +54,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserListNew", { __buildLayout: function() { this.setStudyTitle(this.tr("Create New Study")); this.getChildControl("studyDescription"); - let icon = this.getChildControl("icon"); - icon.set({ - paddingTop: icon.getSource() && icon.getSource().match(/^@/) ? 30 : 0 - }); + this.setIcon("@FontAwesome5Solid/plus-circle/64"); }, _onToggleChange: function(e) { From b4d25677052875e18511376c35c799a4c3b724a7 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Wed, 18 Mar 2020 19:32:22 +0100 Subject: [PATCH 53/74] make eslint happy --- .../web/client/source/class/osparc/dashboard/StudyBrowser.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index a67f5e8280a..3eb723f0e14 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -439,7 +439,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { study.tags ? 
osparc.store.Store.getInstance().getTags().filter(tag => study.tags.includes(tag.id)) : []; - const item = new osparc.dashboard.StudyBrowserListItem(menu).set({ + const item = new osparc.dashboard.StudyBrowserListItem().set({ uuid: study.uuid, studyTitle: study.name, icon: study.thumbnail || "@FontAwesome5Solid/flask/50", From b98bd78fd90b3d4c1b4b541ce863e8b5ce599b6d Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 11:14:13 +0100 Subject: [PATCH 54/74] more generic StudyBrowserListBase --- .../osparc/dashboard/StudyBrowserListBase.js | 32 ++++++++++-------- .../osparc/dashboard/StudyBrowserListItem.js | 33 ++++++++----------- .../osparc/dashboard/StudyBrowserListNew.js | 23 +++---------- 3 files changed, 36 insertions(+), 52 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index 56b46fdf1fc..d02a5b9a702 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -66,12 +66,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { appearance: { refine : true, init : "pb-listitem" - }, - - studyTitle: { - check: "String", - apply : "_applyStudyTitle", - nullable : true } }, @@ -88,16 +82,31 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { _createChildControlImpl: function(id) { let control; switch (id) { - case "studyTitle": - control = new qx.ui.basic.Label(this.getStudyTitle()).set({ + case "title": + control = new qx.ui.basic.Label().set({ margin: [5, 0], font: "title-14", rich : true, anonymous: true }); - osparc.utils.Utils.setIdToWidget(control, "studyBrowserListNew_title"); this._mainLayout.addAt(control, 0); break; + case "desc1": + control = new qx.ui.basic.Label().set({ + rich: true, + allowGrowY: false, + anonymous: true + }); + this._mainLayout.addAt(control, 1); + break; + case "desc2": + control = new qx.ui.basic.Label().set({ + rich: true, + allowGrowY: false, + anonymous: true + }); + this._mainLayout.addAt(control, 2); + break; case "icon": control = new qx.ui.basic.Image().set({ anonymous: true, @@ -117,11 +126,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { return control || this.base(arguments, id); }, - _applyStudyTitle: function(value, old) { - let label = this.getChildControl("studyTitle"); - label.setValue(value); - }, - _applyIcon: function(value, old) { let icon = this.getChildControl("icon"); icon.set({ diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 1539ebaad72..67ca3131c5d 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -62,6 +62,12 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { apply : "_applyUuid" }, + studyTitle: { + check: "String", + apply : "_applyStudyTitle", + nullable : true + }, + creator: { check: "String", apply : "_applyCreator", @@ -128,24 +134,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { right: 6 }); break; - case "creator": - control = new qx.ui.basic.Label(this.getCreator()).set({ - rich: true, - allowGrowY: false, - anonymous: true - }); - osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_creator"); - this._mainLayout.addAt(control, 1); - break; - case "lastChangeDate": - control 
= new qx.ui.basic.Label().set({ - rich: true, - allowGrowY: false, - anonymous: true - }); - osparc.utils.Utils.setIdToWidget(control, "studyBrowserListItem_lastChangeDate"); - this._mainLayout.addAt(control, 2); - break; case "tags": control = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 3)); this._mainLayout.addAt(control, 4); @@ -167,13 +155,18 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { osparc.utils.Utils.setIdToWidget(this, "studyBrowserListItem_"+value); }, + _applyStudyTitle: function(value, old) { + let label = this.getChildControl("title"); + label.setValue(value); + }, + _applyCreator: function(value, old) { - let label = this.getChildControl("creator"); + let label = this.getChildControl("desc1"); label.setValue(value); }, _applylastChangeDate: function(value, old) { - let label = this.getChildControl("lastChangeDate"); + let label = this.getChildControl("desc2"); if (value) { let dateStr = null; if (value.getDate() === (new Date()).getDate()) { diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js index 88f6840bda5..10a4c8fcb41 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js @@ -34,26 +34,13 @@ qx.Class.define("osparc.dashboard.StudyBrowserListNew", { }, members: { - // overridden - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "studyDescription": - control = new qx.ui.basic.Label(this.tr("Start with a empty study")).set({ - rich: true, - allowGrowY: false, - anonymous: true - }); - this._mainLayout.addAt(control, 1); - break; - } + __buildLayout: function() { + const title = this.getChildControl("title"); + title.setValue(this.tr("Create New Study")); - return control || this.base(arguments, id); - }, + const desc1 = this.getChildControl("desc1"); + desc1.setValue(this.tr("Start with a empty study")); - __buildLayout: function() { - this.setStudyTitle(this.tr("Create New Study")); - this.getChildControl("studyDescription"); this.setIcon("@FontAwesome5Solid/plus-circle/64"); }, From 5edf0b7f8d684e6ff5cbc67ef99b43fae691287f Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 12:11:31 +0100 Subject: [PATCH 55/74] Filter out of scroll --- .../class/osparc/dashboard/StudyBrowser.js | 49 ++++++++++--------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 3eb723f0e14..3ec3d7f350b 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -41,14 +41,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { construct: function() { this.base(arguments); - this._setLayout(new qx.ui.layout.HBox()); - - this.__studyBrowserLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()); - const scrollStudies = new qx.ui.container.Scroll(); - scrollStudies.add(this.__studyBrowserLayout); - this._add(scrollStudies, { - flex: 1 - }); + this._setLayout(new qx.ui.layout.VBox(10)); this.__initResources(); }, @@ -80,7 +73,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, members: { - __studyBrowserLayout: null, + __loadingIFrame: null, __studyFilters: null, __userStudyContainer: null, __templateStudyContainer: null, @@ -136,14 +129,14 @@ 
qx.Class.define("osparc.dashboard.StudyBrowser", { }, __initResources: function() { - const iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); - this.__studyBrowserLayout.add(iframe, { + const iframe = this.__loadingIFrame = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies")); + this._add(iframe, { flex: 1 }); this.__getTags() .then(() => { - this.__studyBrowserLayout.removeAll(); + this._removeAll(); iframe.dispose(); this.__createStudiesLayout(); this.__reloadStudies(); @@ -193,13 +186,22 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createStudiesLayout: function() { - const studyFilters = this.__studyFilters = new osparc.component.filter.group.StudyFilterGroup("studyBrowser"); + const studyFilters = this.__studyFilters = new osparc.component.filter.group.StudyFilterGroup("studyBrowser").set({ + paddingTop: 5 + }); + this._add(studyFilters); + + const studyBrowserLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()); const tempStudyLayout = this.__createTemplateStudiesLayout(); + studyBrowserLayout.add(tempStudyLayout); const userStudyLayout = this.__createUserStudiesLayout(); + studyBrowserLayout.add(userStudyLayout); - this.__studyBrowserLayout.add(studyFilters); - this.__studyBrowserLayout.add(tempStudyLayout); - this.__studyBrowserLayout.add(userStudyLayout); + const scrollStudies = new qx.ui.container.Scroll(); + scrollStudies.add(studyBrowserLayout); + this._add(scrollStudies, { + flex: 1 + }); }, __createNewStudyButton: function() { @@ -365,16 +367,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __startStudy: function(studyData) { - this.__showChildren(false); - let iframe = osparc.utils.Utils.createLoadingIFrame(this.tr("Services")); - this._add(iframe, { - flex: 1 - }); - osparc.store.Store.getInstance().getServices(false) .then(() => { - this._remove(iframe); - iframe.dispose(); + this._remove(this.__loadingIFrame); + this.__loadingIFrame.dispose(); this.__showChildren(true); this.__loadStudy(studyData); }); @@ -578,6 +574,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (selection.length > 1) { this.__itemMultiSelected(item, isTemplate); } else if (selected) { + this.__showChildren(false); + const iframe = this.__loadingIFrame = osparc.utils.Utils.createLoadingIFrame(this.tr("Services")); + this._add(iframe, { + flex: 1 + }); isTemplate ? 
this.__createStudyBtnClkd(studyData) : this.__startStudy(studyData); } }, From 69a932dbf9ce39175261edf7c1b9b2e8dc39d8d3 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 12:47:53 +0100 Subject: [PATCH 56/74] Commands and focuses added to login pages --- .../class/osparc/auth/core/BaseAuthPage.js | 11 ++++++ .../source/class/osparc/auth/ui/LoginView.js | 20 ++++++---- .../class/osparc/auth/ui/RegistrationView.js | 26 +++++++++++-- .../osparc/auth/ui/ResetPassRequestView.js | 38 ++++++++++++++----- 4 files changed, 76 insertions(+), 19 deletions(-) diff --git a/services/web/client/source/class/osparc/auth/core/BaseAuthPage.js b/services/web/client/source/class/osparc/auth/core/BaseAuthPage.js index a8768b39550..0c89b588778 100644 --- a/services/web/client/source/class/osparc/auth/core/BaseAuthPage.js +++ b/services/web/client/source/class/osparc/auth/core/BaseAuthPage.js @@ -52,6 +52,9 @@ qx.Class.define("osparc.auth.core.BaseAuthPage", { el.insertInto(form); }); this._buildPage(); + + this.addListener("appear", this._onAppear, this); + this.addListener("disappear", this._onDisappear, this); }, /* @@ -108,6 +111,14 @@ qx.Class.define("osparc.auth.core.BaseAuthPage", { this.add(lbl, { flex:1 }); + }, + + _onAppear: function() { + return; + }, + + _onDisappear: function() { + return; } } }); diff --git a/services/web/client/source/class/osparc/auth/ui/LoginView.js b/services/web/client/source/class/osparc/auth/ui/LoginView.js index 3b39a4d34ba..8865f546949 100644 --- a/services/web/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/web/client/source/class/osparc/auth/ui/LoginView.js @@ -50,6 +50,8 @@ qx.Class.define("osparc.auth.ui.LoginView", { members: { // overrides base __form: null, + __loginBtn: null, + _buildPage: function() { this.__form = new qx.ui.form.Form(); @@ -85,17 +87,11 @@ qx.Class.define("osparc.auth.ui.LoginView", { this.add(pass); this.__form.add(pass, "", null, "password", null); - const loginBtn = new osparc.ui.form.FetchButton(this.tr("Sign in")); + const loginBtn = this.__loginBtn = new osparc.ui.form.FetchButton(this.tr("Sign in")); loginBtn.addListener("execute", () => { loginBtn.setFetching(true); this.__login(loginBtn); }, this); - // Listen to "Enter" key - this.addListener("keypress", keyEvent => { - if (keyEvent.getKeyIdentifier() === "Enter") { - this.__login(); - } - }, this); osparc.utils.Utils.setIdToWidget(loginBtn, "loginSubmitBtn"); this.add(loginBtn); @@ -201,6 +197,16 @@ qx.Class.define("osparc.auth.ui.LoginView", { for (const key in fieldItems) { fieldItems[key].resetValue(); } + }, + + _onAppear: function() { + // Listen to "Enter" key + const command = new qx.ui.command.Command("Enter"); + this.__loginBtn.setCommand(command); + }, + + _onDisappear: function() { + this.__loginBtn.setCommand(null); } } }); diff --git a/services/web/client/source/class/osparc/auth/ui/RegistrationView.js b/services/web/client/source/class/osparc/auth/ui/RegistrationView.js index 7faed308f3e..e8cbc39f6a0 100644 --- a/services/web/client/source/class/osparc/auth/ui/RegistrationView.js +++ b/services/web/client/source/class/osparc/auth/ui/RegistrationView.js @@ -32,6 +32,8 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { members: { __email: null, + __submitBtn: null, + __cancelBtn: null, // overrides base _buildPage: function() { @@ -47,6 +49,10 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { this.add(email); osparc.utils.Utils.setIdToWidget(email, "registrationEmailFld"); this.__email = email; + this.addListener("appear", () => { + 
email.focus(); + email.activate(); + }); // const uname = new qx.ui.form.TextField().set({ // required: true, @@ -86,13 +92,13 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { // submit & cancel buttons const grp = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const submitBtn = new qx.ui.form.Button(this.tr("Submit")); + const submitBtn = this.__submitBtn = new qx.ui.form.Button(this.tr("Submit")); osparc.utils.Utils.setIdToWidget(submitBtn, "registrationSubmitBtn"); grp.add(submitBtn, { flex:1 }); - const cancelBtn = new qx.ui.form.Button(this.tr("Cancel")); + const cancelBtn = this.__cancelBtn = new qx.ui.form.Button(this.tr("Cancel")); osparc.utils.Utils.setIdToWidget(cancelBtn, "registrationCancelBtn"); grp.add(cancelBtn, { flex:1 @@ -132,7 +138,21 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { }; manager.register(userData, successFun, failFun, this); - } + }, + + _onAppear: function() { + // Listen to "Enter" key + const commandEnter = new qx.ui.command.Command("Enter"); + this.__submitBtn.setCommand(commandEnter); + // Listen to "Esc" key + const commandEsc = new qx.ui.command.Command("Esc"); + this.__cancelBtn.setCommand(commandEsc); + }, + + _onDisappear: function() { + this.__submitBtn.setCommand(null); + this.__cancelBtn.setCommand(null); + } } }); diff --git a/services/web/client/source/class/osparc/auth/ui/ResetPassRequestView.js b/services/web/client/source/class/osparc/auth/ui/ResetPassRequestView.js index ec328e65c82..52daa9e7cde 100644 --- a/services/web/client/source/class/osparc/auth/ui/ResetPassRequestView.js +++ b/services/web/client/source/class/osparc/auth/ui/ResetPassRequestView.js @@ -29,30 +29,36 @@ qx.Class.define("osparc.auth.ui.ResetPassRequestView", { */ members: { + __submitBtn: null, + __cancelBtn: null, // overrides base _buildPage: function() { - let manager = new qx.ui.form.validation.Manager(); + const manager = new qx.ui.form.validation.Manager(); this._addTitleHeader(this.tr("Reset Password")); // email - let email = new qx.ui.form.TextField(); + const email = new qx.ui.form.TextField(); email.setRequired(true); email.setPlaceholder(this.tr("Introduce your registration email")); this.add(email); + this.addListener("appear", () => { + email.focus(); + email.activate(); + }); manager.add(email, qx.util.Validate.email()); // submit and cancel buttons - let grp = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + const grp = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - let submitBtn = new qx.ui.form.Button(this.tr("Submit")); + const submitBtn = this.__submitBtn = new qx.ui.form.Button(this.tr("Submit")); grp.add(submitBtn, { flex:1 }); - let cancelBtn = new qx.ui.form.Button(this.tr("Cancel")); + const cancelBtn = this.__cancelBtn = new qx.ui.form.Button(this.tr("Cancel")); grp.add(cancelBtn, { flex:1 }); @@ -73,20 +79,34 @@ qx.Class.define("osparc.auth.ui.ResetPassRequestView", { __submit: function(email) { console.debug("sends email to reset password to ", email); - let manager = osparc.auth.Manager.getInstance(); + const manager = osparc.auth.Manager.getInstance(); - let successFun = function(log) { + const successFun = function(log) { this.fireDataEvent("done", log.message); osparc.component.message.FlashMessenger.getInstance().log(log); }; - let failFun = function(msg) { + const failFun = function(msg) { msg = msg || this.tr("Could not request password reset"); osparc.component.message.FlashMessenger.getInstance().logAs(msg, "ERROR"); }; manager.resetPasswordRequest(email.getValue(), successFun, 
failFun, this); - } + }, + + _onAppear: function() { + // Listen to "Enter" key + const commandEnter = new qx.ui.command.Command("Enter"); + this.__submitBtn.setCommand(commandEnter); + // Listen to "Esc" key + const commandEsc = new qx.ui.command.Command("Esc"); + this.__cancelBtn.setCommand(commandEsc); + }, + + _onDisappear: function() { + this.__submitBtn.setCommand(null); + this.__cancelBtn.setCommand(null); + } } }); From 0e76872c48efe06597d8f6bf5334a6e18fbb4aa4 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 12:52:25 +0100 Subject: [PATCH 57/74] Padding added to study cards --- .../source/class/osparc/dashboard/StudyBrowserListBase.js | 4 ++-- .../source/class/osparc/dashboard/StudyBrowserListItem.js | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js index d02a5b9a702..f3f873a2121 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListBase.js @@ -16,8 +16,6 @@ ************************************************************************ */ -/* eslint "qx-rules/no-refs-in-members": "warn" */ - /** * Widget used mainly by StudyBrowser for displaying Studies * @@ -40,6 +38,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListBase", { this._setLayout(new qx.ui.layout.Canvas()); + this.setPadding(8); + const mainLayout = this._mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); this._add(mainLayout, { top: 0, diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 67ca3131c5d..8986c6470b9 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -121,8 +121,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { zIndex: this.self().menuButtonZIndex -1 }); this._add(control, { - top: 6, - right: 6 + top: 4, + right: 4 }); break; case "tick-selected": @@ -130,8 +130,8 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { zIndex: this.self().menuButtonZIndex -1 }); this._add(control, { - top: 6, - right: 6 + top: 4, + right: 4 }); break; case "tags": From 604d9de14a14072a3eafdc045f52a4a7416a1b80 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 14:52:42 +0100 Subject: [PATCH 58/74] adapt e2e: study opens with one click --- tests/e2e/utils/auto.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/e2e/utils/auto.js b/tests/e2e/utils/auto.js index 616f26f411e..b9d65072740 100644 --- a/tests/e2e/utils/auto.js +++ b/tests/e2e/utils/auto.js @@ -220,10 +220,6 @@ async function dashboardOpenFirstTemplate(page, templateName) { const firstChildId = '[osparc-test-id="' + children[0] + '"]' await page.waitForSelector(firstChildId) await page.click(firstChildId) - - await page.waitForSelector('[osparc-test-id="openStudyBtn"]') - console.log("Opening ", templateName) - await page.click('[osparc-test-id="openStudyBtn"]') } async function __dashboardFilterStudiesByText(page, templateName) { From 26967ae573b59126c51119c9494abb4a19d36f5b Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 15:56:22 +0100 Subject: [PATCH 59/74] LoadingIFrame logic improved --- .../class/osparc/dashboard/StudyBrowser.js | 56 +++++++++++-------- 1 file 
changed, 33 insertions(+), 23 deletions(-)

diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
index 3ec3d7f350b..0f0c5cdfccf 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -129,15 +129,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
     },
 
     __initResources: function() {
-      const iframe = this.__loadingIFrame = osparc.utils.Utils.createLoadingIFrame(this.tr("Studies"));
-      this._add(iframe, {
-        flex: 1
-      });
+      this.__showLoadingIFrame(this.tr("Studies"));
 
       this.__getTags()
         .then(() => {
-          this._removeAll();
-          iframe.dispose();
+          this.__hideLoadingIFrame();
           this.__createStudiesLayout();
           this.__reloadStudies();
           this.__attachEventHandlers();
@@ -338,6 +334,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
     },
 
     __createStudy: function(minStudyData, templateId) {
+      this.__showLoadingIFrame(this.tr("Creating Study"));
       if (templateId) {
         const params = {
           url: {
@@ -367,11 +364,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
     },
 
     __startStudy: function(studyData) {
+      this.__showLoadingIFrame(this.tr("Starting Study"));
       osparc.store.Store.getInstance().getServices(false)
         .then(() => {
-          this._remove(this.__loadingIFrame);
-          this.__loadingIFrame.dispose();
-          this.__showChildren(true);
+          this.__hideLoadingIFrame();
           this.__loadStudy(studyData);
         });
     },
@@ -383,15 +379,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
       this.fireDataEvent("startStudy", this.__studyEditor);
     },
 
-    __showChildren: function(show) {
-      let children = this._getChildren();
-      for (let i=0; i<children.length; i++) {
-        if (show) {
-          children[i].setVisibility("visible");
-        } else {
-          children[i].setVisibility("excluded");
-        }
-      }
+    __showStudiesLayout: function(show) {
+      this._getChildren().forEach(children => {
+        children.setVisibility(show ? "visible" : "excluded");
+      });
     },
 
     __createUserStudyList: function() {
@@ -574,11 +565,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
       if (selection.length > 1) {
        this.__itemMultiSelected(item, isTemplate);
       } else if (selected) {
-        this.__showChildren(false);
-        const iframe = this.__loadingIFrame = osparc.utils.Utils.createLoadingIFrame(this.tr("Services"));
-        this._add(iframe, {
-          flex: 1
-        });
         isTemplate ? this.__createStudyBtnClkd(studyData) : this.__startStudy(studyData);
       }
     },
@@ -692,6 +678,30 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
 
     __createConfirmWindow: function(isMulti) {
      const msg = isMulti ?
this.tr("Are you sure you want to delete the studies?") : this.tr("Are you sure you want to delete the study?"); return new osparc.ui.window.Confirmation(msg); + }, + + __showLoadingIFrame: function(label) { + this.__hideLoadingIFrame(); + + this.__showStudiesLayout(false); + + const iframe = this.__loadingIFrame = osparc.utils.Utils.createLoadingIFrame(label); + this._add(iframe, { + flex: 1 + }); + }, + + __hideLoadingIFrame: function() { + if (this.__loadingIFrame) { + const idx = this._indexOf(this.__loadingIFrame); + if (idx !== -1) { + this._remove(this.__loadingIFrame); + } + this.__loadingIFrame.dispose(); + this.__loadingIFrame = null; + } + + this.__showStudiesLayout(true); } } }); From 90ee4dd96e50a1122944ec255d377708f75d3e92 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 20 Mar 2020 15:56:47 +0100 Subject: [PATCH 60/74] Update StudyBrowser.js --- .../web/client/source/class/osparc/dashboard/StudyBrowser.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 0f0c5cdfccf..fe160b736a7 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -129,7 +129,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __initResources: function() { - this.__showLoadingIFrame(this.tr("Studies")); + this.__showLoadingIFrame(this.tr("Loading studies")); this.__getTags() .then(() => { From 7aed8c2b5c08bba8d9b8c79041ed3458ca348aef Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 23 Mar 2020 10:08:46 +0100 Subject: [PATCH 61/74] [bug fix] Bring back the automatic output update --- .../web/client/source/class/osparc/desktop/StudyEditor.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyEditor.js b/services/web/client/source/class/osparc/desktop/StudyEditor.js index 429b3224b19..d772dc899e7 100644 --- a/services/web/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/web/client/source/class/osparc/desktop/StudyEditor.js @@ -646,14 +646,14 @@ qx.Class.define("osparc.desktop.StudyEditor", { // callback for node updates const slotName3 = "nodeUpdated"; socket.removeSlot(slotName3); - socket.on(slotName3, function(data) { + socket.on(slotName3, data => { const d = JSON.parse(data); const nodeId = d["Node"]; const nodeData = d["Data"]; const workbench = this.getStudy().getWorkbench(); const node = workbench.getNode(nodeId); if (node) { - node.setOutputData(nodeData); + node.setOutputData(nodeData.outputs); if (nodeData.progress) { const progress = Number.parseInt(nodeData.progress); node.setProgress(progress); From abf6b2cc624acc5f7fcd76e5ad526dcb0dee3f70 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 23 Mar 2020 10:23:25 +0100 Subject: [PATCH 62/74] minor --- .../web/client/source/class/osparc/dashboard/StudyBrowser.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index fe160b736a7..0f1ebe1f716 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -226,6 +226,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const userStudyContainer = this.__userStudyContainer = this.__createUserStudyList(); 
userStudyContainer.addListener("changeSelection", () => { const nSelected = this.__userStudyContainer.getSelection().length; + myStudyLabel.setVisibility(nSelected ? "visible" : "excluded"); this.__userStudyContainer.getChildren().forEach(userStudyItem => { userStudyItem.multiSelection(nSelected); }); @@ -252,8 +253,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { tempStudyLayout.add(templateTitleContainer); const templateStudyContainer = this.__templateStudyContainer = this.__createTemplateStudyList(); - this.__templateStudyContainer.addListener("changeSelection", () => { + templateStudyContainer.addListener("changeSelection", () => { const nSelected = this.__templateStudyContainer.getSelection().length; + tempStudyLabel.setVisibility(nSelected ? "visible" : "excluded"); this.__newStudyBtn.setEnabled(!nSelected); this.__templateStudyContainer.getChildren().forEach(templateStudyItem => { if (templateStudyItem instanceof osparc.dashboard.StudyBrowserListItem) { From c0368fdc710c210f2f7dee96d7dcdc3705fd9ebf Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 23 Mar 2020 10:35:59 +0100 Subject: [PATCH 63/74] show/hide studies container label --- .../component/form/ToggleButtonContainer.js | 13 ++++++++++++- .../class/osparc/dashboard/StudyBrowser.js | 19 +++++++++++++------ 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js index 8dd438bc7ae..2a19247319f 100644 --- a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js +++ b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js @@ -16,7 +16,8 @@ qx.Class.define("osparc.component.form.ToggleButtonContainer", { }, events: { - changeSelection: "qx.event.type.Data" + changeSelection: "qx.event.type.Data", + changeVisibility: "qx.event.type.Data" }, members: { @@ -27,6 +28,9 @@ qx.Class.define("osparc.component.form.ToggleButtonContainer", { child.addListener("changeValue", e => { this.fireDataEvent("changeSelection", this.getSelection()); }, this); + child.addListener("changeVisibility", e => { + this.fireDataEvent("changeVisibility", this.getVisibles()); + }, this); } else { console.error("ToggleButtonContainer only allows ToggleButton as its children."); } @@ -46,6 +50,13 @@ qx.Class.define("osparc.component.form.ToggleButtonContainer", { return this.getChildren().filter(button => button.getValue()); }, + /** + * Returns an array that contains all visible buttons. + */ + getVisibles: function() { + return this.getChildren().filter(button => button.isVisible()); + }, + /** * Sets the given button's value to true (checks it) and unchecks all other buttons. If the given button is not present, * every button in the container will get a false value (unchecked). 
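Note: a minimal usage sketch of the container wiring introduced above, assuming a qooxdoo environment with the osparc namespace loaded; the `sectionLabel` widget is a hypothetical stand-in and not part of the patch:

```js
// Sketch: consuming both events of ToggleButtonContainer
const container = new osparc.component.form.ToggleButtonContainer(new qx.ui.layout.Flow(5, 5));
container.add(new qx.ui.form.ToggleButton("Study A"));
container.add(new qx.ui.form.ToggleButton("Study B"));

// "changeVisibility" fires whenever a child is shown or hidden (e.g. by a filter);
// its payload is the same array returned by container.getVisibles()
container.addListener("changeVisibility", e => {
  const visibles = e.getData();
  sectionLabel.setVisibility(visibles.length ? "visible" : "excluded");
});

// "changeSelection" carries the currently checked buttons, as before
container.addListener("changeSelection", e => {
  console.log("selected cards:", e.getData().length);
});
```

Re-emitting the children's visibility changes at the container level is what lets the StudyBrowser hide a whole section label once every card in it has been filtered out, as the StudyBrowser.js diff that follows does.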
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 0f1ebe1f716..d371384a94a 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -224,9 +224,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { userStudyLayout.add(studiesTitleContainer); const userStudyContainer = this.__userStudyContainer = this.__createUserStudyList(); - userStudyContainer.addListener("changeSelection", () => { - const nSelected = this.__userStudyContainer.getSelection().length; - myStudyLabel.setVisibility(nSelected ? "visible" : "excluded"); + userStudyContainer.addListener("changeVisibility", e => { + const nVisibles = e.getData().length; + myStudyLabel.setVisibility(nVisibles ? "visible" : "excluded"); + }, this); + userStudyContainer.addListener("changeSelection", e => { + const nSelected = e.getdata().length; this.__userStudyContainer.getChildren().forEach(userStudyItem => { userStudyItem.multiSelection(nSelected); }); @@ -253,9 +256,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { tempStudyLayout.add(templateTitleContainer); const templateStudyContainer = this.__templateStudyContainer = this.__createTemplateStudyList(); - templateStudyContainer.addListener("changeSelection", () => { - const nSelected = this.__templateStudyContainer.getSelection().length; - tempStudyLabel.setVisibility(nSelected ? "visible" : "excluded"); + templateStudyContainer.addListener("changeVisibility", e => { + const nVisibles = e.getData().length; + tempStudyLabel.setVisibility(nVisibles ? "visible" : "excluded"); + }, this); + + templateStudyContainer.addListener("changeSelection", e => { + const nSelected = e.getData().length; this.__newStudyBtn.setEnabled(!nSelected); this.__templateStudyContainer.getChildren().forEach(templateStudyItem => { if (templateStudyItem instanceof osparc.dashboard.StudyBrowserListItem) { From 921b35a7359914d29a5ed6970ad9e5c620808cdf Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 23 Mar 2020 10:45:26 +0100 Subject: [PATCH 64/74] minor --- .../client/source/class/osparc/dashboard/StudyBrowser.js | 2 +- .../source/class/osparc/dashboard/StudyBrowserListNew.js | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index d371384a94a..7e038bd092c 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -229,7 +229,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { myStudyLabel.setVisibility(nVisibles ? 
"visible" : "excluded"); }, this); userStudyContainer.addListener("changeSelection", e => { - const nSelected = e.getdata().length; + const nSelected = e.getData().length; this.__userStudyContainer.getChildren().forEach(userStudyItem => { userStudyItem.multiSelection(nSelected); }); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js index 10a4c8fcb41..2fadca51ca9 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListNew.js @@ -50,7 +50,13 @@ qx.Class.define("osparc.dashboard.StudyBrowserListNew", { _shouldApplyFilter: function(data) { if (data.text) { - return true; + const checks = [ + this.getChildControl("title").getValue().toString(), + this.getChildControl("desc1").getValue().toString() + ]; + if (checks.filter(label => label.toLowerCase().trim().includes(data.text)).length == 0) { + return true; + } } if (data.tags && data.tags.length) { return true; From 5b3f8284b238074c2e3f47cba41c833ccc1e84b4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo Date: Mon, 23 Mar 2020 11:12:15 +0100 Subject: [PATCH 65/74] Routes to statics are always set --- .../src/simcore_service_webserver/statics.py | 70 +++++++++---------- 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/statics.py b/services/web/server/src/simcore_service_webserver/statics.py index 85dfc53383c..e2639522362 100644 --- a/services/web/server/src/simcore_service_webserver/statics.py +++ b/services/web/server/src/simcore_service_webserver/statics.py @@ -46,46 +46,44 @@ async def index(request: web.Request): return web.Response(text=ofh.read(), content_type="text/html") -def write_statics_file(directory): +def write_statics_file(directory: Path) -> None: + # ensures directory exists + os.makedirs(directory, exist_ok=True) + + # create statis fiel statics = {} statics["stackName"] = os.environ.get("SWARM_STACK_NAME") statics["buildDate"] = os.environ.get("BUILD_DATE") - with open(directory / "statics.json", "w") as statics_file: - json.dump(statics, statics_file) + with open(directory / "statics.json", "wt") as fh: + json.dump(statics, fh) -@app_module_setup(__name__, ModuleCategory.SYSTEM, logger=log) +@app_module_setup(__name__, ModuleCategory.ADDON, logger=log) def setup_statics(app: web.Application): - # TODO: Should serving front-end ria be configurable? # Front-end Rich Interface Application (RIA) - try: - outdir = get_client_outdir(app) - - # Checks integrity of RIA source before serving - EXPECTED_FOLDERS = ("osparc", "resource", "transpiled") - folders = [x for x in outdir.iterdir() if x.is_dir()] - - for name in EXPECTED_FOLDERS: - folder_names = [path.name for path in folders] - if name not in folder_names: - raise web.HTTPServiceUnavailable( - reason="Invalid front-end source-output folders" - " Expected %s, got %s in %s" - % (EXPECTED_FOLDERS, folder_names, outdir), - text="Front-end application is not available", - ) - - # TODO: map ui to /ui or create an alias!? 
- app.router.add_get("/", index, name=INDEX_RESOURCE_NAME) - - # NOTE: source-output and build-output have both the same subfolder structure - # TODO: check whether this can be done at oncen - for path in folders: - app.router.add_static("/" + path.name, path) - - # Create statics file - write_statics_file(outdir / "resource") - - except web.HTTPServiceUnavailable as ex: - log.exception(ex.text) - return + app.router.add_get("/", index, name=INDEX_RESOURCE_NAME) + + # NOTE: source-output and build-output have both the same subfolder structure + outdir = get_client_outdir(app) + + # Create statics file + write_statics_file(outdir / "resource") + + EXPECTED_FOLDERS = ["osparc", "resource", "transpiled"] + folders = [x for x in outdir.iterdir() if x.is_dir()] + + # Checks integrity of RIA source before serving and warn! + for name in EXPECTED_FOLDERS: + folder_names = [path.name for path in folders] + if name not in folder_names: + log.warning( + "Missing folders: expected %s, got %s in %s", + EXPECTED_FOLDERS, + folder_names, + outdir, + ) + + # Add statis routes + folders = set(folders).union(EXPECTED_FOLDERS) + for path in folders: + app.router.add_static("/" + path.name, path) From 8693570dc16d196c0e3cf24143522707423251d6 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 23 Mar 2020 11:14:03 +0100 Subject: [PATCH 66/74] Show description instead of lastChange in template cards --- .../source/class/osparc/dashboard/StudyBrowser.js | 12 ++++++++++-- .../class/osparc/dashboard/StudyBrowserListItem.js | 13 +++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 7e038bd092c..43e9029f336 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -438,11 +438,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const item = new osparc.dashboard.StudyBrowserListItem().set({ uuid: study.uuid, studyTitle: study.name, - icon: study.thumbnail || "@FontAwesome5Solid/flask/50", creator: study.prjOwner ? study.prjOwner : null, - lastChangeDate: study.lastChangeDate ? new Date(study.lastChangeDate) : null, + icon: study.thumbnail || "@FontAwesome5Solid/flask/50", tags }); + if (isTemplate) { + item.set({ + studyDescription: isTemplate ? study.description : null + }); + } else { + item.set({ + lastChangeDate: study.lastChangeDate ? 
new Date(study.lastChangeDate) : null + }); + } const menu = this.__getStudyItemMenu(item, study, isTemplate); item.setMenu(menu); item.subscribeToFilterGroup("studyBrowser"); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 8986c6470b9..4b67c9e0321 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -68,6 +68,12 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { nullable : true }, + studyDescription: { + check: "String", + apply : "_applyStudyDescription", + nullable : true + }, + creator: { check: "String", apply : "_applyCreator", @@ -160,6 +166,13 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { label.setValue(value); }, + _applyStudyDescription: function(value, old) { + if (value) { + const label = this.getChildControl("desc2"); + label.setValue(value); + } + }, + _applyCreator: function(value, old) { let label = this.getChildControl("desc1"); label.setValue(value); From 25c66de3200be003148a236f1c883e66eb530804 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 23 Mar 2020 11:30:02 +0100 Subject: [PATCH 67/74] info shown depends on isTemplate --- .../class/osparc/dashboard/StudyBrowser.js | 12 +--- .../osparc/dashboard/StudyBrowserListItem.js | 59 +++++++++++-------- 2 files changed, 36 insertions(+), 35 deletions(-) diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 43e9029f336..31875dfd139 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -436,21 +436,15 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.store.Store.getInstance().getTags().filter(tag => study.tags.includes(tag.id)) : []; const item = new osparc.dashboard.StudyBrowserListItem().set({ + isTemplate, uuid: study.uuid, studyTitle: study.name, + studyDescription: study.description, creator: study.prjOwner ? study.prjOwner : null, + lastChangeDate: study.lastChangeDate ? new Date(study.lastChangeDate) : null, icon: study.thumbnail || "@FontAwesome5Solid/flask/50", tags }); - if (isTemplate) { - item.set({ - studyDescription: isTemplate ? study.description : null - }); - } else { - item.set({ - lastChangeDate: study.lastChangeDate ? 
new Date(study.lastChangeDate) : null - }); - } const menu = this.__getStudyItemMenu(item, study, isTemplate); item.setMenu(menu); item.subscribeToFilterGroup("studyBrowser"); diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js index 4b67c9e0321..94dcbc05e60 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserListItem.js @@ -50,40 +50,47 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { }, properties: { + isTemplate: { + check: "Boolean", + nullable: false, + init: false, + event: "changeIsTemplate" + }, + menu: { - check : "qx.ui.menu.Menu", - nullable : true, - apply : "_applyMenu", - event : "changeMenu" + check: "qx.ui.menu.Menu", + nullable: true, + apply: "_applyMenu", + event: "changeMenu" }, uuid: { check: "String", - apply : "_applyUuid" + apply: "_applyUuid" }, studyTitle: { check: "String", - apply : "_applyStudyTitle", - nullable : true + apply: "_applyStudyTitle", + nullable: true }, studyDescription: { check: "String", - apply : "_applyStudyDescription", - nullable : true + apply: "_applyStudyDescription", + nullable: true }, creator: { check: "String", - apply : "_applyCreator", - nullable : true + apply: "_applyCreator", + nullable: true }, lastChangeDate: { - check : "Date", - apply : "_applylastChangeDate", - nullable : true + check: "Date", + apply: "_applyLastChangeDate", + nullable: true }, tags: { @@ -167,20 +174,15 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { }, _applyStudyDescription: function(value, old) { - if (value) { - const label = this.getChildControl("desc2"); + if (value !== "" && this.getIsTemplate()) { + const label = this.getChildControl("desc1"); label.setValue(value); } }, - _applyCreator: function(value, old) { - let label = this.getChildControl("desc1"); - label.setValue(value); - }, - - _applylastChangeDate: function(value, old) { - let label = this.getChildControl("desc2"); - if (value) { + _applyLastChangeDate: function(value, old) { + if (value && !this.getIsTemplate()) { + const label = this.getChildControl("desc1"); let dateStr = null; if (value.getDate() === (new Date()).getDate()) { dateStr = this.tr("Today"); @@ -191,8 +193,13 @@ qx.Class.define("osparc.dashboard.StudyBrowserListItem", { } const timeStr = this.__timeFormat.format(value); label.setValue(dateStr + " " + timeStr); - } else { - label.resetValue(); + } + }, + + _applyCreator: function(value, old) { + if (this.getIsTemplate()) { + const label = this.getChildControl("desc2"); + label.setValue(value); } }, From 81d23fcbd448fc77f5b45110d8bf46037f943448 Mon Sep 17 00:00:00 2001 From: Pedro Crespo Date: Mon, 23 Mar 2020 11:41:19 +0100 Subject: [PATCH 68/74] Added cleanup of tmp if any --- .../src/simcore_service_webserver/statics.py | 43 ++++++++++++------- 1 file changed, 27 insertions(+), 16 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/statics.py b/services/web/server/src/simcore_service_webserver/statics.py index e2639522362..8e524ab3468 100644 --- a/services/web/server/src/simcore_service_webserver/statics.py +++ b/services/web/server/src/simcore_service_webserver/statics.py @@ -9,6 +9,8 @@ import json import logging import os +import shutil +import tempfile from pathlib import Path from aiohttp import web @@ -17,24 +19,30 @@ from servicelib.application_setup import ModuleCategory, app_module_setup 
INDEX_RESOURCE_NAME = "statics.index" +TMPDIR_KEY = f"{__name__}.tmpdir" log = logging.getLogger(__file__) def get_client_outdir(app: web.Application) -> Path: cfg = app[APP_CONFIG_KEY]["main"] - - # pylint 2.3.0 produces 'E1101: Instance of 'Path' has no 'expanduser' member (no-member)' ONLY - # with the installed code and not with the development code! - client_dir = Path(cfg["client_outdir"]).expanduser() # pylint: disable=E1101 + client_dir = Path(cfg["client_outdir"]).expanduser() if not client_dir.exists(): - txt = reason = "Front-end application is not available" - if cfg["testing"]: - reason = "Invalid client source path: %s" % client_dir - raise web.HTTPServiceUnavailable(reason=reason, text=txt) + tmp_dir = tempfile.mkdtemp(suffix="client_outdir") + log.error( + "Invalid client source path [%s]. Defaulting to %s", client_dir, tmp_dir + ) + client_dir = tmp_dir + app[TMPDIR_KEY] = tmp_dir return client_dir +async def _delete_tmps(app: web.Application): + tmp_dir = app.get(TMPDIR_KEY) + if tmp_dir: + shutil.rmtree(tmp_dir, ignore_errors=True) + + async def index(request: web.Request): """ Serves boot application under index @@ -58,32 +66,35 @@ def write_statics_file(directory: Path) -> None: json.dump(statics, fh) -@app_module_setup(__name__, ModuleCategory.ADDON, logger=log) +@app_module_setup(__name__, ModuleCategory.SYSTEM, logger=log) def setup_statics(app: web.Application): - # Front-end Rich Interface Application (RIA) + # Serves Front-end Rich Interface Application (RIA) app.router.add_get("/", index, name=INDEX_RESOURCE_NAME) # NOTE: source-output and build-output have both the same subfolder structure - outdir = get_client_outdir(app) + outdir: Path = get_client_outdir(app) # Create statics file write_statics_file(outdir / "resource") - EXPECTED_FOLDERS = ["osparc", "resource", "transpiled"] + required_dirs = ["osparc", "resource", "transpiled"] folders = [x for x in outdir.iterdir() if x.is_dir()] # Checks integrity of RIA source before serving and warn! 
-    for name in EXPECTED_FOLDERS:
+    for name in required_dirs:
         folder_names = [path.name for path in folders]
         if name not in folder_names:
             log.warning(
                 "Missing folders: expected %s, got %s in %s",
-                EXPECTED_FOLDERS,
+                required_dirs,
                 folder_names,
                 outdir,
             )
 
-    # Add statis routes
-    folders = set(folders).union(EXPECTED_FOLDERS)
+    # Add static routes
+    folders = set(folders).union(required_dirs)
     for path in folders:
         app.router.add_static("/" + path.name, path)
+
+    # cleanup
+    app.on_cleanup.append(_delete_tmps)

From 2200c12053b4d4fdfb07cdaa12942bf4715ed429 Mon Sep 17 00:00:00 2001
From: Pedro Crespo
Date: Mon, 23 Mar 2020 12:31:03 +0100
Subject: [PATCH 69/74] Fixes statics setup

---
 .gitignore                                                    | 2 +-
 services/web/server/src/simcore_service_webserver/statics.py  | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index c1061c4838c..1ced025a0e6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -149,4 +149,4 @@ prof/
 
 # outputs from make
 .stack-*.yml
-services/catalog/log.txt
+services/web/server/tests/data/static/resource/statics.json

diff --git a/services/web/server/src/simcore_service_webserver/statics.py b/services/web/server/src/simcore_service_webserver/statics.py
index 8e524ab3468..96b0e01093c 100644
--- a/services/web/server/src/simcore_service_webserver/statics.py
+++ b/services/web/server/src/simcore_service_webserver/statics.py
@@ -12,6 +12,7 @@ import shutil
 import tempfile
 from pathlib import Path
+from typing import Set
 
 from aiohttp import web
@@ -92,7 +93,7 @@ def setup_statics(app: web.Application):
         )
 
     # Add static routes
-    folders = set(folders).union(required_dirs)
+    folders: Set[Path] = set(folders).union([outdir / name for name in required_dirs])
     for path in folders:
         app.router.add_static("/" + path.name, path)

From 2593b9011840bc1c3b280400bda4a92d6d0a18cb Mon Sep 17 00:00:00 2001
From: odeimaiz
Date: Mon, 23 Mar 2020 17:01:53 +0100
Subject: [PATCH 70/74] minor fix

---
 services/web/client/source/class/osparc/data/model/Node.js | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js
index 62953d22122..cdaa9989aff 100644
--- a/services/web/client/source/class/osparc/data/model/Node.js
+++ b/services/web/client/source/class/osparc/data/model/Node.js
@@ -849,8 +849,10 @@ qx.Class.define("osparc.data.model.Node", {
       const {
         data
       } = resp;
-      const sizeBytes = (data && ("size_bytes" in data)) ? data["size_bytes"] : 0;
-      this.getPropsWidget().retrievedPortData(portKey, true, sizeBytes);
+      if (portKey) {
+        const sizeBytes = (data && ("size_bytes" in data)) ?
data["size_bytes"] : 0; + this.getPropsWidget().retrievedPortData(portKey, true, sizeBytes); + } console.log(data); }, this); updReq.addListener("fail", e => { From 146aa5a282b8b96e1ba97fd9af11760da3f7a476 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 24 Mar 2020 10:07:04 +0100 Subject: [PATCH 71/74] minor --- .../class/osparc/component/metadata/StudyDetailsEditor.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js b/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js index 8a33120f5f1..c16d100b570 100644 --- a/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js +++ b/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js @@ -261,9 +261,9 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", { btn.resetIcon(); btn.getChildControl("icon").getContentElement() .removeClass("rotate"); - this.fireDataEvent(this.__isTemplate ? "updatedTemplate" : "updatedStudy", data); this.__model.set(data); this.setMode("display"); + this.fireDataEvent(this.__isTemplate ? "updatedTemplate" : "updatedStudy", data); }) .catch(err => { btn.resetIcon(); From ca647e24ef7a68278793ff3d56a0208f43102f1a Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 24 Mar 2020 11:52:17 +0100 Subject: [PATCH 72/74] minor fix --- .../web/client/source/class/osparc/data/model/Node.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js index cdaa9989aff..54dcd92d05d 100644 --- a/services/web/client/source/class/osparc/data/model/Node.js +++ b/services/web/client/source/class/osparc/data/model/Node.js @@ -859,14 +859,18 @@ qx.Class.define("osparc.data.model.Node", { const { error } = e.getTarget().getResponse(); - this.getPropsWidget().retrievedPortData(portKey, false); + if (portKey) { + this.getPropsWidget().retrievedPortData(portKey, false); + } console.error("fail", error); }, this); updReq.addListener("error", e => { const { error } = e.getTarget().getResponse(); - this.getPropsWidget().retrievedPortData(portKey, false); + if (portKey) { + this.getPropsWidget().retrievedPortData(portKey, false); + } console.error("error", error); }, this); updReq.send(); From 61e98da3ce017d517b9d908099a7277727d28b16 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 24 Mar 2020 15:52:12 +0100 Subject: [PATCH 73/74] file renamings --- .../osparc/component/form/ToggleButtonContainer.js | 2 +- .../source/class/osparc/dashboard/StudyBrowser.js | 12 ++++++------ ...yBrowserListBase.js => StudyBrowserButtonBase.js} | 2 +- ...yBrowserListItem.js => StudyBrowserButtonItem.js} | 4 ++-- ...udyBrowserListNew.js => StudyBrowserButtonNew.js} | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) rename services/web/client/source/class/osparc/dashboard/{StudyBrowserListBase.js => StudyBrowserButtonBase.js} (98%) rename services/web/client/source/class/osparc/dashboard/{StudyBrowserListItem.js => StudyBrowserButtonItem.js} (98%) rename services/web/client/source/class/osparc/dashboard/{StudyBrowserListNew.js => StudyBrowserButtonNew.js} (93%) diff --git a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js index 2a19247319f..b534f6b5957 100644 --- 
a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js +++ b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js @@ -6,7 +6,7 @@ */ /** - * Container for StudyBrowserListItems or any other ToggleButtons, with some convenient methods. + * Container for StudyBrowserButtonItems or any other ToggleButtons, with some convenient methods. */ qx.Class.define("osparc.component.form.ToggleButtonContainer", { extend: qx.ui.container.Composite, diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js index 31875dfd139..1b76205b61d 100644 --- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js @@ -17,8 +17,8 @@ /** * Widget that shows two lists of studies and study editor form: - * - List1: User's studies (StudyBrowserListItem) - * - List2: Template studies to start from (StudyBrowserListItem) + * - List1: User's studies (StudyBrowserButtonItem) + * - List2: Template studies to start from (StudyBrowserButtonItem) * - Form: Extra editable information of the selected study * * It is the entry point to start editing or creatina new study. @@ -201,7 +201,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createNewStudyButton: function() { - const newStudyBtn = this.__newStudyBtn = new osparc.dashboard.StudyBrowserListNew(); + const newStudyBtn = this.__newStudyBtn = new osparc.dashboard.StudyBrowserButtonNew(); newStudyBtn.subscribeToFilterGroup("studyBrowser"); osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); newStudyBtn.addListener("execute", () => this.__createStudyBtnClkd()); @@ -265,7 +265,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const nSelected = e.getData().length; this.__newStudyBtn.setEnabled(!nSelected); this.__templateStudyContainer.getChildren().forEach(templateStudyItem => { - if (templateStudyItem instanceof osparc.dashboard.StudyBrowserListItem) { + if (templateStudyItem instanceof osparc.dashboard.StudyBrowserButtonItem) { templateStudyItem.multiSelection(nSelected); } }); @@ -435,7 +435,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { study.tags ? osparc.store.Store.getInstance().getTags().filter(tag => study.tags.includes(tag.id)) : []; - const item = new osparc.dashboard.StudyBrowserListItem().set({ + const item = new osparc.dashboard.StudyBrowserButtonItem().set({ isTemplate, uuid: study.uuid, studyTitle: study.name, @@ -650,7 +650,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const canDeleteTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.delete"); let allMine = Boolean(templateSelection.length) && canDeleteTemplate; for (let i=0; i Date: Tue, 24 Mar 2020 15:52:19 +0100 Subject: [PATCH 74/74] minor --- services/web/client/source/class/osparc/data/model/Node.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js index 54dcd92d05d..1987e4acc75 100644 --- a/services/web/client/source/class/osparc/data/model/Node.js +++ b/services/web/client/source/class/osparc/data/model/Node.js @@ -875,7 +875,9 @@ qx.Class.define("osparc.data.model.Node", { }, this); updReq.send(); - this.getPropsWidget().retrievingPortData(portKey); + if (portKey) { + this.getPropsWidget().retrievingPortData(portKey); + } } } },