diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml index 4609eb697..e3e24b043 100644 --- a/.github/workflows/actions.yml +++ b/.github/workflows/actions.yml @@ -42,15 +42,28 @@ jobs: exit 1; } + - name: Decrypt Service Account Key File + working-directory: ./ + run: | + openssl enc -aes-256-cbc -d -K "$OPENSSL_KEY" -iv "$OPENSSL_IV" -in ci-mapswipe-firebase-adminsdk-80fzw-ebce84bd5b.json.enc -out mapswipe_workers/serviceAccountKey.json + env: + OPENSSL_PASSPHRASE: ${{ secrets.OPENSSL_PASSPHRASE }} + OPENSSL_KEY: ${{ secrets.OPENSSL_KEY }} + OPENSSL_IV: ${{ secrets.OPENSSL_IV }} + + - name: Build docker images + run: | + # Create a mock file for wal-g setup + touch postgres/serviceAccountKey.json + docker compose build postgres firebase_deploy mapswipe_workers_creation django + - name: Setup Postgres Database Container env: POSTGRES_PASSWORD: postgres POSTGRES_USER: postgres POSTGRES_DB: postgres run: | - # Create a mock file for wal-g setup - touch postgres/serviceAccountKey.json - docker compose up --build --detach postgres + docker compose up --detach postgres for i in {1..5}; do docker compose exec -T postgres pg_isready && s=0 && break || s=$? && sleep 5; done; (docker compose logs postgres && exit $s) - name: Deploy Firebase Rules and Functions @@ -60,15 +73,6 @@ jobs: run: | docker compose run --rm firebase_deploy sh -c "firebase use $FIREBASE_DB && firebase deploy --token $FIREBASE_TOKEN --only database" - - name: Decrypt Service Account Key File - working-directory: ./ - run: | - openssl enc -aes-256-cbc -d -K "$OPENSSL_KEY" -iv "$OPENSSL_IV" -in ci-mapswipe-firebase-adminsdk-80fzw-ebce84bd5b.json.enc -out mapswipe_workers/serviceAccountKey.json - env: - OPENSSL_PASSPHRASE: ${{ secrets.OPENSSL_PASSPHRASE }} - OPENSSL_KEY: ${{ secrets.OPENSSL_KEY }} - OPENSSL_IV: ${{ secrets.OPENSSL_IV }} - - name: Run Tests working-directory: ./mapswipe_workers env: diff --git a/community-dashboard/app/resources/icons/validate-image.svg b/community-dashboard/app/resources/icons/validate-image.svg new file mode 100644 index 000000000..7066c5c2b --- /dev/null +++ b/community-dashboard/app/resources/icons/validate-image.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/community-dashboard/app/views/StatsBoard/index.tsx b/community-dashboard/app/views/StatsBoard/index.tsx index 1b44bc50f..d04b93212 100644 --- a/community-dashboard/app/views/StatsBoard/index.tsx +++ b/community-dashboard/app/views/StatsBoard/index.tsx @@ -43,12 +43,14 @@ import InformationCard from '#components/InformationCard'; import areaSvg from '#resources/icons/area.svg'; import sceneSvg from '#resources/icons/scene.svg'; import featureSvg from '#resources/icons/feature.svg'; +import validateImageSvg from '#resources/icons/validate-image.svg'; import { ContributorTimeStatType, OrganizationSwipeStatsType, ProjectTypeSwipeStatsType, ProjectTypeAreaStatsType, ContributorSwipeStatType, + ProjectTypeEnum, } from '#generated/types'; import { mergeItems } from '#utils/common'; import { @@ -67,15 +69,28 @@ const CHART_BREAKPOINT = 700; export type ActualContributorTimeStatType = ContributorTimeStatType & { totalSwipeTime: number }; const UNKNOWN = '-1'; const BUILD_AREA = 'BUILD_AREA'; +const MEDIA = 'MEDIA'; +const DIGITIZATION = 'DIGITIZATION'; const FOOTPRINT = 'FOOTPRINT'; const CHANGE_DETECTION = 'CHANGE_DETECTION'; +const VALIDATE_IMAGE = 'VALIDATE_IMAGE'; const COMPLETENESS = 'COMPLETENESS'; +const STREET = 'STREET'; -const projectTypes: Record = { +// FIXME: the name property is not used properly 
+const projectTypes: Record = { [UNKNOWN]: { - color: '#808080', + color: '#cacaca', name: 'Unknown', }, + [MEDIA]: { + color: '#cacaca', + name: 'Media', + }, + [DIGITIZATION]: { + color: '#cacaca', + name: 'Digitization', + }, [BUILD_AREA]: { color: '#f8a769', name: 'Find', @@ -92,6 +107,14 @@ const projectTypes: Record = { color: '#fb8072', name: 'Completeness', }, + [VALIDATE_IMAGE]: { + color: '#a1b963', + name: 'Validate Image', + }, + [STREET]: { + color: '#808080', + name: 'Street', + }, }; type ResolutionType = 'day' | 'month' | 'year'; @@ -370,10 +393,16 @@ function StatsBoard(props: Props) { const sortedProjectSwipeType = useMemo( () => ( swipeByProjectType - ?.map((item) => ({ - ...item, - projectType: item.projectType ?? '-1', - })) + ?.map((item) => { + const projectType: ProjectTypeEnum | '-1' = ( + isDefined(item.projectType) && isDefined(projectTypes[item.projectType]) + ) ? item.projectType : UNKNOWN; + + return ({ + ...item, + projectType, + }); + }) .sort((a, b) => compareNumber(a.totalSwipes, b.totalSwipes, -1)) ?? [] ), [swipeByProjectType], @@ -439,6 +468,10 @@ function StatsBoard(props: Props) { (project) => project.projectType === FOOTPRINT, )?.totalSwipes; + const validateImageTotalSwipes = swipeByProjectType?.find( + (project) => project.projectType === VALIDATE_IMAGE, + )?.totalSwipes; + const organizationColors = scaleOrdinal() .domain(totalSwipesByOrganizationStats?.map( (organization) => (organization.organizationName), @@ -689,6 +722,29 @@ function StatsBoard(props: Props) { subHeading="Compare" variant="stat" /> + + )} + value={( + + )} + label={( +
+ Images Validated +
+ )} + subHeading="Validate Image" + variant="stat" + />
( ))} diff --git a/community-dashboard/app/views/StatsBoard/styles.css b/community-dashboard/app/views/StatsBoard/styles.css index 11c250bb2..429b09ba7 100644 --- a/community-dashboard/app/views/StatsBoard/styles.css +++ b/community-dashboard/app/views/StatsBoard/styles.css @@ -67,7 +67,7 @@ >* { flex-basis: 0; flex-grow: 1; - min-width: 12rem; + min-width: 24rem; @media (max-width: 48rem) { min-width: 100%; diff --git a/community-dashboard/docker-compose.yml b/community-dashboard/docker-compose.yml index 39ac61dcc..2b548f3bb 100644 --- a/community-dashboard/docker-compose.yml +++ b/community-dashboard/docker-compose.yml @@ -2,7 +2,6 @@ version: '3.3' services: react: - build: . command: sh -c 'yarn install --frozen-lockfile && yarn start' build: context: ./ @@ -15,4 +14,4 @@ services: volumes: - .:/code ports: - - '3080:3080' + - '3081:3081' diff --git a/django/Dockerfile b/django/Dockerfile index 4f220f7e6..a6330b600 100644 --- a/django/Dockerfile +++ b/django/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10-buster +FROM python:3.10-bullseye LABEL maintainer="Mapswipe info@mapswipe.org" diff --git a/django/apps/aggregated/management/commands/update_aggregated_data.py b/django/apps/aggregated/management/commands/update_aggregated_data.py index be8b82989..49f536227 100644 --- a/django/apps/aggregated/management/commands/update_aggregated_data.py +++ b/django/apps/aggregated/management/commands/update_aggregated_data.py @@ -55,6 +55,8 @@ WHEN P.project_type = {Project.Type.CHANGE_DETECTION.value} THEN 11.2 -- FOOTPRINT: Not calculated right now WHEN P.project_type = {Project.Type.FOOTPRINT.value} THEN 6.1 + WHEN P.project_type = {Project.Type.VALIDATE_IMAGE.value} THEN 6.1 + WHEN P.project_type = {Project.Type.STREET.value} THEN 65 ELSE 1 END ) * COUNT(*) as time_spent_max_allowed @@ -110,6 +112,8 @@ WHEN P.project_type = {Project.Type.CHANGE_DETECTION.value} THEN 11.2 -- FOOTPRINT: Not calculated right now WHEN P.project_type = {Project.Type.FOOTPRINT.value} THEN 6.1 + WHEN P.project_type = {Project.Type.VALIDATE_IMAGE.value} THEN 6.1 + WHEN P.project_type = {Project.Type.STREET.value} THEN 65 ELSE 1 END ) * COUNT(*) as time_spent_max_allowed @@ -134,8 +138,10 @@ G.group_id, ( CASE - -- Hide area for Footprint + -- Hide area for Footprint and Validate Image + -- FIXME: What should we do for Project.Type.STREET.value WHEN P.project_type = {Project.Type.FOOTPRINT.value} THEN 0 + WHEN P.project_type = {Project.Type.VALIDATE_IMAGE.value} THEN 0 ELSE G.total_area END ) as total_task_group_area, diff --git a/django/apps/existing_database/filters.py b/django/apps/existing_database/filters.py index 096f2f5a1..98227d94b 100644 --- a/django/apps/existing_database/filters.py +++ b/django/apps/existing_database/filters.py @@ -38,7 +38,7 @@ class UserGroupFilter: def filter_search(self, queryset): if self.search: queryset = queryset.filter( - name__icontains=self.search, + name__unaccent__icontains=self.search, ) return queryset diff --git a/django/apps/existing_database/models.py b/django/apps/existing_database/models.py index 5bc85e113..319c28b7c 100644 --- a/django/apps/existing_database/models.py +++ b/django/apps/existing_database/models.py @@ -69,6 +69,7 @@ class Type(models.IntegerChoices): MEDIA = 5, "Media" DIGITIZATION = 6, "Digitization" STREET = 7, "Street" + VALIDATE_IMAGE = 10, "Validate Image" project_id = models.CharField(primary_key=True, max_length=999) created = models.DateTimeField(blank=True, null=True) diff --git a/django/schema.graphql b/django/schema.graphql index 
b5596fc46..07b9659c4 100644 --- a/django/schema.graphql +++ b/django/schema.graphql @@ -100,6 +100,7 @@ enum ProjectTypeEnum { MEDIA DIGITIZATION STREET + VALIDATE_IMAGE } type ProjectTypeSwipeStatsType { diff --git a/docker-compose.tc.yaml b/docker-compose.tc.yaml index 210c09570..62475284e 100644 --- a/docker-compose.tc.yaml +++ b/docker-compose.tc.yaml @@ -33,6 +33,7 @@ x-mapswipe-workers: &base_mapswipe_workers SLACK_CHANNEL: '${SLACK_CHANNEL}' SENTRY_DSN: '${SENTRY_DSN}' OSMCHA_API_KEY: '${OSMCHA_API_KEY}' + MAPILLARY_API_KEY: '${MAPILLARY_API_KEY}' depends_on: - postgres volumes: diff --git a/docker-compose.yaml b/docker-compose.yaml index d465a704c..f015a71a4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -230,15 +230,23 @@ services: OSM_OAUTH_API_URL: '${OSM_OAUTH_API_URL}' OSM_OAUTH_CLIENT_ID: '${OSM_OAUTH_CLIENT_ID}' OSM_OAUTH_CLIENT_SECRET: '${OSM_OAUTH_CLIENT_SECRET}' + OSM_OAUTH_REDIRECT_URI_WEB: '${OSM_OAUTH_REDIRECT_URI_WEB}' + OSM_OAUTH_APP_LOGIN_LINK_WEB: '${OSM_OAUTH_APP_LOGIN_LINK_WEB}' + OSM_OAUTH_CLIENT_ID_WEB: '${OSM_OAUTH_CLIENT_ID_WEB}' + OSM_OAUTH_CLIENT_SECRET_WEB: '${OSM_OAUTH_CLIENT_SECRET_WEB}' command: >- sh -c "firebase use $FIREBASE_DB && firebase target:apply hosting auth \"$FIREBASE_AUTH_SITE\" && firebase functions:config:set osm.redirect_uri=\"$OSM_OAUTH_REDIRECT_URI\" + osm.redirect_uri_web=\"$OSM_OAUTH_REDIRECT_URI_WEB\" osm.app_login_link=\"$OSM_OAUTH_APP_LOGIN_LINK\" + osm.app_login_link_web=\"$OSM_OAUTH_APP_LOGIN_LINK_WEB\" osm.api_url=\"$OSM_OAUTH_API_URL\" osm.client_id=\"$OSM_OAUTH_CLIENT_ID\" - osm.client_secret=\"$OSM_OAUTH_CLIENT_SECRET\" && + osm.client_id_web=\"$OSM_OAUTH_CLIENT_ID_WEB\" + osm.client_secret=\"$OSM_OAUTH_CLIENT_SECRET\" + osm.client_secret_web=\"$OSM_OAUTH_CLIENT_SECRET_WEB\" && firebase deploy --token $FIREBASE_TOKEN --only functions,hosting,database" django: diff --git a/docs/source/_static/img/mapswipe-time-calculation.png b/docs/source/_static/img/mapswipe-time-calculation.png new file mode 100644 index 000000000..ebef8f83b Binary files /dev/null and b/docs/source/_static/img/mapswipe-time-calculation.png differ diff --git a/docs/source/diagrams.md b/docs/source/diagrams.md index 8ad2ccc56..8ac15c8a9 100644 --- a/docs/source/diagrams.md +++ b/docs/source/diagrams.md @@ -9,29 +9,56 @@ The Diagrams are drawn using [draw.io](https://.wwww.draw.io). 
You can download --- **Deployment Diagram:** -![Deployment Diagram](/_static/img/deployment_diagram.png) +![Deployment Diagram](_static/img/deployment_diagram.png) --- **Proposed Data Structure Project Type 1 - Firebase:** -![Data Structure - Firebase](/_static/img/data_structure-firebase-1.svg) +![Data Structure - Firebase](_static/img/data_structure-firebase-1.svg) --- **Proposed Data Structure Project Type 2 - Firebase:** -![Data Structure - Firebase](/_static/img/data_structure-firebase-2.svg) +![Data Structure - Firebase](_static/img/data_structure-firebase-2.svg) --- **Database Scheme - Postgres:** -![Database Schema - Postgres](/_static/img/database_schema-postgres.png) +![Database Schema - Postgres](_static/img/database_schema-postgres.png) --- **Entity Relationship Diagram - Postgres:** -![Entity Relationship Diagram- Postgres](/_static/img/entity_relationship_diagram-postgres.png) +![Entity Relationship Diagram- Postgres](_static/img/entity_relationship_diagram-postgres.png) --- **Database Schema - Analytics:** -![Database Schema - Analytics](/_static/img/database_schema-analytics.png) +![Database Schema - Analytics](_static/img/database_schema-analytics.png) + +--- + +**Mapping Sessions - Time Calculation** + +The diagram below is a visual representation of how time is calculated in MapSwipe. + +Step 1: User Mapping Session **sends data** to Firebase +- When a user completes a mapping session in the mobile/web app, the session payload (including start/end timestamps, user ID, session metadata, etc.) is sent in real time to Firebase. + +Step 2: Cron job **fetches data** from Firebase +- Every 3 minutes, a cron job checks Firebase for new session records and pulls them into the backend. + +Step 3: Cron job **saves raw data** to Postgres database +- The cron job writes the new session data to the Postgres database. + +Step 4: Cron job **reads raw data** from Postgres database +- Another cron job reads the raw data from the Postgres database. + +Step 5: Cron job **saves aggregates** to Postgres database +- The cron job aggregates the previous 24 hours of data (end date - start date) and saves the processed, aggregated data back to the Postgres database. + +Step 6: Community dashboard **queries aggregate data** from Postgres database +- The Community dashboard pulls the processed data from the Postgres database and updates the dashboard with up-to-date stats. + + +![MapSwipe Time Calculation](_static/img/mapswipe-time-calculation.png) diff --git a/example.env b/example.env index 2fa92ae33..a4ee41436 100644 --- a/example.env +++ b/example.env @@ -38,10 +38,15 @@ OSMCHA_API_KEY= # OSM OAuth Configuration OSM_OAUTH_REDIRECT_URI= +OSM_OAUTH_REDIRECT_URI_WEB= OSM_OAUTH_API_URL= OSM_OAUTH_CLIENT_ID= +OSM_OAUTH_CLIENT_ID_WEB= OSM_OAUTH_CLIENT_SECRET= -OSM_APP_LOGIN_LINK= +OSM_OAUTH_CLIENT_SECRET_WEB= +OSM_OAUTH_APP_LOGIN_LINK= +OSM_OAUTH_APP_LOGIN_LINK_WEB= + # DJANGO For more info look at django/mapswipe/settings.py::L22 DJANGO_SECRET_KEY= diff --git a/firebase/README.md b/firebase/README.md index fed47268d..c3381abc9 100644 --- a/firebase/README.md +++ b/firebase/README.md @@ -20,6 +20,11 @@ expose the authentication functions publicly.
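To make Step 5 concrete, here is a minimal sketch of how the previous-24-hour window and a matching aggregation query could be built. Table and column names below are hypothetical; the real aggregation SQL lives in `django/apps/aggregated/management/commands/update_aggregated_data.py`.

```python
from datetime import datetime, timedelta, timezone

# Hypothetical table/column names, for illustration only.
AGGREGATION_SQL = """
    INSERT INTO aggregated_user_stat_data (user_id, swipes, time_spent)
    SELECT user_id,
           SUM(swipe_count),
           SUM(EXTRACT(EPOCH FROM (end_time - start_time)))
    FROM mapping_sessions
    WHERE start_time >= %(start)s AND start_time < %(end)s
    GROUP BY user_id;
"""


def aggregation_window(now: datetime) -> dict:
    """Return the previous 24-hour window (end date - start date) for one cron run."""
    end = now.replace(minute=0, second=0, microsecond=0)
    start = end - timedelta(hours=24)
    return {"start": start, "end": end}


params = aggregation_window(datetime.now(timezone.utc))
# cursor.execute(AGGREGATION_SQL, params)  # the cron job would run this against Postgres
```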
* `firebase deploy --only functions,hosting` * `firebase deploy --only database:rules` +## Deploy with Makefile +You can also deploy the changes to Firebase using make: +* Make sure to remove the firebase_deploy docker image first: `docker rmi python-mapswipe-workers-firebase_deploy` +* `make update_firebase_functions_and_db_rules` + ## Notes on OAuth (OSM login) Refer to [the notes in the app repository](https://github.com/mapswipe/mapswipe/blob/master/docs/osm_login.md). @@ -30,12 +35,16 @@ Some specifics about the related functions: - Before deploying, set the required firebase config values in environment: FIXME: replace env vars with config value names - OSM_OAUTH_REDIRECT_URI `osm.redirect_uri`: `https://dev-auth.mapswipe.org/token` or `https://auth.mapswipe.org/token` + - OSM_OAUTH_REDIRECT_URI_WEB: `https://dev-auth.mapswipe.org/tokenweb` or `https://auth.mapswipe.org/tokenweb` - OSM_OAUTH_APP_LOGIN_LINK `osm.app_login_link`: 'devmapswipe://login/osm' or 'mapswipe://login/osm' + - OSM_OAUTH_APP_LOGIN_LINK_WEB: `https://web.mapswipe.org/dev/#/osm-callback` or `https://web.mapswipe.org/#/osm-callback` - OSM_OAUTH_API_URL `osm.api_url`: 'https://master.apis.dev.openstreetmap.org/' or 'https://www.openstreetmap.org/' (include the trailing slash) - OSM_OAUTH_CLIENT_ID `osm.client_id`: find it on the OSM application page - OSM_OAUTH_CLIENT_SECRET `osm.client_secret`: same as above. Note that this can only be seen once when the application is created. Do not lose it! + - OSM_OAUTH_CLIENT_ID_WEB: This is the ID of a __different__ registered OSM OAuth client for the web version that needs to have `https://dev-auth.mapswipe.org/tokenweb` or `https://auth.mapswipe.org/tokenweb` set as redirect URI. + - OSM_OAUTH_CLIENT_SECRET_WEB: This is the secret of the OSM OAuth client for MapSwipe web version. - Deploy the functions as explained above - Expose the functions publicly through firebase hosting, this is done in `/firebase/firebase.json` under the `hosting` key. diff --git a/firebase/firebase.json b/firebase/firebase.json index 4c56a3044..b81c02219 100644 --- a/firebase/firebase.json +++ b/firebase/firebase.json @@ -20,6 +20,14 @@ { "source": "/token", "function": "osmAuth-token" + }, + { + "source": "/redirectweb", + "function": "osmAuth-redirectweb" + }, + { + "source": "/tokenweb", + "function": "osmAuth-tokenweb" } ] }, diff --git a/firebase/functions/src/index.ts b/firebase/functions/src/index.ts index 02a70dd37..c448cadc2 100644 --- a/firebase/functions/src/index.ts +++ b/firebase/functions/src/index.ts @@ -8,7 +8,7 @@ admin.initializeApp(); // all functions are bundled together. It's less than ideal, but it does not // seem possible to split them using the split system for multiple sites from // https://firebase.google.com/docs/hosting/multisites -import {redirect, token} from './osm_auth'; +import {redirect, token, redirectweb, tokenweb} from './osm_auth'; import { formatProjectTopic, formatUserName } from './utils'; exports.osmAuth = {}; @@ -23,6 +23,14 @@ exports.osmAuth.token = functions.https.onRequest((req, res) => { token(req, res, admin); }); +exports.osmAuth.redirectweb = functions.https.onRequest((req, res) => { + redirectweb(req, res); +}); + +exports.osmAuth.tokenweb = functions.https.onRequest((req, res) => { + tokenweb(req, res, admin); +}); + /* Log the userIds of all users who finished a group to /v2/userGroups/{projectId}/{groupId}/. Gets triggered when new results of a group are written to the database. 
@@ -42,23 +50,46 @@ exports.groupUsersCounter = functions.database.ref('/v2/results/{projectId}/{gro const thisResultRef = admin.database().ref('/v2/results/' + context.params.projectId + '/' + context.params.groupId + '/' + context.params.userId ); const userGroupsRef = admin.database().ref('/v2/userGroups/'); + let appVersionString: string | undefined | null = undefined; + + type Args = Record + // eslint-disable-next-line require-jsdoc + function logger(message: string, extraArgs: Args = {}, logFunction: (typeof console.log) = console.log) { + const ctx: Args = { + message: message, + ...extraArgs, + project: context.params.projectId, + user: context.params.userId, + group: context.params.groupId, + version: appVersionString, + }; + const items = Object.keys(ctx).reduce( + (acc, key) => { + const value = ctx[key]; + if (value === undefined || value === null || value === '') { + return acc; + } + const item = `${key}[${value}]`; + return [...acc, item]; + }, + [] + ); + logFunction(items.join(' ')); + } // Check for specific user ids which have been identified as problematic. // These users have repeatedly uploaded harmful results. // Add new user ids to this list if needed. const userIds: string[] = []; - if ( userIds.includes(context.params.userId) ) { - console.log('suspicious user: ' + context.params.userId); - console.log('will remove this result and not update counters'); + if (userIds.includes(context.params.userId) ) { + console.log('Result removed because of suspicious user activity'); return thisResultRef.remove(); } const result = snapshot.val(); - - // New versions of app will have the appVersion defined (> 2.2.5) // appVersion: 2.2.5 (14)-dev - const appVersionString = result.appVersion as string | undefined | null; + appVersionString = result.appVersion; // Check if the app is of older version // (no need to check for specific version since old app won't sent the version info) @@ -68,11 +99,11 @@ exports.groupUsersCounter = functions.database.ref('/v2/results/{projectId}/{gro if (dataSnapshot.exists()) { const project = dataSnapshot.val(); - // Check if project type is validate and also has + // Check if project type is 'validate' and also has // custom options (i.e. these are new type of projects) if (project.projectType === 2 && project.customOptions) { // We remove the results submitted from older version of app (< v2.2.6) - console.info(`Result submitted for ${context.params.projectId} was discarded: submitted from older version of app`); + logger('Result removed because it was submitted from an older version', undefined, console.error); return thisResultRef.remove(); } } @@ -81,16 +112,13 @@ exports.groupUsersCounter = functions.database.ref('/v2/results/{projectId}/{gro // if result ref does not contain all required attributes we don't updated counters // e.g. 
due to some error when uploading from client if (!Object.prototype.hasOwnProperty.call(result, 'results')) { - console.log('no results attribute for ' + snapshot.ref); - console.log('will not update counters'); + logger('Not updating counters because results attribute was not found.', { result: String(snapshot.ref) }, console.error); return null; } else if (!Object.prototype.hasOwnProperty.call(result, 'endTime')) { - console.log('no endTime attribute for ' + snapshot.ref); - console.log('will not update counters'); + logger('Not updating counters because endTime attribute was not found.', { result: String(snapshot.ref) }, console.error); return null; } else if (!Object.prototype.hasOwnProperty.call(result, 'startTime')) { - console.log('no startTime attribute for ' + snapshot.ref); - console.log('will not update counters'); + logger('Not updating counters because startTime attribute was not found.', { result: String(snapshot.ref) }, console.error); return null; } @@ -103,8 +131,7 @@ exports.groupUsersCounter = functions.database.ref('/v2/results/{projectId}/{gro const mappingSpeed = (endTime - startTime) / numberOfTasks; if (mappingSpeed < 0.125) { // this about 8-times faster than the average time needed per task - console.log('unlikely high mapping speed: ' + mappingSpeed); - console.log('will remove this result and not update counters'); + logger('Result removed because of unlikely high mapping speed', { mappingSpeed: mappingSpeed }, console.warn); return thisResultRef.remove(); } @@ -117,10 +144,12 @@ exports.groupUsersCounter = functions.database.ref('/v2/results/{projectId}/{gro */ const dataSnapshot = await groupUsersRef.child(context.params.userId).once('value'); if (dataSnapshot.exists()) { - console.log('group contribution exists already. user: '+context.params.userId+' project: '+context.params.projectId+' group: '+context.params.groupId); + logger('Group contribution already exists.'); return null; } + // Update contributions + const latestNumberOfTasks = Object.keys(result['results']).length; await Promise.all([ userContributionRef.child(context.params.groupId).set(true), @@ -136,8 +165,8 @@ exports.groupUsersCounter = functions.database.ref('/v2/results/{projectId}/{gro }), ]); - // Tag userGroups of the user in the result + const userGroupsOfTheUserSnapshot = await userRef.child('userGroups').once('value'); if (!userGroupsOfTheUserSnapshot.exists()) { return null; diff --git a/firebase/functions/src/osm_auth.ts b/firebase/functions/src/osm_auth.ts index d187b4e4f..9953f2ea9 100644 --- a/firebase/functions/src/osm_auth.ts +++ b/firebase/functions/src/osm_auth.ts @@ -1,4 +1,4 @@ -// Firebase cloud functions to allow authentication with OpenStreet Map +// Firebase cloud functions to allow authentication with OpenStreetMap // // There are really 2 functions, which must be publicly accessible via // an https endpoint. 
They can be hosted on firebase under a domain like @@ -20,8 +20,10 @@ import axios from 'axios'; // will get a cryptic error about the server not being able to continue // TODO: adjust the prefix based on which deployment is done (prod/dev) const OAUTH_REDIRECT_URI = functions.config().osm?.redirect_uri; +const OAUTH_REDIRECT_URI_WEB = functions.config().osm?.redirect_uri_web; const APP_OSM_LOGIN_DEEPLINK = functions.config().osm?.app_login_link; +const APP_OSM_LOGIN_DEEPLINK_WEB = functions.config().osm?.app_login_link_web; // the scope is taken from https://wiki.openstreetmap.org/wiki/OAuth#OAuth_2.0 // at least one seems to be required for the auth workflow to complete. @@ -36,11 +38,11 @@ const OSM_API_URL = functions.config().osm?.api_url; * Configure the `osm.client_id` and `osm.client_secret` * Google Cloud environment variables for the values below to exist */ -function osmOAuth2Client() { +function osmOAuth2Client(client_id: any, client_secret: any) { const credentials = { client: { - id: functions.config().osm?.client_id, - secret: functions.config().osm?.client_secret, + id: client_id, + secret: client_secret, }, auth: { tokenHost: OSM_API_URL, @@ -58,8 +60,8 @@ function osmOAuth2Client() { * NOT a webview inside MapSwipe, as this would break the promise of * OAuth that we do not touch their OSM credentials */ -export const redirect = (req: any, res: any) => { - const oauth2 = osmOAuth2Client(); +function redirect2OsmOauth(req: any, res: any, redirect_uri: string, client_id: string, client_secret: string) { + const oauth2 = osmOAuth2Client(client_id, client_secret); cookieParser()(req, res, () => { const state = @@ -75,17 +77,31 @@ export const redirect = (req: any, res: any) => { httpOnly: true, }); const redirectUri = oauth2.authorizationCode.authorizeURL({ - redirect_uri: OAUTH_REDIRECT_URI, + redirect_uri: redirect_uri, scope: OAUTH_SCOPES, state: state, }); functions.logger.log('Redirecting to:', redirectUri); res.redirect(redirectUri); }); +} + +export const redirect = (req: any, res: any) => { + const redirect_uri = OAUTH_REDIRECT_URI; + const client_id = functions.config().osm?.client_id; + const client_secret = functions.config().osm?.client_secret; + redirect2OsmOauth(req, res, redirect_uri, client_id, client_secret); +}; + +export const redirectweb = (req: any, res: any) => { + const redirect_uri = OAUTH_REDIRECT_URI_WEB; + const client_id = functions.config().osm?.client_id_web; + const client_secret = functions.config().osm?.client_secret_web; + redirect2OsmOauth(req, res, redirect_uri, client_id, client_secret); }; /** - * The OSM OAuth endpoing does not give us any info about the user, + * The OSM OAuth endpoint does not give us any info about the user, * so we need to get the user profile from this endpoint */ async function getOSMProfile(accessToken: string) { @@ -107,8 +123,8 @@ async function getOSMProfile(accessToken: string) { * The Firebase custom auth token, display name, photo URL and OSM access * token are sent back to the app via a deeplink redirect. 
*/ -export const token = async (req: any, res: any, admin: any) => { - const oauth2 = osmOAuth2Client(); +function fbToken(req: any, res: any, admin: any, redirect_uri: string, osm_login_link: string, client_id: string, client_web: string) { + const oauth2 = osmOAuth2Client(client_id, client_web); try { return cookieParser()(req, res, async () => { @@ -139,7 +155,7 @@ export const token = async (req: any, res: any, admin: any) => { // this doesn't work results = await oauth2.authorizationCode.getToken({ code: req.query.code, - redirect_uri: OAUTH_REDIRECT_URI, + redirect_uri: redirect_uri, scope: OAUTH_SCOPES, state: req.query.state, }); @@ -177,7 +193,7 @@ export const token = async (req: any, res: any, admin: any) => { ); // build a deep link so we can send the token back to the app // from the browser - const signinUrl = `${APP_OSM_LOGIN_DEEPLINK}?token=${firebaseToken}`; + const signinUrl = `${osm_login_link}?token=${firebaseToken}`; functions.logger.log('redirecting user to', signinUrl); res.redirect(signinUrl); }); @@ -187,6 +203,22 @@ export const token = async (req: any, res: any, admin: any) => { // back into the app to allow the user to take action return res.json({ error: error.toString() }); } +} + +export const token = async (req: any, res: any, admin: any) => { + const redirect_uri = OAUTH_REDIRECT_URI; + const osm_login_link = APP_OSM_LOGIN_DEEPLINK; + const client_id = functions.config().osm?.client_id; + const client_secret = functions.config().osm?.client_secret; + fbToken(req, res, admin, redirect_uri, osm_login_link, client_id, client_secret); +}; + +export const tokenweb = async (req: any, res: any, admin: any) => { + const redirect_uri = OAUTH_REDIRECT_URI_WEB; + const osm_login_link = APP_OSM_LOGIN_DEEPLINK_WEB; + const client_id = functions.config().osm?.client_id_web; + const client_secret = functions.config().osm?.client_secret_web; + fbToken(req, res, admin, redirect_uri, osm_login_link, client_id, client_secret); }; /** @@ -204,23 +236,18 @@ async function createFirebaseAccount(admin: any, osmID: any, displayName: any, a // with a variable length. const uid = `osm:${osmID}`; + const profileRef = admin.database().ref(`v2/users/${uid}`); + + // check if profile exists on Firebase Realtime Database + const snapshot = await profileRef.once('value'); + const profileExists = snapshot.exists(); + // Save the access token to the Firebase Realtime Database. const databaseTask = admin .database() .ref(`v2/OSMAccessToken/${uid}`) .set(accessToken); - const profileTask = admin - .database() - .ref(`v2/users/${uid}/`) - .set({ - created: new Date().toISOString(), - groupContributionCount: 0, - projectContributionCount: 0, - taskContributionCount: 0, - displayName, - }); - // Create or update the firebase user account. // This does not login the user on the app, it just ensures that a firebase // user account (linked to the OSM account) exists. 
@@ -240,8 +267,27 @@ async function createFirebaseAccount(admin: any, osmID: any, displayName: any, a throw error; }); + // If profile exists, only update displayName -- else create new user profile + const tasks = [userCreationTask, databaseTask]; + if (profileExists) { + functions.logger.log('Sign in to existing OSM profile'); + const profileUpdateTask = profileRef.update({ displayName: displayName }); + tasks.push(profileUpdateTask); + } else { + functions.logger.log('Sign up new OSM profile'); + const profileCreationTask = profileRef + .set({ + created: new Date().toISOString(), + groupContributionCount: 0, + projectContributionCount: 0, + taskContributionCount: 0, + displayName, + }); + tasks.push(profileCreationTask); + } + // Wait for all async task to complete then generate and return a custom auth token. - await Promise.all([userCreationTask, databaseTask, profileTask]); + await Promise.all(tasks); // Create a Firebase custom auth token. functions.logger.log('In createFirebaseAccount: createCustomToken'); let authToken; diff --git a/manager-dashboard/app/Base/configs/projectTypes.ts b/manager-dashboard/app/Base/configs/projectTypes.ts index e2f7f74eb..e6344d507 100644 --- a/manager-dashboard/app/Base/configs/projectTypes.ts +++ b/manager-dashboard/app/Base/configs/projectTypes.ts @@ -5,6 +5,7 @@ import { PROJECT_TYPE_CHANGE_DETECTION, PROJECT_TYPE_STREET, PROJECT_TYPE_COMPLETENESS, + PROJECT_TYPE_VALIDATE_IMAGE, } from '#utils/common'; const PROJECT_CONFIG_NAME = process.env.REACT_APP_PROJECT_CONFIG_NAME as string; @@ -15,6 +16,7 @@ const mapswipeProjectTypeOptions: { }[] = [ { value: PROJECT_TYPE_BUILD_AREA, label: 'Find' }, { value: PROJECT_TYPE_FOOTPRINT, label: 'Validate' }, + { value: PROJECT_TYPE_VALIDATE_IMAGE, label: 'Validate Image' }, { value: PROJECT_TYPE_CHANGE_DETECTION, label: 'Compare' }, { value: PROJECT_TYPE_STREET, label: 'Street' }, { value: PROJECT_TYPE_COMPLETENESS, label: 'Completeness' }, diff --git a/manager-dashboard/app/Base/styles.css b/manager-dashboard/app/Base/styles.css index c746dc570..87052b68a 100644 --- a/manager-dashboard/app/Base/styles.css +++ b/manager-dashboard/app/Base/styles.css @@ -105,6 +105,7 @@ p { --height-mobile-preview-builarea-content: 30rem; --height-mobile-preview-footprint-content: 22rem; --height-mobile-preview-change-detection-content: 14rem; + --height-mobile-preview-validate-image-content: 22rem; --radius-popup-border: 0.25rem; --radius-scrollbar-border: 0.25rem; diff --git a/manager-dashboard/app/components/Calendar/CalendarDate/index.tsx b/manager-dashboard/app/components/Calendar/CalendarDate/index.tsx index 2b1fed1a0..2520a21f1 100644 --- a/manager-dashboard/app/components/Calendar/CalendarDate/index.tsx +++ b/manager-dashboard/app/components/Calendar/CalendarDate/index.tsx @@ -2,7 +2,7 @@ import React from 'react'; import { _cs } from '@togglecorp/fujs'; import RawButton, { Props as RawButtonProps } from '../../RawButton'; -import { ymdToDateString, typedMemo } from '../../../utils/common.tsx'; +import { ymdToDateString, typedMemo } from '../../../utils/common'; import styles from './styles.css'; diff --git a/manager-dashboard/app/components/Calendar/index.tsx b/manager-dashboard/app/components/Calendar/index.tsx index d72f9054d..21bd7de4f 100644 --- a/manager-dashboard/app/components/Calendar/index.tsx +++ b/manager-dashboard/app/components/Calendar/index.tsx @@ -15,7 +15,7 @@ import Button from '../Button'; import NumberInput from '../NumberInput'; import SelectInput from '../SelectInput'; import useInputState from 
'../../hooks/useInputState'; -import { typedMemo } from '../../utils/common.tsx'; +import { typedMemo } from '../../utils/common'; import CalendarDate, { Props as CalendarDateProps } from './CalendarDate'; diff --git a/manager-dashboard/app/components/CocoFileInput/index.tsx b/manager-dashboard/app/components/CocoFileInput/index.tsx new file mode 100644 index 000000000..125e0c592 --- /dev/null +++ b/manager-dashboard/app/components/CocoFileInput/index.tsx @@ -0,0 +1,80 @@ +import React from 'react'; +import * as t from 'io-ts'; +import { isRight } from 'fp-ts/Either'; + +import JsonFileInput, { Props as JsonFileInputProps } from '#components/JsonFileInput'; + +const Image = t.type({ + id: t.number, + // width: t.number, + // height: t.number, + file_name: t.string, + // license: t.union([t.number, t.undefined]), + flickr_url: t.union([t.string, t.undefined]), + coco_url: t.union([t.string, t.undefined]), + // date_captured: DateFromISOString, +}); + +const CocoDataset = t.type({ + // info: Info, + // licenses: t.array(License), + images: t.array(Image), + // annotations: t.array(Annotation), + // categories: t.array(Category) +}); +export type CocoDatasetType = t.TypeOf + +interface Props extends Omit, 'onChange' | 'value'> { + value: CocoDatasetType | undefined; + maxLength: number; + onChange: (newValue: CocoDatasetType | undefined, name: N) => void; +} +function CocoFileInput(props: Props) { + const { + name, + onChange, + error, + maxLength, + ...otherProps + } = props; + + const [ + internalErrorMessage, + setInternalErrorMessage, + ] = React.useState(); + + const handleChange = React.useCallback( + (val) => { + const result = CocoDataset.decode(val); + if (!isRight(result)) { + // eslint-disable-next-line no-console + console.error('Invalid COCO format', result.left); + setInternalErrorMessage('Invalid COCO format'); + return; + } + if (result.right.images.length > maxLength) { + setInternalErrorMessage(`Too many images ${result.right.images.length} uploaded. 
Please do not exceed ${maxLength} images.`); + return; + } + const uniqueIdentifiers = new Set(result.right.images.map((item) => item.id)); + if (uniqueIdentifiers.size < result.right.images.length) { + setInternalErrorMessage('Each image should have a unique id.'); + return; + } + setInternalErrorMessage(undefined); + onChange(result.right, name); + }, + [onChange, maxLength, name], + ); + + return ( + + ); +} + +export default CocoFileInput; diff --git a/manager-dashboard/app/components/DateRangeInput/index.tsx b/manager-dashboard/app/components/DateRangeInput/index.tsx index 6442fc835..0b25782ee 100644 --- a/manager-dashboard/app/components/DateRangeInput/index.tsx +++ b/manager-dashboard/app/components/DateRangeInput/index.tsx @@ -19,7 +19,7 @@ import Button from '../Button'; import Popup from '../Popup'; import Calendar, { Props as CalendarProps } from '../Calendar'; import CalendarDate, { Props as CalendarDateProps } from '../Calendar/CalendarDate'; -import { ymdToDateString, dateStringToDate } from '../../utils/common.tsx'; +import { ymdToDateString, dateStringToDate } from '../../utils/common'; import { predefinedDateRangeOptions, diff --git a/manager-dashboard/app/components/InputSection/styles.css b/manager-dashboard/app/components/InputSection/styles.css index 0c0012c77..f729ff036 100644 --- a/manager-dashboard/app/components/InputSection/styles.css +++ b/manager-dashboard/app/components/InputSection/styles.css @@ -24,7 +24,7 @@ display: flex; flex-direction: column; border-radius: var(--radius-card-border); - gap: var(--spacing-extra-large); + gap: var(--spacing-large); background-color: var(--color-foreground); padding: var(--spacing-large); min-height: 14rem; diff --git a/manager-dashboard/app/components/JsonFileInput/index.tsx b/manager-dashboard/app/components/JsonFileInput/index.tsx index bda27a599..023abde95 100644 --- a/manager-dashboard/app/components/JsonFileInput/index.tsx +++ b/manager-dashboard/app/components/JsonFileInput/index.tsx @@ -23,7 +23,7 @@ function readUploadedFileAsText(inputFile: File) { const ONE_MB = 1024 * 1024; const DEFAULT_MAX_FILE_SIZE = ONE_MB; -interface Props extends Omit, 'value' | 'onChange' | 'accept'> { +export interface Props extends Omit, 'value' | 'onChange' | 'accept'> { maxFileSize?: number; value: T | undefined | null; onChange: (newValue: T | undefined, name: N) => void; diff --git a/manager-dashboard/app/utils/common.tsx b/manager-dashboard/app/utils/common.tsx index 53338d34f..ea4f777fa 100644 --- a/manager-dashboard/app/utils/common.tsx +++ b/manager-dashboard/app/utils/common.tsx @@ -66,8 +66,9 @@ export const PROJECT_TYPE_FOOTPRINT = 2; export const PROJECT_TYPE_CHANGE_DETECTION = 3; export const PROJECT_TYPE_COMPLETENESS = 4; export const PROJECT_TYPE_STREET = 7; +export const PROJECT_TYPE_VALIDATE_IMAGE = 10; -export type ProjectType = 1 | 2 | 3 | 4 | 7; +export type ProjectType = 1 | 2 | 3 | 4 | 7 | 10; export const projectTypeLabelMap: { [key in ProjectType]: string @@ -77,6 +78,7 @@ export const projectTypeLabelMap: { [PROJECT_TYPE_CHANGE_DETECTION]: 'Compare', [PROJECT_TYPE_COMPLETENESS]: 'Completeness', [PROJECT_TYPE_STREET]: 'Street', + [PROJECT_TYPE_VALIDATE_IMAGE]: 'Validate Image', }; export type IconKey = 'add-outline' diff --git a/manager-dashboard/app/views/NewProject/ImageInput/index.tsx b/manager-dashboard/app/views/NewProject/ImageInput/index.tsx new file mode 100644 index 000000000..93f4dcde2 --- /dev/null +++ b/manager-dashboard/app/views/NewProject/ImageInput/index.tsx @@ -0,0 +1,73 @@ +import React from 
'react'; + +import { + SetValueArg, + Error, + useFormObject, + getErrorObject, +} from '@togglecorp/toggle-form'; +import TextInput from '#components/TextInput'; + +import { + ImageType, +} from '../utils'; + +import styles from './styles.css'; + +const defaultImageValue: ImageType = { + sourceIdentifier: '', +}; + +interface Props { + value: ImageType; + onChange: (value: SetValueArg, index: number) => void | undefined; + index: number; + error: Error | undefined; + disabled?: boolean; + readOnly?: boolean; +} + +export default function ImageInput(props: Props) { + const { + value, + onChange, + index, + error: riskyError, + disabled, + readOnly, + } = props; + + const onImageChange = useFormObject(index, onChange, defaultImageValue); + + const error = getErrorObject(riskyError); + + return ( +
+ + + +
+ ); +} diff --git a/manager-dashboard/app/views/NewProject/ImageInput/styles.css b/manager-dashboard/app/views/NewProject/ImageInput/styles.css new file mode 100644 index 000000000..a6e6f1707 --- /dev/null +++ b/manager-dashboard/app/views/NewProject/ImageInput/styles.css @@ -0,0 +1,5 @@ +.image-input { + display: flex; + flex-direction: column; + gap: var(--spacing-medium); +} diff --git a/manager-dashboard/app/views/NewProject/index.tsx b/manager-dashboard/app/views/NewProject/index.tsx index 8c378597c..cbc94b5ad 100644 --- a/manager-dashboard/app/views/NewProject/index.tsx +++ b/manager-dashboard/app/views/NewProject/index.tsx @@ -3,6 +3,7 @@ import { _cs, isDefined, isNotDefined, + randomString, } from '@togglecorp/fujs'; import { useForm, @@ -10,6 +11,7 @@ import { createSubmitHandler, analyzeErrors, nonFieldError, + useFormArray, } from '@togglecorp/toggle-form'; import { getStorage, @@ -29,7 +31,11 @@ import { import { MdOutlinePublishedWithChanges, MdOutlineUnpublished, + MdAdd, } from 'react-icons/md'; +import { + IoIosTrash, +} from 'react-icons/io'; import { Link } from 'react-router-dom'; import UserContext from '#base/context/UserContext'; @@ -40,6 +46,7 @@ import TextInput from '#components/TextInput'; import NumberInput from '#components/NumberInput'; import SegmentInput from '#components/SegmentInput'; import GeoJsonFileInput from '#components/GeoJsonFileInput'; +import CocoFileInput, { CocoDatasetType } from '#components/CocoFileInput'; import TileServerInput, { TILE_SERVER_BING, TILE_SERVER_ESRI, @@ -48,6 +55,7 @@ import TileServerInput, { import InputSection from '#components/InputSection'; import Button from '#components/Button'; import NonFieldError from '#components/NonFieldError'; +import EmptyMessage from '#components/EmptyMessage'; import AnimatedSwipeIcon from '#components/AnimatedSwipeIcon'; import ExpandableContainer from '#components/ExpandableContainer'; import AlertBanner from '#components/AlertBanner'; @@ -60,6 +68,7 @@ import { ProjectInputType, PROJECT_TYPE_BUILD_AREA, PROJECT_TYPE_FOOTPRINT, + PROJECT_TYPE_VALIDATE_IMAGE, PROJECT_TYPE_COMPLETENESS, PROJECT_TYPE_CHANGE_DETECTION, PROJECT_TYPE_STREET, @@ -73,6 +82,7 @@ import CustomOptionPreview from '#views/NewTutorial/CustomOptionInput/CustomOpti import { projectFormSchema, ProjectFormType, + ImageType, PartialProjectFormType, projectInputTypeOptions, filterOptions, @@ -84,8 +94,10 @@ import { getGroupSize, validateAoiOnOhsome, validateProjectIdOnHotTaskingManager, + MAX_IMAGES, } from './utils'; import BasicProjectInfoForm from './BasicProjectInfoForm'; +import ImageInput from './ImageInput'; // eslint-disable-next-line postcss-modules/no-unused-class import styles from './styles.css'; @@ -108,6 +120,7 @@ const defaultProjectFormValue: PartialProjectFormType = { // maxTasksPerUser: -1, inputType: PROJECT_INPUT_TYPE_UPLOAD, filter: FILTER_BUILDINGS, + randomizeOrder: false, panoOnly: false, }; @@ -447,11 +460,67 @@ function NewProject(props: Props) { })), }))), [customOptionsFromValue]); - const optionsError = React.useMemo( + const customOptionsError = React.useMemo( () => getErrorObject(error?.customOptions), [error?.customOptions], ); + const { images } = value; + + const imagesError = React.useMemo( + () => getErrorObject(error?.images), + [error?.images], + ); + + const { + setValue: setImageValue, + removeValue: onImageRemove, + } = useFormArray< + 'images', + ImageType + >('images', setFieldValue); + + const handleCocoImport = React.useCallback( + (val: CocoDatasetType | undefined) => { + if 
(isNotDefined(val)) { + setFieldValue( + [], + 'images', + ); + return; + } + setFieldValue( + () => val.images.map((image) => ({ + sourceIdentifier: String(image.id), + fileName: image.file_name, + url: image.flickr_url || image.coco_url, + })), + 'images', + ); + }, + [setFieldValue], + ); + + const handleAddImage = React.useCallback( + () => { + setFieldValue( + (oldValue: PartialProjectFormType['images']) => { + const safeOldValues = oldValue ?? []; + + const newDefineOption: ImageType = { + sourceIdentifier: randomString(), + }; + + return [...safeOldValues, newDefineOption]; + }, + 'images', + ); + }, + [ + setFieldValue, + ], + ); + // eslint-disable-next-line @typescript-eslint/no-empty-function const noOp = () => {}; @@ -491,8 +560,84 @@ function NewProject(props: Props) { disabled={submissionPending || projectTypeEmpty} /> + {(value.projectType === PROJECT_TYPE_VALIDATE_IMAGE) && ( + + + + )} + > + + + {(images && images.length > 0) ? ( +
+ {images.map((image, index) => ( + + + + )} + > + + + ))} +
+ ) : ( + + )} +
+ )} {( (value.projectType === PROJECT_TYPE_FOOTPRINT + || value.projectType === PROJECT_TYPE_VALIDATE_IMAGE || value.projectType === PROJECT_TYPE_STREET) && customOptions && customOptions.length > 0 @@ -501,7 +646,7 @@ function NewProject(props: Props) { heading="Custom Options" > {(customOptions && customOptions.length > 0) ? (
@@ -516,7 +661,7 @@ value={option} index={index} onChange={noOp} - error={optionsError?.[option.value]} + error={customOptionsError?.[option.value]} readOnly /> @@ -742,7 +887,7 @@ value={value?.organizationId} onChange={setFieldValue} error={error?.organizationId} - label="Mapillary Organization ID" + label="Mapillary Organization ID" hint="Provide a valid Mapillary organization ID to filter for images belonging to a specific organization. Empty indicates that no filter is set on organization." disabled={submissionPending || projectTypeEmpty} /> @@ -763,6 +908,13 @@ onChange={setFieldValue} disabled={submissionPending || projectTypeEmpty} /> +
)} diff --git a/manager-dashboard/app/views/NewProject/styles.css b/manager-dashboard/app/views/NewProject/styles.css index cbfa76230..45aedf1bc 100644 --- a/manager-dashboard/app/views/NewProject/styles.css +++ b/manager-dashboard/app/views/NewProject/styles.css @@ -13,6 +13,14 @@ max-width: 70rem; gap: var(--spacing-large); + + .image-list { + display: flex; + flex-direction: column; + flex-grow: 1; + gap: var(--spacing-medium); + } + .custom-option-container { display: flex; gap: var(--spacing-large); diff --git a/manager-dashboard/app/views/NewProject/utils.ts b/manager-dashboard/app/views/NewProject/utils.ts index 1d237a4c8..e2ac2731f 100644 --- a/manager-dashboard/app/views/NewProject/utils.ts +++ b/manager-dashboard/app/views/NewProject/utils.ts @@ -34,6 +34,7 @@ import { ProjectInputType, PROJECT_TYPE_BUILD_AREA, PROJECT_TYPE_FOOTPRINT, + PROJECT_TYPE_VALIDATE_IMAGE, PROJECT_TYPE_CHANGE_DETECTION, PROJECT_TYPE_COMPLETENESS, PROJECT_TYPE_STREET, @@ -68,13 +69,14 @@ export interface ProjectFormType { projectImage: File; // image verificationNumber: number; groupSize: number; + maxTasksPerUser: number; + zoomLevel: number; geometry?: GeoJSON.GeoJSON | string; inputType?: ProjectInputType; TMId?: string; filter?: string; filterText?: string; - maxTasksPerUser: number; tileServer: TileServer; tileServerB?: TileServer; customOptions?: CustomOptionsForProject; @@ -83,9 +85,15 @@ export interface ProjectFormType { endTimestamp?: string | null; organizationId?: number; creatorId?: number; + randomizeOrder?: boolean; panoOnly?: boolean; isPano?: boolean | null; samplingThreshold?: number; + images?: { + sourceIdentifier: string; + fileName: string; + url: string; + }[]; } export const PROJECT_INPUT_TYPE_UPLOAD = 'aoi_file'; @@ -114,9 +122,11 @@ export const filterOptions = [ export type PartialProjectFormType = PartialForm< Omit & { projectImage?: File }, // NOTE: we do not want to change File and FeatureCollection to partials - 'geometry' | 'projectImage' | 'value' + 'geometry' | 'projectImage' | 'value' | 'sourceIdentifier' >; +export type ImageType = NonNullable[number]; + type ProjectFormSchema = ObjectSchema; type ProjectFormSchemaFields = ReturnType; @@ -126,6 +136,12 @@ type CustomOptionSchemaFields = ReturnType type CustomOptionFormSchema = ArraySchema; type CustomOptionFormSchemaMember = ReturnType; +type PartialImages = NonNullable[number]; +type ImageSchema = ObjectSchema; +type ImageSchemaFields = ReturnType +type ImageFormSchema = ArraySchema; +type ImageFormSchemaMember = ReturnType; + // FIXME: break this into multiple geometry conditions const DEFAULT_MAX_FEATURES = 20; // const DEFAULT_MAX_FEATURES = 10; @@ -193,6 +209,8 @@ function validGeometryCondition(zoomLevel: number | undefined | null) { return validGeometryConditionForZoom; } +export const MAX_IMAGES = 2000; + export const MAX_OPTIONS = 6; export const MIN_OPTIONS = 2; export const MAX_SUB_OPTIONS = 6; @@ -274,53 +292,25 @@ export const projectFormSchema: ProjectFormSchema = { lessThanOrEqualToCondition(250), ], }, - tileServer: { - fields: tileServerFieldsSchema, - }, maxTasksPerUser: { validations: [ integerCondition, greaterThanCondition(0), ], }, - dateRange: { - required: false, - }, - creatorId: { - required: false, - validations: [ - integerCondition, - greaterThanCondition(0), - ], - }, - organizationId: { - required: false, - validations: [ - integerCondition, - greaterThanCondition(0), - ], - }, - samplingThreshold: { - required: false, - validation: [ - greaterThanCondition(0), - ], - }, - panoOnly: { - 
required: false, - }, - isPano: { - required: false, - }, }; + // Common + baseSchema = addCondition( baseSchema, value, ['projectType'], ['customOptions'], (formValues) => { - if (formValues?.projectType === PROJECT_TYPE_FOOTPRINT) { + if (formValues?.projectType === PROJECT_TYPE_FOOTPRINT + || formValues?.projectType === PROJECT_TYPE_VALIDATE_IMAGE + || formValues?.projectType === PROJECT_TYPE_STREET) { return { customOptions: { keySelector: (key) => key.value, @@ -383,8 +373,8 @@ export const projectFormSchema: ProjectFormSchema = { const projectType = v?.projectType; if ( projectType === PROJECT_TYPE_BUILD_AREA - || projectType === PROJECT_TYPE_COMPLETENESS || projectType === PROJECT_TYPE_CHANGE_DETECTION + || projectType === PROJECT_TYPE_COMPLETENESS ) { return { zoomLevel: { @@ -403,24 +393,6 @@ export const projectFormSchema: ProjectFormSchema = { }, ); - baseSchema = addCondition( - baseSchema, - value, - ['projectType'], - ['inputType'], - (v) => { - const projectType = v?.projectType; - if (projectType === PROJECT_TYPE_FOOTPRINT) { - return { - inputType: { required: true }, - }; - } - return { - inputType: { forceValue: nullValue }, - }; - }, - ); - baseSchema = addCondition( baseSchema, value, @@ -432,8 +404,8 @@ export const projectFormSchema: ProjectFormSchema = { const zoomLevel = v?.zoomLevel; if ( projectType === PROJECT_TYPE_BUILD_AREA - || projectType === PROJECT_TYPE_COMPLETENESS || projectType === PROJECT_TYPE_CHANGE_DETECTION + || projectType === PROJECT_TYPE_COMPLETENESS || projectType === PROJECT_TYPE_STREET || (projectType === PROJECT_TYPE_FOOTPRINT && ( inputType === PROJECT_INPUT_TYPE_UPLOAD @@ -478,6 +450,51 @@ export const projectFormSchema: ProjectFormSchema = { }, ); + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['tileServer'], + (v) => { + const projectType = v?.projectType; + if ( + projectType === PROJECT_TYPE_BUILD_AREA + || projectType === PROJECT_TYPE_COMPLETENESS + || projectType === PROJECT_TYPE_CHANGE_DETECTION + || projectType === PROJECT_TYPE_FOOTPRINT + ) { + return { + tileServer: { + fields: tileServerFieldsSchema, + }, + }; + } + return { + tileServer: { forceValue: nullValue }, + }; + }, + ); + + // Validate + + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['inputType'], + (v) => { + const projectType = v?.projectType; + if (projectType === PROJECT_TYPE_FOOTPRINT) { + return { + inputType: { required: true }, + }; + } + return { + inputType: { forceValue: nullValue }, + }; + }, + ); + baseSchema = addCondition( baseSchema, value, @@ -555,6 +572,108 @@ export const projectFormSchema: ProjectFormSchema = { }, ); + // Street + + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['dateRange', 'creatorId', 'organizationId', 'samplingThreshold', 'panoOnly', 'isPano', 'randomizeOrder'], + (formValues) => { + if (formValues?.projectType === PROJECT_TYPE_STREET) { + return { + dateRange: { + required: false, + }, + creatorId: { + required: false, + validations: [ + integerCondition, + greaterThanCondition(0), + ], + }, + organizationId: { + required: false, + validations: [ + integerCondition, + greaterThanCondition(0), + ], + }, + samplingThreshold: { + required: false, + validations: [ + greaterThanCondition(0), + ], + }, + panoOnly: { + required: false, + }, + // FIXME: This is not used. 
+ isPano: { + required: false, + }, + randomizeOrder: { + required: false, + }, + }; + } + return { + dateRange: { forceValue: nullValue }, + creatorId: { forceValue: nullValue }, + organizationId: { forceValue: nullValue }, + samplingThreshold: { forceValue: nullValue }, + panoOnly: { forceValue: nullValue }, + isPano: { forceValue: nullValue }, + randomizeOrder: { forceValue: nullValue }, + }; + }, + ); + + // Validate Image + + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['images'], + (formValues) => { + // FIXME: Add "unique" constraint for sourceIdentifier and fileName + if (formValues?.projectType === PROJECT_TYPE_VALIDATE_IMAGE) { + return { + images: { + keySelector: (key) => key.sourceIdentifier, + validation: (values) => { + if (values && values.length > MAX_IMAGES) { + return `Too many images ${values.length}. Please do not exceed ${MAX_IMAGES} images.`; + } + return undefined; + }, + member: (): ImageFormSchemaMember => ({ + fields: (): ImageSchemaFields => ({ + sourceIdentifier: { + required: true, + requiredValidation: requiredStringCondition, + }, + fileName: { + required: true, + requiredValidation: requiredStringCondition, + }, + url: { + required: true, + requiredValidation: requiredStringCondition, + validations: [urlCondition], + }, + }), + }), + }, + }; + } + return { + images: { forceValue: nullValue }, + }; + }, + ); + return baseSchema; }, }; @@ -583,6 +702,7 @@ export function getGroupSize(projectType: ProjectType | undefined) { } if (projectType === PROJECT_TYPE_FOOTPRINT + || projectType === PROJECT_TYPE_VALIDATE_IMAGE || projectType === PROJECT_TYPE_CHANGE_DETECTION || projectType === PROJECT_TYPE_STREET) { return 25; } @@ -676,7 +796,7 @@ async function fetchAoiFromHotTaskingManager(projectId: number | string): ( let response; try { response = await fetch( - `https://tasking-manager-tm4-production-api.hotosm.org/api/v2/projects/${projectId}/queries/aoi/?as_file=false`, + `https://tasking-manager-production-api.hotosm.org/api/v2/projects/${projectId}/queries/aoi/?as_file=false`, ); } catch { return { diff --git a/manager-dashboard/app/views/NewTutorial/ImageInput/index.tsx b/manager-dashboard/app/views/NewTutorial/ImageInput/index.tsx new file mode 100644 index 000000000..ca10b5806 --- /dev/null +++ b/manager-dashboard/app/views/NewTutorial/ImageInput/index.tsx @@ -0,0 +1,135 @@ +import React, { useMemo } from 'react'; + +import { + SetValueArg, + Error, + useFormObject, + getErrorObject, +} from '@togglecorp/toggle-form'; +import { isNotDefined, isDefined, unique } from '@togglecorp/fujs'; +import TextInput from '#components/TextInput'; +import SelectInput from '#components/SelectInput'; +import NumberInput from '#components/NumberInput'; + +import { + ImageType, + PartialCustomOptionsType, +} from '../utils'; + +import styles from './styles.css'; + +const defaultImageValue: ImageType = { + sourceIdentifier: '', +}; + +interface Props { + value: ImageType; + onChange: (value: SetValueArg, index: number) => void | undefined; + index: number; + error: Error | undefined; + disabled?: boolean; + readOnly?: boolean; + customOptions: PartialCustomOptionsType | undefined; +} + +export default function ImageInput(props: Props) { + const { + value, + onChange, + index, + error: riskyError, + disabled, + readOnly, + customOptions, + } = props; + + const flattenedOptions = useMemo( + () => { + const opts = customOptions?.flatMap( + (option) => ([ + { + key: option.value, + label: option.title, + }, + ...(option.subOptions ?? 
[]).map( + (subOption) => ({ + key: subOption.value, + label: subOption.description, + }), + ), + ]), + ) ?? []; + + const validOpts = opts.map( + (option) => { + if (isNotDefined(option.key)) { + return undefined; + } + return { + ...option, + key: option.key, + }; + }, + ).filter(isDefined); + return unique( + validOpts, + (option) => option.key, + ); + }, + [customOptions], + ); + + const onImageChange = useFormObject(index, onChange, defaultImageValue); + + const error = getErrorObject(riskyError); + + return ( +
+ + + + + option.key} + labelSelector={(option) => option.label ?? `Option ${option.key}`} + options={flattenedOptions} + error={error?.referenceAnswer} + disabled={disabled || readOnly} + /> +
+ ); +} diff --git a/manager-dashboard/app/views/NewTutorial/ImageInput/styles.css b/manager-dashboard/app/views/NewTutorial/ImageInput/styles.css new file mode 100644 index 000000000..a6e6f1707 --- /dev/null +++ b/manager-dashboard/app/views/NewTutorial/ImageInput/styles.css @@ -0,0 +1,5 @@ +.image-input { + display: flex; + flex-direction: column; + gap: var(--spacing-medium); +} diff --git a/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/FootprintGeoJsonPreview/index.tsx b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/FootprintGeoJsonPreview/index.tsx index f381ff4f9..2ab9cbe36 100644 --- a/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/FootprintGeoJsonPreview/index.tsx +++ b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/FootprintGeoJsonPreview/index.tsx @@ -15,7 +15,7 @@ import { import styles from './styles.css'; // NOTE: the padding is selected wrt the size of the preview -const footprintGeojsonPadding = [140, 140]; +const footprintGeojsonPadding: [number, number] = [140, 140]; interface Props { className?: string; diff --git a/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/ValidateImagePreview/index.tsx b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/ValidateImagePreview/index.tsx new file mode 100644 index 000000000..3dfa8fb98 --- /dev/null +++ b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/ValidateImagePreview/index.tsx @@ -0,0 +1,81 @@ +import React from 'react'; +import { _cs } from '@togglecorp/fujs'; + +import MobilePreview from '#components/MobilePreview'; +import { IconKey, iconMap } from '#utils/common'; + +import { + ImageType, + colorKeyToColorMap, + PartialCustomOptionsType, +} from '../../utils'; +import styles from './styles.css'; + +interface Props { + className?: string; + image?: ImageType; + previewPopUp?: { + title?: string; + description?: string; + icon?: IconKey; + } + customOptions: PartialCustomOptionsType | undefined; + lookFor: string | undefined; +} + +export default function ValidateImagePreview(props: Props) { + const { + className, + previewPopUp, + customOptions, + lookFor, + image, + } = props; + + const Comp = previewPopUp?.icon ? iconMap[previewPopUp.icon] : undefined; + + return ( + } + popupTitle={previewPopUp?.title || '{title}'} + popupDescription={previewPopUp?.description || '{description}'} + > + Preview +
+ {customOptions?.map((option) => { + const Icon = option.icon + ? iconMap[option.icon] + : iconMap['flag-outline']; + return ( +
+
+ {Icon && ( + + )} +
+
+ ); + })} +
+
+ ); +} diff --git a/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/ValidateImagePreview/styles.css b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/ValidateImagePreview/styles.css new file mode 100644 index 000000000..5f708d4a5 --- /dev/null +++ b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/ValidateImagePreview/styles.css @@ -0,0 +1,36 @@ +.validate-image-preview { + .content { + display: flex; + flex-direction: column; + gap: var(--spacing-large); + + .image-preview { + position: relative; + width: 100%; + height: var(--height-mobile-preview-validate-image-content); + } + + .options { + display: grid; + flex-grow: 1; + grid-template-columns: 1fr 1fr 1fr; + grid-gap: var(--spacing-large); + + .option-container { + display: flex; + align-items: center; + justify-content: center; + + .option { + display: flex; + align-items: center; + justify-content: center; + border-radius: 50%; + width: 2.5rem; + height: 2.5rem; + font-size: var(--font-size-extra-large); + } + } + } + } +} diff --git a/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/index.tsx b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/index.tsx index 607434590..2ba9d05fe 100644 --- a/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/index.tsx +++ b/manager-dashboard/app/views/NewTutorial/ScenarioPageInput/index.tsx @@ -17,6 +17,8 @@ import { PROJECT_TYPE_FOOTPRINT, PROJECT_TYPE_CHANGE_DETECTION, PROJECT_TYPE_COMPLETENESS, + PROJECT_TYPE_STREET, + PROJECT_TYPE_VALIDATE_IMAGE, } from '#utils/common'; import TextInput from '#components/TextInput'; import Heading from '#components/Heading'; @@ -24,6 +26,7 @@ import SelectInput from '#components/SelectInput'; import SegmentInput from '#components/SegmentInput'; import { + ImageType, TutorialTasksGeoJSON, FootprintGeoJSON, BuildAreaGeoJSON, @@ -33,6 +36,7 @@ import { import BuildAreaGeoJsonPreview from './BuildAreaGeoJsonPreview'; import FootprintGeoJsonPreview from './FootprintGeoJsonPreview'; import ChangeDetectionGeoJsonPreview from './ChangeDetectionGeoJsonPreview'; +import ValidateImagePreview from './ValidateImagePreview'; import styles from './styles.css'; type ScenarioType = { @@ -77,6 +81,7 @@ interface Props { index: number, error: Error | undefined; geoJson: TutorialTasksGeoJSON | undefined; + images: ImageType[] | undefined; projectType: ProjectType | undefined; urlA: string | undefined; urlB: string | undefined; @@ -93,6 +98,7 @@ export default function ScenarioPageInput(props: Props) { index, error: riskyError, geoJson: geoJsonFromProps, + images, urlA, projectType, urlB, @@ -170,7 +176,21 @@ export default function ScenarioPageInput(props: Props) { [geoJsonFromProps, scenarioId], ); - const activeSegmentInput: ScenarioSegmentType['value'] = projectType && projectType !== PROJECT_TYPE_FOOTPRINT + const image = React.useMemo( + () => { + if (!images) { + return undefined; + } + return images.find((img) => img.screen === scenarioId); + }, + [images, scenarioId], + ); + + const activeSegmentInput: ScenarioSegmentType['value'] = ( + projectType + && projectType !== PROJECT_TYPE_FOOTPRINT + && projectType !== PROJECT_TYPE_VALIDATE_IMAGE + ) ? activeSegmentInputFromState : 'instructions'; @@ -213,7 +233,11 @@ export default function ScenarioPageInput(props: Props) { disabled={disabled} />
- {projectType && projectType !== PROJECT_TYPE_FOOTPRINT && ( + {( + projectType + && projectType !== PROJECT_TYPE_FOOTPRINT + && projectType !== PROJECT_TYPE_VALIDATE_IMAGE + ) && ( <> Hint @@ -251,7 +275,11 @@ export default function ScenarioPageInput(props: Props) { )} - {projectType && projectType !== PROJECT_TYPE_FOOTPRINT && ( + {( + projectType + && projectType !== PROJECT_TYPE_FOOTPRINT + && projectType !== PROJECT_TYPE_VALIDATE_IMAGE + ) && ( <> Success @@ -318,7 +346,23 @@ export default function ScenarioPageInput(props: Props) { lookFor={lookFor} /> )} - {(projectType && projectType !== PROJECT_TYPE_FOOTPRINT) && ( + {projectType === PROJECT_TYPE_VALIDATE_IMAGE && ( + + )} + {projectType === PROJECT_TYPE_STREET && ( +
+ Preview not available. +
+ )} + {(projectType + && projectType !== PROJECT_TYPE_FOOTPRINT + && projectType !== PROJECT_TYPE_VALIDATE_IMAGE + && projectType !== PROJECT_TYPE_STREET) && ( ( @@ -156,6 +162,7 @@ function getGeoJSONError( return 'GeoJSON does not contain iterable features'; } + // FIXME: Use io-ts // Check properties schema const projectSchemas: { [key in ProjectType]: Record; @@ -189,6 +196,14 @@ function getGeoJSONError( tile_y: 'number', tile_z: 'number', }, + [PROJECT_TYPE_STREET]: { + id: ['string', 'number'], + reference: 'number', + screen: 'number', + }, + [PROJECT_TYPE_VALIDATE_IMAGE]: { + // NOTE: We do not use geojson import for validate image project + }, }; const schemaErrors = tutorialTasks.features.map( (feature) => checkSchema( @@ -313,6 +328,27 @@ function getGeoJSONWarning( return errors; } +function getImagesWarning( + images: ImageType[], + customOptions: number[], +) { + const errors = []; + + const usedValues = images.map((item) => item.referenceAnswer).filter(isDefined); + + const usedValuesSet = new Set(usedValues); + const customOptionsSet = new Set(customOptions); + + const invalidUsedValuesSet = difference(usedValuesSet, customOptionsSet); + + if (invalidUsedValuesSet.size === 1) { + errors.push(`Reference in images should be either ${customOptions.join(', ')}. The invalid reference is ${[...invalidUsedValuesSet].join(', ')}`); + } else if (invalidUsedValuesSet.size > 1) { + errors.push(`Reference in images should be either ${customOptions.join(', ')}. The invalid references are ${[...invalidUsedValuesSet].sort().join(', ')}`); + } + return errors; +} + type CustomScreen = Omit; function sanitizeScreens(scenarioPages: TutorialFormType['scenarioPages']) { const screens = scenarioPages.reduce>( @@ -341,7 +377,6 @@ const defaultTutorialFormValue: PartialTutorialFormType = { name: TILE_SERVER_ESRI, credits: tileServerDefaultCredits[TILE_SERVER_ESRI], }, - customOptions: defaultFootprintCustomOptions, }; type SubmissionStatus = 'started' | 'imageUpload' | 'tutorialSubmit' | 'success' | 'failed'; @@ -401,6 +436,14 @@ function NewTutorial(props: Props) { InformationPagesType >('informationPages', setFieldValue); + const { + setValue: setImageValue, + // removeValue: onImageRemove, + } = useFormArray< + 'images', + ImageType + >('images', setFieldValue); + const handleSubmission = React.useCallback(( finalValuesFromProps: PartialTutorialFormType, ) => { @@ -595,7 +638,6 @@ function NewTutorial(props: Props) { })); return; } - setFieldValue(tutorialTasks, 'tutorialTasks'); const uniqueArray = unique( @@ -611,7 +653,6 @@ function NewTutorial(props: Props) { success: {}, } )); - setFieldValue(tutorialTaskArray, 'scenarioPages'); }, [setFieldValue, setError, value?.projectType]); @@ -640,12 +681,56 @@ function NewTutorial(props: Props) { [setFieldValue], ); + const handleCocoImport = React.useCallback( + (val: CocoDatasetType | undefined) => { + if (isNotDefined(val)) { + setFieldValue( + [], + 'images', + ); + return; + } + const newImages = val.images.map((image, index) => ({ + sourceIdentifier: String(image.id), + fileName: image.file_name, + url: image.flickr_url || image.coco_url, + screen: index + 1, + // referenceAnswer: 1, + })); + setFieldValue( + newImages, + 'images', + ); + + const uniqueArray = unique( + newImages, + ((img) => img.screen), + ); + const sorted = uniqueArray.sort((a, b) => a.screen - b.screen); + const tutorialTaskArray = sorted?.map((img) => ( + { + scenarioId: img.screen, + hint: {}, + instructions: {}, + success: {}, + } + )); + 
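+ // One scenario page is generated per imported image, keyed by its screen number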
setFieldValue(tutorialTaskArray, 'scenarioPages'); + }, + [setFieldValue], + ); + const submissionPending = ( tutorialSubmissionStatus === 'started' || tutorialSubmissionStatus === 'imageUpload' || tutorialSubmissionStatus === 'tutorialSubmit' ); + const tileServerVisible = value.projectType === PROJECT_TYPE_BUILD_AREA + || value.projectType === PROJECT_TYPE_FOOTPRINT + || value.projectType === PROJECT_TYPE_COMPLETENESS + || value.projectType === PROJECT_TYPE_CHANGE_DETECTION; + const tileServerBVisible = value.projectType === PROJECT_TYPE_CHANGE_DETECTION || value.projectType === PROJECT_TYPE_COMPLETENESS; @@ -668,6 +753,11 @@ function NewTutorial(props: Props) { [error?.informationPages], ); + const imagesError = React.useMemo( + () => getErrorObject(error?.images), + [error?.images], + ); + const hasErrors = React.useMemo( () => analyzeErrors(error), [error], @@ -683,6 +773,14 @@ function NewTutorial(props: Props) { ...options, ...subOptions, ].filter(isDefined); + + if (value?.projectType === PROJECT_TYPE_VALIDATE_IMAGE) { + return getImagesWarning( + value?.images ?? [], + selectedValues, + ); + } + return getGeoJSONWarning( value?.tutorialTasks, value?.projectType, @@ -690,7 +788,13 @@ function NewTutorial(props: Props) { value?.zoomLevel, ); }, - [value?.tutorialTasks, value?.projectType, value?.customOptions, value?.zoomLevel], + [ + value?.tutorialTasks, + value?.images, + value?.projectType, + value?.customOptions, + value?.zoomLevel, + ], ); const getTileServerUrl = (val: PartialTutorialFormType['tileServer']) => { @@ -709,13 +813,16 @@ function NewTutorial(props: Props) { const { customOptions, informationPages, + images, } = value; const handleProjectTypeChange = React.useCallback( (newValue: ProjectType | undefined) => { setFieldValue(undefined, 'tutorialTasks'); setFieldValue(undefined, 'scenarioPages'); + setFieldValue(undefined, 'images'); setFieldValue(newValue, 'projectType'); + setFieldValue(getDefaultOptions(newValue), 'customOptions'); }, [setFieldValue], ); @@ -761,7 +868,11 @@ function NewTutorial(props: Props) { autoFocus /> - {value.projectType === PROJECT_TYPE_FOOTPRINT && ( + {( + value.projectType === PROJECT_TYPE_FOOTPRINT + || value.projectType === PROJECT_TYPE_VALIDATE_IMAGE + || value.projectType === PROJECT_TYPE_STREET + ) && ( - - - + {tileServerVisible && ( + + + + )} + {tileServerBVisible && ( ) } + {value.projectType === PROJECT_TYPE_VALIDATE_IMAGE && ( + + + + {(images && images.length > 0) ? ( +
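+ {/* One ImageInput row per imported image; values are updated by array index */}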
+ {images.map((image, index) => ( + + + + ))} +
+ ) : ( + + )} +
+ )} - + {value?.projectType !== PROJECT_TYPE_VALIDATE_IMAGE && ( + + )}
{value.scenarioPages?.map((task, index) => ( ))} {(value.scenarioPages?.length ?? 0) === 0 && ( - + <> + {value.projectType !== PROJECT_TYPE_VALIDATE_IMAGE ? ( + + ) : ( + + )} + )}
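For reference, the COCO import above only uses `id`, `file_name`, and `flickr_url`/`coco_url` from each entry of the top-level `images` array, and each image becomes one tutorial screen in file order. A minimal accepted export might look like this (file names and URLs below are illustrative):

```json
{
  "images": [
    { "id": 1, "file_name": "street_corner.jpg", "coco_url": "https://example.com/street_corner.jpg" },
    { "id": 2, "file_name": "crosswalk.jpg", "flickr_url": "https://example.com/crosswalk.jpg" }
  ]
}
```

The user scripts added further down (`generate_coco_from_drive.js` and `generate_coco_from_dropbox.py`) produce files of this shape.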
diff --git a/manager-dashboard/app/views/NewTutorial/styles.css b/manager-dashboard/app/views/NewTutorial/styles.css index 7242f2344..40b8cbdf6 100644 --- a/manager-dashboard/app/views/NewTutorial/styles.css +++ b/manager-dashboard/app/views/NewTutorial/styles.css @@ -20,6 +20,13 @@ gap: var(--spacing-medium); } + .image-list { + display: flex; + flex-direction: column; + flex-grow: 1; + gap: var(--spacing-medium); + } + .custom-option-container { display: flex; gap: var(--spacing-large); diff --git a/manager-dashboard/app/views/NewTutorial/utils.ts b/manager-dashboard/app/views/NewTutorial/utils.ts index 90b805f5d..e0533080a 100644 --- a/manager-dashboard/app/views/NewTutorial/utils.ts +++ b/manager-dashboard/app/views/NewTutorial/utils.ts @@ -8,6 +8,7 @@ import { nullValue, ArraySchema, addCondition, + urlCondition, } from '@togglecorp/toggle-form'; import { isDefined, @@ -26,6 +27,8 @@ import { PROJECT_TYPE_CHANGE_DETECTION, PROJECT_TYPE_COMPLETENESS, PROJECT_TYPE_FOOTPRINT, + PROJECT_TYPE_STREET, + PROJECT_TYPE_VALIDATE_IMAGE, IconKey, } from '#utils/common'; @@ -257,6 +260,63 @@ export const defaultFootprintCustomOptions: PartialTutorialFormType['customOptio }, ]; +export const defaultStreetCustomOptions: PartialTutorialFormType['customOptions'] = [ + { + optionId: 1, + value: 1, + title: 'Yes', + icon: 'checkmark-outline', + iconColor: colorKeyToColorMap.green, + description: 'the object you are looking for is in the image.', + }, + { + optionId: 2, + value: 0, + title: 'No', + icon: 'close-outline', + iconColor: colorKeyToColorMap.red, + description: 'the object you are looking for is NOT in the image.', + }, + { + optionId: 3, + value: 2, + title: 'Not Sure', + icon: 'remove-outline', + iconColor: colorKeyToColorMap.gray, + description: 'if you\'re not sure or there is bad imagery', + }, +]; + +export const defaultValidateImageCustomOptions: PartialTutorialFormType['customOptions'] = [ + { + optionId: 1, + value: 1, + title: 'Yes', + icon: 'checkmark-outline', + iconColor: colorKeyToColorMap.green, + // FIXME: Add description + description: 'Yes', + }, + { + optionId: 2, + value: 0, + title: 'No', + icon: 'close-outline', + iconColor: colorKeyToColorMap.red, + // FIXME: Add description + description: 'No', + }, + { + optionId: 3, + value: 2, + title: 'Not Sure', + icon: 'remove-outline', + iconColor: colorKeyToColorMap.gray, + // FIXME: Add description + description: 'Not Sure', + }, +]; + export function deleteKey( value: T, key: K, @@ -268,6 +328,22 @@ export function deleteKey( return copy; } +export function getDefaultOptions(projectType: ProjectType | undefined) { + if (projectType === PROJECT_TYPE_FOOTPRINT) { + return defaultFootprintCustomOptions; + } + + if (projectType === PROJECT_TYPE_STREET) { + return defaultStreetCustomOptions; + } + + if (projectType === PROJECT_TYPE_VALIDATE_IMAGE) { + return defaultValidateImageCustomOptions; + } + + return undefined; +} + export interface BuildAreaProperties { reference: number; screen: number; @@ -308,6 +384,12 @@ export interface ChangeDetectionProperties { // taskId: string; } +export interface StreetProperties { + id: string; + reference: number; + screen: number; +} + export type BuildAreaGeoJSON = GeoJSON.FeatureCollection< GeoJSON.Geometry, BuildAreaProperties @@ -323,9 +405,14 @@ export type ChangeDetectionGeoJSON = GeoJSON.FeatureCollection< ChangeDetectionProperties >; +export type StreetGeoJSON = GeoJSON.FeatureCollection< + GeoJSON.Geometry, + StreetProperties +>; + export type TutorialTasksGeoJSON = 
GeoJSON.FeatureCollection< GeoJSON.Geometry, - BuildAreaProperties | FootprintProperties | ChangeDetectionProperties + BuildAreaProperties | FootprintProperties | ChangeDetectionProperties | StreetProperties >; export type CustomOptions = { @@ -375,7 +462,6 @@ export interface TutorialFormType { title: string; }; }[]; - tutorialTasks?: TutorialTasksGeoJSON, exampleImage1: File; exampleImage2: File; projectType: ProjectType; @@ -383,6 +469,15 @@ export interface TutorialFormType { zoomLevel?: number; customOptions?: CustomOptions; informationPages: InformationPages; + + tutorialTasks?: TutorialTasksGeoJSON, + images?: { + sourceIdentifier: string; + fileName: string; + url: string; + referenceAnswer: number; + screen: number; + }[]; } export type PartialTutorialFormType = PartialForm< @@ -391,9 +486,11 @@ export type PartialTutorialFormType = PartialForm< exampleImage2?: File; }, // NOTE: we do not want to change File and FeatureCollection to partials - 'image' | 'tutorialTasks' | 'exampleImage1' | 'exampleImage2' | 'scenarioId' | 'optionId' | 'subOptionsId' | 'pageNumber' | 'blockNumber' | 'blockType' | 'imageFile' + 'image' | 'tutorialTasks' | 'exampleImage1' | 'exampleImage2' | 'scenarioId' | 'optionId' | 'subOptionsId' | 'pageNumber' | 'blockNumber' | 'blockType' | 'imageFile' | 'sourceIdentifier' >; +export type ImageType = NonNullable[number]; + type TutorialFormSchema = ObjectSchema; type TutorialFormSchemaFields = ReturnType; @@ -411,6 +508,12 @@ export type CustomOptionSchemaFields = ReturnType export type CustomOptionFormSchema = ArraySchema; export type CustomOptionFormSchemaMember = ReturnType; +type PartialImages = NonNullable[number]; +type ImageSchema = ObjectSchema; +type ImageSchemaFields = ReturnType +type ImageFormSchema = ArraySchema; +type ImageFormSchemaMember = ReturnType; + export type InformationPagesType = NonNullable[number] type InformationPagesSchema = ObjectSchema; type InformationPagesSchemaFields = ReturnType @@ -422,6 +525,8 @@ export type PartialInformationPagesType = PartialTutorialFormType['informationPa export type PartialCustomOptionsType = PartialTutorialFormType['customOptions']; export type PartialBlocksType = NonNullable[number]>['blocks']; +export const MAX_IMAGES = 20; + export const MAX_OPTIONS = 6; export const MIN_OPTIONS = 2; export const MAX_SUB_OPTIONS = 6; @@ -449,12 +554,6 @@ export const tutorialFormSchema: TutorialFormSchema = { requiredValidation: requiredStringCondition, validations: [getNoMoreThanNCharacterCondition(MD_TEXT_MAX_LENGTH)], }, - tileServer: { - fields: tileServerFieldsSchema, - }, - tutorialTasks: { - required: true, - }, informationPages: { validation: (info) => { if (info && info.length > MAX_INFO_PAGES) { @@ -513,6 +612,8 @@ export const tutorialFormSchema: TutorialFormSchema = { }, }; + // common + baseSchema = addCondition( baseSchema, value, @@ -550,7 +651,11 @@ export const tutorialFormSchema: TutorialFormSchema = { }), }, }; - if (projectType && projectType !== PROJECT_TYPE_FOOTPRINT) { + if ( + projectType + && projectType !== PROJECT_TYPE_FOOTPRINT + && projectType !== PROJECT_TYPE_VALIDATE_IMAGE + ) { fields = { ...fields, hint: { @@ -724,7 +829,9 @@ export const tutorialFormSchema: TutorialFormSchema = { }), }; - if (formValues?.projectType === PROJECT_TYPE_FOOTPRINT) { + if (formValues?.projectType === PROJECT_TYPE_FOOTPRINT + || formValues?.projectType === PROJECT_TYPE_VALIDATE_IMAGE + || formValues?.projectType === PROJECT_TYPE_STREET) { return { customOptions: customOptionField, }; @@ -757,6 +864,23 @@ 
export const tutorialFormSchema: TutorialFormSchema = { }), ); + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['tileServer'], + (v) => ( + v?.projectType !== PROJECT_TYPE_VALIDATE_IMAGE + ? { + tileServer: { + fields: tileServerFieldsSchema, + }, + } : { + tileServer: { forceValue: nullValue }, + } + ), + ); + baseSchema = addCondition( baseSchema, value, @@ -772,6 +896,77 @@ export const tutorialFormSchema: TutorialFormSchema = { tileServerB: { forceValue: nullValue }, }), ); + + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['tutorialTasks'], + (formValues) => { + if (formValues?.projectType === PROJECT_TYPE_VALIDATE_IMAGE) { + return { + tutorialTasks: { forceValue: nullValue }, + }; + } + return { + tutorialTasks: { + required: true, + }, + }; + }, + ); + + // validate image + + baseSchema = addCondition( + baseSchema, + value, + ['projectType'], + ['images'], + (formValues) => { + // FIXME: Add "unique" constraint for sourceIdentifier and fileName + if (formValues?.projectType === PROJECT_TYPE_VALIDATE_IMAGE) { + return { + images: { + keySelector: (key) => key.sourceIdentifier, + validation: (values) => { + if (values && values.length > MAX_IMAGES) { + return `Too many images ${values.length}. Please do not exceed ${MAX_IMAGES} images.`; + } + return undefined; + }, + member: (): ImageFormSchemaMember => ({ + fields: (): ImageSchemaFields => ({ + sourceIdentifier: { + required: true, + requiredValidation: requiredStringCondition, + }, + fileName: { + required: true, + requiredValidation: requiredStringCondition, + }, + url: { + required: true, + requiredValidation: requiredStringCondition, + validations: [urlCondition], + }, + referenceAnswer: { + required: true, + }, + screen: { + required: true, + }, + }), + }), + }, + }; + } + return { + images: { forceValue: nullValue }, + }; + }, + ); + return baseSchema; }, }; diff --git a/manager-dashboard/package.json b/manager-dashboard/package.json index de3c020bd..4b0ee3c1a 100644 --- a/manager-dashboard/package.json +++ b/manager-dashboard/package.json @@ -44,8 +44,11 @@ "apollo-upload-client": "^16.0.0", "core-js": "3", "firebase": "^9.9.0", + "fp-ts": "^2.16.10", "graphql": "^15.5.1", "graphql-anywhere": "^4.2.7", + "io-ts": "^2.2.22", + "io-ts-types": "^0.5.19", "leaflet": "^1.8.0", "react": "^17.0.2", "react-dom": "^17.0.2", diff --git a/manager-dashboard/user_scripts/README.md b/manager-dashboard/user_scripts/README.md new file mode 100644 index 000000000..3964cc12c --- /dev/null +++ b/manager-dashboard/user_scripts/README.md @@ -0,0 +1,73 @@ +## Description +This will serve as a guide on how to create a COCO file using the utility script for Google Drive and DropBox + +## Google Drive +You can find the utility script for Google Drive here: [generate_coco_from_drive.js](./generate_coco_from_drive.js) + +### Prerequisites +- You must have a Google account +- Your image files should be stored in a public Google Drive folder +- You have access to Google Apps Script via https://script.google.com + +### Creation Steps +- Create a Google Apps script project + - Go to https://script.google.com + - Click on "New Project" + - Rename the project name to `your-project-name` +- Paste the utility script + - Replace the default code with the utility file's code +- Replace placeholder values + - Replace `your_coco_export.json` with your output filename + - Replace `your_public_folder_id` with the ID of your Google Drive folder +> The folder ID is the alphanumeric string that appears after 
"/folders/" in the URL.\ +> Eg: drive.google.com/drive/folders/**1prcCevijN5mubTllB2kr5ki1gjh_IO4u**?usp=sharing +- Run the script + - Save the project to Drive using the floppy disk 💾 icon + - Press Run + - Accept the authorization prompts the first time you run the script +- View COCO JSON Output + - Go to **View > Logs** + - Copy the Google Drive URL where the coco file is generated + - Download the json file + +## DropBox +You can find the utility script for DropBox here: [generate_coco_from_dropbox.py](./generate_coco_from_dropbox.py) + +### Prerequisites +- Create account: https://www.dropbox.com/register +- Create new App: https://www.dropbox.com/developers/apps + - Choose an API: Scoped access + - Choose the type of access you need: Full Dropbox + - Name your app: `your-app-name` +- Update `Permission type` + - Go to the app settings + - Click **Scoped App** + - Tick the following permissions + - files.metadata.read + - files.content.write + - files.content.read + - sharing.write + - sharing.read + - Submit +- Generate new access token: + - Go to the app settings + - Click **Generated access token** +- Install uv on your system: https://docs.astral.sh/uv/getting-started/installation/ +- Download the [generate_coco_from_dropbox.py](./generate_coco_from_dropbox.py) script +- Create a DropBox folder and upload images + +### Creation Steps +- Copy the folder pathname in DropBox +- Copy the generated access token from DropBox +- Run the script +```bash + # Help + uv run generate_coco_dropbox.py --help + + # Sample + uv run generate_coco_dropbox.py "DROPBOX_ACCESS_TOKEN" "FOLDER_PATHNAME_IN_DROPBOX" "DESTINATION_EXPORT_FILE_NAME_IN_DROPBOX" + + # Example + uv run generate_coco_dropbox.py sl.yourAccessTokenHere "/COCO TEST" "coco_export.json" +``` +- Download the exported coco json from the link in terminal or your DropBox folder diff --git a/manager-dashboard/user_scripts/generate_coco_from_drive.js b/manager-dashboard/user_scripts/generate_coco_from_drive.js new file mode 100644 index 000000000..957eee989 --- /dev/null +++ b/manager-dashboard/user_scripts/generate_coco_from_drive.js @@ -0,0 +1,33 @@ +function main() { + const exportFileName = 'your_coco_export.json'; + const folderId = 'your_public_folder_id'; + const folder = DriveApp.getFolderById(folderId); + const files = folder.getFiles(); + + const images = []; + + let id = 1; + while (files.hasNext()) { + const file = files.next(); + const name = file.getName(); + const fileId = file.getId(); + // const url = https://drive.google.com/uc?export=view&id=" + fileId; + const url = `https://drive.google.com/thumbnail?id=${fileId}&sz=w1000`; + images.push({ + coco_url: url, + file_name: name, + id, + }); + id += 1; + } + + const exportContent = JSON.stringify({ images }); + const exportFile = DriveApp.createFile( + exportFileName, + exportContent, + MimeType.PLAIN_TEXT, + ); + const exportFileUrl = exportFile.getUrl(); + + Logger.log(`COCO file available at: ${exportFileUrl}`); +} diff --git a/manager-dashboard/user_scripts/generate_coco_from_dropbox.py b/manager-dashboard/user_scripts/generate_coco_from_dropbox.py new file mode 100644 index 000000000..47249d2de --- /dev/null +++ b/manager-dashboard/user_scripts/generate_coco_from_dropbox.py @@ -0,0 +1,213 @@ +# /// script +# requires-python = ">=3.13" +# dependencies = [ +# "httpx~=0.28.1", +# "colorama", +# ] +# /// +from pathlib import Path +from colorama import init, Fore + +import argparse +import textwrap +import httpx +import json +import re + +# Initialize colorama 
+init(autoreset=True) + + +DROPBOX_PERMISSION_MESSAGE = f""" +{Fore.YELLOW} +---------------------------------------------------- +Make sure the dropbox App includes these permissions +- files.metadata.read +- files.content.write +- files.content.read +- sharing.write +- sharing.read +""" + + +def dropbox_request_error_handler(res: httpx.Response): + try: + res.raise_for_status() + except httpx.HTTPStatusError as http_err: + print(f"{Fore.RED}HTTP error occurred while requesting {res.url}: {http_err}") + print(f"{Fore.RED}Response content: {res.text}") + raise + except httpx.RequestError as req_err: + print( + f"{Fore.RED}An error occurred while making the request to {res.url}: {req_err}" + ) + raise + except Exception as err: + print(f"{Fore.RED}An unexpected error occurred: {err}") + raise + finally: + print(DROPBOX_PERMISSION_MESSAGE) + + +def dropbox_request(endpoint: str, data: object, *, access_token: str): + url = f"https://api.dropboxapi.com/2/{endpoint}" + headers = { + "Authorization": f"Bearer {access_token}", + "Content-Type": "application/json", + } + res = httpx.post( + url, + headers=headers, + data=json.dumps(data), + ) + dropbox_request_error_handler(res) + return res.json() + + +def dropbox_content_request( + endpoint: str, path: str, data: object, *, access_token: str +): + url = f"https://content.dropboxapi.com/2/{endpoint}" + headers = { + "Authorization": f"Bearer {access_token}", + "Content-Type": "application/octet-stream", + "Dropbox-API-Arg": json.dumps( + { + "path": path, + "mode": "overwrite", # overwrite if exists + "autorename": False, + "mute": False, + } + ), + } + res = httpx.post( + url, + headers=headers, + data=json.dumps(data).encode("utf-8"), + ) + dropbox_request_error_handler(res) + return res.json() + + +def list_all_files(folder_path: str, *, access_token: str): + ALLOWED_EXTENSIONS = {".jpg", ".jpeg", ".png", ".webp"} + files = [] + + data = {"path": folder_path, "recursive": False} + response = dropbox_request("files/list_folder", data, access_token=access_token) + + files.extend(response.get("entries", [])) + + while response.get("has_more", False): + cursor = response["cursor"] + response = dropbox_request( + "files/list_folder/continue", + {"cursor": cursor}, + access_token=access_token, + ) + files.extend(response.get("entries", [])) + + # Sort files by name (just in case) + files = sorted(files, key=lambda file: file["name"].lower()) + # Filter out only files (not folders) that are supported + files = [ + file + for file in files + if file[".tag"] == "file" + and Path(file["name"]).suffix.lower() in ALLOWED_EXTENSIONS + ] + return files + + +def share_file_and_get_links(files, *, access_token: str): + total = len(files) + images = [] + for i, file in enumerate(files): + path = file["path_lower"] + actual_path = file["path_display"] + + # First try to list existing shared links + data = {"path": path, "direct_only": True} + print(f"{i + 1}/{total} Getting public URL") + res = dropbox_request( + "sharing/list_shared_links", + data, + access_token=access_token, + ) + if res.get("links"): + link = res["links"][0]["url"] + else: + data = {"path": path, "settings": {"requested_visibility": "public"}} + res_create = dropbox_request( + "sharing/create_shared_link_with_settings", + data, + access_token=access_token, + ) + link = res_create["url"] + + raw_url = re.sub(r"&dl=0\b", "", link) + "&raw=1" + + images.append( + { + "id": i + 1, + "file_name": actual_path, + "coco_url": raw_url, + } + ) + return images + + +def main(): + parser = 
argparse.ArgumentParser( + description="Generate COCO file from images folder.", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=textwrap.dedent(DROPBOX_PERMISSION_MESSAGE), + ) + parser.add_argument("access_token", help="Access token for authentication") + parser.add_argument( + "images_folder", help='Path to the images folder in dropbox. eg: "/COCO TEST"' + ) + parser.add_argument( + "export_file_name", + help="Name of the export COCO file to be created in dropbox under provided images_folder", + ) + + args = parser.parse_args() + + access_token = args.access_token + images_folder = args.images_folder + export_file_name = args.export_file_name + + # Get all the files on given path + files = list_all_files( + images_folder, + access_token=access_token, + ) + + # Share individual file publically and get public link + public_images = share_file_and_get_links( + files, + access_token=access_token, + ) + + # Upload coco format export to dropbox + print("Uploading COCO file") + absolute_export_file_name = str(Path(images_folder) / Path(export_file_name)) + dropbox_content_request( + "files/upload", + absolute_export_file_name, + {"images": public_images}, + access_token=access_token, + ) + + # Get temporary link + res = dropbox_request( + "files/get_temporary_link", + {"path": absolute_export_file_name}, + access_token=access_token, + ) + print(f"COCO file available at {res['link']}") + + +if __name__ == "__main__": + main() diff --git a/manager-dashboard/yarn.lock b/manager-dashboard/yarn.lock index a3597eea5..66718bef1 100644 --- a/manager-dashboard/yarn.lock +++ b/manager-dashboard/yarn.lock @@ -6668,6 +6668,11 @@ forwarded@0.2.0: resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== +fp-ts@^2.16.10: + version "2.16.10" + resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-2.16.10.tgz#829b82a46571c2dc202bed38a9c2eeec603e38c4" + integrity sha512-vuROzbNVfCmUkZSUbnWSltR1sbheyQbTzug7LB/46fEa1c0EucLeBaCEUE0gF3ZGUGBt9lVUiziGOhhj6K1ORA== + fraction.js@^4.1.1: version "4.1.2" resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.1.2.tgz#13e420a92422b6cf244dff8690ed89401029fbe8" @@ -7480,6 +7485,16 @@ invariant@^2.2.4: dependencies: loose-envify "^1.0.0" +io-ts-types@^0.5.19: + version "0.5.19" + resolved "https://registry.yarnpkg.com/io-ts-types/-/io-ts-types-0.5.19.tgz#9c04fa73f15992436605218a5686b610efa7a5d3" + integrity sha512-kQOYYDZG5vKre+INIDZbLeDJe+oM+4zLpUkjXyTMyUfoCpjJNyi29ZLkuEAwcPufaYo3yu/BsemZtbdD+NtRfQ== + +io-ts@^2.2.22: + version "2.2.22" + resolved "https://registry.yarnpkg.com/io-ts/-/io-ts-2.2.22.tgz#5ab0d3636fe8494a275f0266461ab019da4b8d0b" + integrity sha512-FHCCztTkHoV9mdBsHpocLpdTAfh956ZQcIkWQxxS0U5HT53vtrcuYdQneEJKH6xILaLNzXVl2Cvwtoy8XNN0AA== + ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" diff --git a/mapswipe_workers/mapswipe_workers/definitions.py b/mapswipe_workers/mapswipe_workers/definitions.py index c9dec6d79..afa7cb6ff 100644 --- a/mapswipe_workers/mapswipe_workers/definitions.py +++ b/mapswipe_workers/mapswipe_workers/definitions.py @@ -137,6 +137,7 @@ class ProjectType(Enum): MEDIA_CLASSIFICATION = 5 DIGITIZATION = 6 STREET = 7 + VALIDATE_IMAGE = 10 @property def constructor(self): @@ -149,6 +150,7 @@ def constructor(self): FootprintProject, MediaClassificationProject, 
StreetProject, + ValidateImageProject, ) project_type_classes = { @@ -159,6 +161,7 @@ def constructor(self): 5: MediaClassificationProject, 6: DigitizationProject, 7: StreetProject, + 10: ValidateImageProject, } return project_type_classes[self.value] @@ -170,6 +173,8 @@ def tutorial(self): ClassificationTutorial, CompletenessTutorial, FootprintTutorial, + StreetTutorial, + ValidateImageTutorial, ) project_type_classes = { @@ -177,5 +182,7 @@ def tutorial(self): 2: FootprintTutorial, 3: ChangeDetectionTutorial, 4: CompletenessTutorial, + 7: StreetTutorial, + 10: ValidateImageTutorial, } return project_type_classes[self.value] diff --git a/mapswipe_workers/mapswipe_workers/firebase/firebase.py b/mapswipe_workers/mapswipe_workers/firebase/firebase.py index 809b6c801..b91256985 100644 --- a/mapswipe_workers/mapswipe_workers/firebase/firebase.py +++ b/mapswipe_workers/mapswipe_workers/firebase/firebase.py @@ -14,6 +14,7 @@ def save_project_to_firebase(self, project): # if a geometry exists in projects we want to delete it. # This geometry is not used in clients. project.pop("geometry", None) + # FIXME: We might need to pop images # save project self.ref.update({f"v2/projects/{project['projectId']}": project}) logger.info( @@ -82,6 +83,7 @@ def save_tutorial_to_firebase( tutorialDict.pop("raw_tasks", None) tutorialDict.pop("examplesFile", None) tutorialDict.pop("tutorial_tasks", None) + tutorialDict.pop("images", None) if not tutorial.projectId or tutorial.projectId == "": raise CustomError( diff --git a/mapswipe_workers/mapswipe_workers/project_types/__init__.py b/mapswipe_workers/mapswipe_workers/project_types/__init__.py index 43013b0dc..3fb4e722b 100644 --- a/mapswipe_workers/mapswipe_workers/project_types/__init__.py +++ b/mapswipe_workers/mapswipe_workers/project_types/__init__.py @@ -3,12 +3,15 @@ from .arbitrary_geometry.footprint.tutorial import FootprintTutorial from .media_classification.project import MediaClassificationProject from .street.project import StreetProject +from .street.tutorial import StreetTutorial from .tile_map_service.change_detection.project import ChangeDetectionProject from .tile_map_service.change_detection.tutorial import ChangeDetectionTutorial from .tile_map_service.classification.project import ClassificationProject from .tile_map_service.classification.tutorial import ClassificationTutorial from .tile_map_service.completeness.project import CompletenessProject from .tile_map_service.completeness.tutorial import CompletenessTutorial +from .validate_image.project import ValidateImageProject +from .validate_image.tutorial import ValidateImageTutorial __all__ = [ "ClassificationProject", @@ -20,6 +23,9 @@ "MediaClassificationProject", "FootprintProject", "FootprintTutorial", + "ValidateImageProject", + "ValidateImageTutorial", "DigitizationProject", "StreetProject", + "StreetTutorial", ] diff --git a/mapswipe_workers/mapswipe_workers/project_types/street/tutorial.py b/mapswipe_workers/mapswipe_workers/project_types/street/tutorial.py index cfbfc0ead..e59a97f09 100644 --- a/mapswipe_workers/mapswipe_workers/project_types/street/tutorial.py +++ b/mapswipe_workers/mapswipe_workers/project_types/street/tutorial.py @@ -1,14 +1,84 @@ +from dataclasses import asdict, dataclass + +from mapswipe_workers.definitions import logger +from mapswipe_workers.firebase.firebase import Firebase +from mapswipe_workers.project_types.street.project import StreetGroup, StreetTask from mapswipe_workers.project_types.tutorial import BaseTutorial +@dataclass +class 
StreetTutorialTask(StreetTask): + projectId: int + taskId: str + groupId: int + referenceAnswer: int + screen: int + + class StreetTutorial(BaseTutorial): - """The subclass for an TMS Grid based Tutorial.""" + """The subclass for an arbitrary geometry based Tutorial.""" - def save_tutorial(self): - raise NotImplementedError("Currently Street has no Tutorial") + def __init__(self, tutorial_draft): + # this will create the basis attributes + super().__init__(tutorial_draft) + + # self.projectId = tutorial_draft["projectId"] + self.projectType = tutorial_draft["projectType"] + self.tutorial_tasks = tutorial_draft["tutorialTasks"] + self.groups = dict() + self.tasks = dict() def create_tutorial_groups(self): - raise NotImplementedError("Currently Street has no Tutorial") + """Create group for the tutorial based on provided examples in geojson file.""" + # load examples/tasks from file + + group = StreetGroup( + groupId=101, + projectId=self.projectId, + numberOfTasks=len(self.tutorial_tasks), + progress=0, + finishedCount=0, + requiredCount=0, + ) + self.groups[101] = group + + # Add number of tasks for the group here. This needs to be set according to + # the number of features/examples in the geojson file + + logger.info( + f"{self.projectId}" + f" - create_tutorial_groups - " + f"created groups dictionary" + ) def create_tutorial_tasks(self): - raise NotImplementedError("Currently Street has no Tutorial") + """Create the tasks dict based on provided examples in geojson file.""" + task_list = [] + for i, task in enumerate(self.tutorial_tasks["features"]): + task = StreetTutorialTask( + projectId=self.projectId, + groupId=101, + taskId=f"{task['properties']['id']}", + geometry="", + referenceAnswer=task["properties"]["reference"], + screen=task["properties"]["screen"], + ) + task_list.append(asdict(task)) + if task_list: + self.tasks[101] = task_list + else: + logger.info(f"group in project {self.projectId} is not valid.") + + logger.info( + f"{self.projectId}" + f" - create_tutorial_tasks - " + f"created tasks dictionary" + ) + + def save_tutorial(self): + firebase = Firebase() + firebase.save_tutorial_to_firebase( + self, self.groups, self.tasks, useCompression=True + ) + logger.info(self.tutorialDraftId) + firebase.drop_tutorial_draft(self.tutorialDraftId) diff --git a/mapswipe_workers/mapswipe_workers/project_types/validate_image/__init__.py b/mapswipe_workers/mapswipe_workers/project_types/validate_image/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mapswipe_workers/mapswipe_workers/project_types/validate_image/project.py b/mapswipe_workers/mapswipe_workers/project_types/validate_image/project.py new file mode 100644 index 000000000..e0aee5f4d --- /dev/null +++ b/mapswipe_workers/mapswipe_workers/project_types/validate_image/project.py @@ -0,0 +1,108 @@ +import math +from dataclasses import dataclass +from typing import Dict, List + +from mapswipe_workers.definitions import logger +from mapswipe_workers.firebase.firebase import Firebase +from mapswipe_workers.firebase_to_postgres.transfer_results import ( + results_to_file, + save_results_to_postgres, + truncate_temp_results, +) +from mapswipe_workers.generate_stats.project_stats import ( + get_statistics_for_integer_result_project, +) +from mapswipe_workers.project_types.project import BaseGroup, BaseProject + + +@dataclass +class ValidateImageGroup(BaseGroup): + pass + + +@dataclass +class ValidateImageTask: + # TODO(tnagorra): We need to check if fileName should be saved on project + # NOTE: We do not need 
to add projectId and groupId so we are not extending BaseTask + + # NOTE: taskId is the sourceIdentifier + taskId: str + + fileName: str + url: str + + # NOTE: This is not required but required by the base class + geometry: str + + +class ValidateImageProject(BaseProject): + def __init__(self, project_draft): + super().__init__(project_draft) + self.groups: Dict[str, ValidateImageGroup] = {} + self.tasks: Dict[str, List[ValidateImageTask]] = {} # dict keys are group ids + + # NOTE: This is a standard structure defined on manager dashboard. + # It's derived from other formats like COCO. + # The transfromation is done in manager dashboard. + self.images = project_draft["images"] + + def save_tasks_to_firebase(self, projectId: str, tasks: dict): + firebase = Firebase() + firebase.save_tasks_to_firebase(projectId, tasks, useCompression=False) + + @staticmethod + def results_to_postgres(results: dict, project_id: str, filter_mode: bool): + """How to move the result data from firebase to postgres.""" + results_file, user_group_results_file = results_to_file(results, project_id) + truncate_temp_results() + save_results_to_postgres(results_file, project_id, filter_mode) + return user_group_results_file + + @staticmethod + def get_per_project_statistics(project_id, project_info): + """How to aggregate the project results.""" + return get_statistics_for_integer_result_project( + project_id, project_info, generate_hot_tm_geometries=False + ) + + def validate_geometries(self): + pass + + def save_to_files(self, project): + """We do not have any geometry so we pass here""" + pass + + def create_groups(self): + self.numberOfGroups = math.ceil(len(self.images) / self.groupSize) + for group_index in range(self.numberOfGroups): + self.groups[f"g{group_index + 100}"] = ValidateImageGroup( + projectId=self.projectId, + groupId=f"g{group_index + 100}", + progress=0, + finishedCount=0, + requiredCount=0, + numberOfTasks=self.groupSize, + ) + logger.info(f"{self.projectId} - create_groups - created groups dictionary") + + def create_tasks(self): + if len(self.groups) == 0: + raise ValueError("Groups needs to be created before tasks can be created.") + for group_id, group in self.groups.items(): + self.tasks[group_id] = [] + for i in range(self.groupSize): + # FIXME: We should try not to mutate values + image_metadata = self.images.pop() + task = ValidateImageTask( + taskId=image_metadata["sourceIdentifier"], + fileName=image_metadata["fileName"], + url=image_metadata["url"], + geometry="", + ) + self.tasks[group_id].append(task) + + # list now empty? 
if usual group size is not reached + # the actual number of tasks for the group is updated + if not self.images: + group.numberOfTasks = i + 1 + break diff --git a/mapswipe_workers/mapswipe_workers/project_types/validate_image/tutorial.py b/mapswipe_workers/mapswipe_workers/project_types/validate_image/tutorial.py new file mode 100644 index 000000000..b42b0be61 --- /dev/null +++ b/mapswipe_workers/mapswipe_workers/project_types/validate_image/tutorial.py @@ -0,0 +1,80 @@ +from dataclasses import dataclass + +from mapswipe_workers.definitions import logger +from mapswipe_workers.firebase.firebase import Firebase +from mapswipe_workers.project_types.tutorial import BaseTutorial +from mapswipe_workers.project_types.validate_image.project import ( + ValidateImageGroup, + ValidateImageTask, +) + + +@dataclass +class ValidateImageTutorialTask(ValidateImageTask): + # TODO(tnagorra): Check if we need projectId and groupId in tutorial task + projectId: str + groupId: str + referenceAnswer: int + screen: int + + +class ValidateImageTutorial(BaseTutorial): + + def __init__(self, tutorial_draft): + # this will create the basis attributes + super().__init__(tutorial_draft) + + self.groups = dict() + self.tasks = dict() + self.images = tutorial_draft["images"] + + def create_tutorial_groups(self): + """Create group for the tutorial based on provided examples in images.""" + + # NOTE: The groupId must be a numeric 101. It's hardcoded in save_tutorial_to_firebase + group = ValidateImageGroup( + groupId=101, + projectId=self.projectId, + numberOfTasks=len(self.images), + progress=0, + finishedCount=0, + requiredCount=0, + ) + self.groups[101] = group + + logger.info( + f"{self.projectId} - create_tutorial_groups - created groups dictionary" + ) + + def create_tutorial_tasks(self): + """Create the tasks dict based on provided examples in geojson file.""" + task_list = [] + for image_metadata in self.images: + image_metadata = ValidateImageTutorialTask( + projectId=self.projectId, + groupId=101, + taskId=image_metadata["sourceIdentifier"], + fileName=image_metadata["fileName"], + url=image_metadata["url"], + geometry="", + referenceAnswer=image_metadata["referenceAnswer"], + screen=image_metadata["screen"], + ) + task_list.append(image_metadata) + + if task_list: + self.tasks[101] = task_list + else: + logger.info(f"group in project {self.projectId} is not valid.") + + logger.info( + f"{self.projectId} - create_tutorial_tasks - created tasks dictionary" + ) + + def save_tutorial(self): + firebase = Firebase() + firebase.save_tutorial_to_firebase( + self, self.groups, self.tasks, useCompression=False + ) + logger.info(self.tutorialDraftId) + firebase.drop_tutorial_draft(self.tutorialDraftId) diff --git a/mapswipe_workers/mapswipe_workers/utils/process_mapillary.py b/mapswipe_workers/mapswipe_workers/utils/process_mapillary.py index 29f4363f7..54afc2be6 100644 --- a/mapswipe_workers/mapswipe_workers/utils/process_mapillary.py +++ b/mapswipe_workers/mapswipe_workers/utils/process_mapillary.py @@ -1,22 +1,20 @@ import os -from concurrent.futures import ThreadPoolExecutor, as_completed +from concurrent.futures import ProcessPoolExecutor +from functools import partial import mercantile import pandas as pd import requests -from shapely import ( - LineString, - MultiLineString, - MultiPolygon, - Point, - Polygon, - box, - unary_union, -) +from shapely import MultiPolygon, Point, Polygon, box, unary_union from shapely.geometry import shape from vt2geojson import tools as vt2geojson_tools -from 
mapswipe_workers.definitions import MAPILLARY_API_KEY, MAPILLARY_API_LINK, logger +from mapswipe_workers.definitions import ( + MAPILLARY_API_KEY, + MAPILLARY_API_LINK, + CustomError, + logger, +) from mapswipe_workers.utils.spatial_sampling import spatial_sampling @@ -44,7 +42,7 @@ def create_tiles(polygon, level): return tiles -def download_and_process_tile(row, attempt_limit=3): +def download_and_process_tile(row, polygon, kwargs, attempt_limit=3): z = row["z"] x = row["x"] y = row["y"] @@ -53,37 +51,24 @@ def download_and_process_tile(row, attempt_limit=3): attempt = 0 while attempt < attempt_limit: try: - r = requests.get(url) - assert r.status_code == 200, r.content - features = vt2geojson_tools.vt_bytes_to_geojson(r.content, x, y, z).get( - "features", [] - ) - data = [] - for feature in features: - geometry = feature.get("geometry", {}) - properties = feature.get("properties", {}) - geometry_type = geometry.get("type", None) - coordinates = geometry.get("coordinates", []) - - element_geometry = None - if geometry_type == "Point": - element_geometry = Point(coordinates) - elif geometry_type == "LineString": - element_geometry = LineString(coordinates) - elif geometry_type == "MultiLineString": - element_geometry = MultiLineString(coordinates) - elif geometry_type == "Polygon": - element_geometry = Polygon(coordinates[0]) - elif geometry_type == "MultiPolygon": - element_geometry = MultiPolygon(coordinates) - - # Append the dictionary with geometry and properties - row = {"geometry": element_geometry, **properties} - data.append(row) - - data = pd.DataFrame(data) - - if not data.empty: + data = get_mapillary_data(url, x, y, z) + if data.isna().all().all() is False or data.empty is False: + data = data[data["geometry"].apply(lambda point: point.within(polygon))] + target_columns = [ + "id", + "geometry", + "captured_at", + "is_pano", + "compass_angle", + "sequence", + "organization_id", + ] + for col in target_columns: + if col not in data.columns: + data[col] = None + if data.isna().all().all() is False or data.empty is False: + data = filter_results(data, **kwargs) + return data except Exception as e: print(f"An exception occurred while requesting a tile: {e}") @@ -93,8 +78,28 @@ def download_and_process_tile(row, attempt_limit=3): return None +def get_mapillary_data(url, x, y, z): + r = requests.get(url) + assert r.status_code == 200, r.content + features = vt2geojson_tools.vt_bytes_to_geojson(r.content, x, y, z).get( + "features", [] + ) + data = [] + data.extend( + [ + { + "geometry": Point(feature["geometry"]["coordinates"]), + **feature.get("properties", {}), + } + for feature in features + if feature.get("geometry", {}).get("type") == "Point" + ] + ) + return pd.DataFrame(data) + + def coordinate_download( - polygon, level, use_concurrency=True, attempt_limit=3, workers=os.cpu_count() * 4 + polygon, level, kwargs: dict, use_concurrency=True, workers=os.cpu_count() * 4 ): tiles = create_tiles(polygon, level) @@ -104,48 +109,32 @@ def coordinate_download( if not use_concurrency: workers = 1 - futures = [] - with ThreadPoolExecutor(max_workers=workers) as executor: - for index, row in tiles.iterrows(): - futures.append( - executor.submit(download_and_process_tile, row, attempt_limit) - ) - - for future in as_completed(futures): - if future is not None: - df = future.result() - - if df is not None and not df.empty: - downloaded_metadata.append(df) + downloaded_metadata = parallelized_processing( + downloaded_metadata, kwargs, polygon, tiles, workers + ) if 
len(downloaded_metadata): downloaded_metadata = pd.concat(downloaded_metadata, ignore_index=True) else: return pd.DataFrame(downloaded_metadata) - target_columns = [ - "id", - "geometry", - "captured_at", - "is_pano", - "compass_angle", - "sequence", - "organization_id", - ] - for col in target_columns: - if col not in downloaded_metadata.columns: - downloaded_metadata[col] = None - if ( - downloaded_metadata.isna().all().all() is False - or downloaded_metadata.empty is False - ): - downloaded_metadata = downloaded_metadata[ - downloaded_metadata["geometry"].apply( - lambda point: point.within(polygon) - ) - ] return downloaded_metadata +def parallelized_processing(data, kwargs, polygon, tiles, workers): + process_tile_with_args = partial( + download_and_process_tile, polygon=polygon, kwargs=kwargs + ) + with ProcessPoolExecutor(max_workers=workers) as executor: + futures = list( + executor.map(process_tile_with_args, tiles.to_dict(orient="records")) + ) + + for df in futures: + if df is not None and not df.empty: + data.append(df) + return data + + def geojson_to_polygon(geojson_data): if geojson_data["type"] == "FeatureCollection": features = geojson_data["features"] @@ -193,41 +182,32 @@ def filter_results( df = results_df.copy() if creator_id is not None: if df["creator_id"].isna().all(): - logger.exception( - "No Mapillary Feature in the AoI has a 'creator_id' value." - ) + logger.info("No Mapillary Feature in the AoI has a 'creator_id' value.") return None df = df[df["creator_id"] == creator_id] - if is_pano is not None: if df["is_pano"].isna().all(): - logger.exception("No Mapillary Feature in the AoI has a 'is_pano' value.") + logger.info("No Mapillary Feature in the AoI has a 'is_pano' value.") return None df = df[df["is_pano"] == is_pano] - if organization_id is not None: if df["organization_id"].isna().all(): - logger.exception( + logger.info( "No Mapillary Feature in the AoI has an 'organization_id' value." ) return None df = df[df["organization_id"] == organization_id] - if start_time is not None: if df["captured_at"].isna().all(): - logger.exception( - "No Mapillary Feature in the AoI has a 'captured_at' value." 
- ) + logger.info("No Mapillary Feature in the AoI has a 'captured_at' value.") return None df = filter_by_timerange(df, start_time, end_time) - return df def get_image_metadata( aoi_geojson, level=14, - attempt_limit=3, is_pano: bool = None, creator_id: int = None, organization_id: str = None, @@ -236,40 +216,30 @@ def get_image_metadata( randomize_order=False, sampling_threshold=None, ): + kwargs = { + "is_pano": is_pano, + "creator_id": creator_id, + "organization_id": organization_id, + "start_time": start_time, + "end_time": end_time, + } aoi_polygon = geojson_to_polygon(aoi_geojson) - downloaded_metadata = coordinate_download(aoi_polygon, level, attempt_limit) - + downloaded_metadata = coordinate_download(aoi_polygon, level, kwargs) if downloaded_metadata.empty or downloaded_metadata.isna().all().all(): - raise ValueError("No Mapillary Features in the AoI.") - - downloaded_metadata = downloaded_metadata[ - downloaded_metadata["geometry"].apply(lambda geom: isinstance(geom, Point)) - ] - - downloaded_metadata = filter_results( - downloaded_metadata, - creator_id, - is_pano, - organization_id, - start_time, - end_time, - ) - - if ( - downloaded_metadata is None - or downloaded_metadata.empty - or downloaded_metadata.isna().all().all() - ): - raise ValueError("No Mapillary Features in the AoI match the filter criteria.") - - downloaded_metadata = spatial_sampling(downloaded_metadata, sampling_threshold) + raise CustomError( + "No Mapillary Features in the AoI or no Features match the filter criteria." + ) + if sampling_threshold is not None: + downloaded_metadata = spatial_sampling(downloaded_metadata, sampling_threshold) if randomize_order is True: downloaded_metadata = downloaded_metadata.sample(frac=1).reset_index(drop=True) + downloaded_metadata = downloaded_metadata.drop_duplicates(subset=["geometry"]) + total_images = len(downloaded_metadata) if total_images > 100000: - raise ValueError( + raise CustomError( f"Too many Images with selected filter options for the AoI: {total_images}" ) diff --git a/mapswipe_workers/mapswipe_workers/utils/spatial_sampling.py b/mapswipe_workers/mapswipe_workers/utils/spatial_sampling.py index 97302b945..67f35c7e9 100644 --- a/mapswipe_workers/mapswipe_workers/utils/spatial_sampling.py +++ b/mapswipe_workers/mapswipe_workers/utils/spatial_sampling.py @@ -143,6 +143,10 @@ def spatial_sampling(df, interval_length): if interval_length: sequence_df = filter_points(sequence_df, interval_length) + if "is_pano" in sequence_df.columns: + # below line prevents FutureWarning + # (https://stackoverflow.com/questions/73800841/add-series-as-a-new-row-into-dataframe-triggers-futurewarning) + sequence_df["is_pano"] = sequence_df["is_pano"].astype(bool) sampled_sequence_df = pd.concat([sampled_sequence_df, sequence_df], axis=0) # reverse order such that sequence are in direction of travel diff --git a/mapswipe_workers/python_scripts/extract_project_population_stats.py b/mapswipe_workers/python_scripts/extract_project_population_stats.py new file mode 100644 index 000000000..072f57f35 --- /dev/null +++ b/mapswipe_workers/python_scripts/extract_project_population_stats.py @@ -0,0 +1,143 @@ +import argparse +import os +import warnings + +import geopandas as gpd +import pandas as pd +import rasterio +import requests +from exactextract import exact_extract +from tqdm import tqdm + +warnings.filterwarnings("ignore") + + +def project_list(id_file): + """Reads Mapswipe project IDs from the user input file""" + + with open(id_file, "r") as file: + ids = file.read().strip() + 
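+ # The file holds a single comma-separated list of project IDs; split it and trim whitespace around each entry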
+ project_list = ids.split(",") + project_list = [id.strip() for id in project_list] + + return project_list + + +def population_raster_download(): + """Downloads 1km resolution global population raster for 2020 from WorldPop to the current working directory.""" + + url = "https://data.worldpop.org/GIS/Population/Global_2000_2020/2020/0_Mosaicked/ppp_2020_1km_Aggregated.tif" + + output_file = "ppp_2020_1km_Aggregated.tif" + + output_file_path = os.path.join(os.getcwd(), output_file) + + if os.path.exists(output_file_path): + + print("Population raster already exists. Moving to next steps......") + return output_file_path + + else: + + response = requests.get(url, stream=True) + size = int(response.headers.get("content-length", 0)) + block_size = 1024 + try: + with open(output_file, "wb") as file, tqdm( + desc="Downloading population raster", + total=size, + unit="B", + unit_scale=True, + unit_divisor=1024, + ) as bar: + for chunk in response.iter_content(block_size): + if chunk: + file.write(chunk) + bar.update(len(chunk)) + + print("Download complete:", output_file_path) + return output_file_path + + except requests.RequestException as e: + print(f"Error downloading data: {e}") + + +def population_count(list, dir, raster): + """Gets boundary data for projects from Mapswipe API and calculates zonal statistics + with global population raster and individual project boundaries.""" + + dict = {} + worldpop = rasterio.open(raster) + + for id in list: + url = f"https://apps.mapswipe.org/api/project_geometries/project_geom_{id}.geojson" + response = requests.get(url) + + try: + geojson = response.json() + for feature in geojson["features"]: + geometry = feature.get("geometry", {}) + if "coordinates" in geometry: + if geometry["type"] == "Polygon": + geometry["coordinates"] = [ + [[coord[0], coord[1]] for coord in polygon] + for polygon in geometry["coordinates"] + ] + elif geometry["type"] == "MultiPolygon": + geometry["coordinates"] = [ + [ + [[coord[0], coord[1]] for coord in polygon] + for polygon in multipolygon + ] + for multipolygon in geometry["coordinates"] + ] + gdf = gpd.GeoDataFrame.from_features(geojson["features"]) + gdf.set_crs("EPSG:4326", inplace=True) + no_of_people = exact_extract(worldpop, gdf, "sum") + no_of_people = round(no_of_people[0]["properties"]["sum"]) + + dict[id] = no_of_people + + except requests.RequestException as e: + print(f"Error in retrieval of project boundary from Mapswipe: {e}") + + df = pd.DataFrame( + dict.items(), columns=["Project_IDs", "Number of people impacted"] + ) + + df["Project_IDs"] = "https://mapswipe.org/en/projects/" + df["Project_IDs"] + + df.to_csv(f"{dir}/projects_population.csv") + + print(f"CSV file successfully created at {dir}/number_of_people_impacted.csv") + + +if __name__ == "__main__": + """Generates population stats for individual Mapswipe projects""" + parser = argparse.ArgumentParser() + parser.add_argument( + "-t", + "--text_file", + help=( + "Path to the text file containing project IDs from Mapswipe. 
The file should contain IDs in this manner: " + "-O8kulfxD4zRYQ2T1aXf, -O8kyOCreRGklW15n8RU, -O8kzSy9105axIPOAJjO, -OAwWv9rnJqPXTpWxO8-, " + "-OB-tettI2np7t3Gpu-k" + ), + type=str, + required=True, + ) + parser.add_argument( + "-o", + "--output_directory", + help="Path to the directory to store the output", + type=str, + required=True, + ) + args = parser.parse_args() + + population_count( + project_list(args.text_file), + args.output_directory, + population_raster_download(), + ) diff --git a/mapswipe_workers/sample_data/street/README.md b/mapswipe_workers/sample_data/street/README.md new file mode 100644 index 000000000..dfa52293e --- /dev/null +++ b/mapswipe_workers/sample_data/street/README.md @@ -0,0 +1,58 @@ +# Creating a New 'Street' Tutorial +### Useful Links +- MapSwipe Development Server: [https://dev-managers.mapswipe.org] +- MapSwipe Development App Installation Guide: [https://github.com/mapswipe/mapswipe/wiki/How-to-test-the-development-version-of-MapSwipe](https://github.com/mapswipe/mapswipe/wiki/How-to-test-the-development-version-of-MapSwipe) + +## Select appropriate Mapillary imagery for the tutorial (with JOSM and Mapillary plug-in) + +1. Open JOSM. Make sure the [JOSM Mapillary plug-in](https://wiki.openstreetmap.org/wiki/JOSM/Plugins/Mapillary) is installed +2. **File > Download data**. Select an area in which you expect appropriate example imagery available on Mapillary and **Download** +3. **Imagery > Mapillary** to download sequences and images for the current area +4. If helpful, use the Mapillary filter dialog to filter images (for start and end date, user and/or organization) +5. Click **Mapillary** in Layers controls to select the Mapillary layer +6. Zoom in until you can see images location markers (green dots) +7. Click on the dots to view the images +8. Once you have found an image that you would like to use in your tutorial, **File > Export Mapillary images** and select **Export selected images** +9. Click **Explore** +10. Choose a parent folder for all images in this tutorial +11. **OK** +12. Repeat until you have exported all the images that you would like to use in the tutorial. Use the same parent folder for all images. + +## Add exported Mapillary images as geotagged images in QGIS + +1. Open QGIS +2. **Processing Toolbox > Vector creation > Import geotagged photos** +3. Select the folder containing all exported Mapillary images and check **Scan recursively** +4. **Run** +5. **Properties > Display** and add `` to HTML Map Tip to show images on a pop up +6. **View > Show Map Tips** +7. If you keep the mouse tip on the image markers, a pop up with the image will appear + +## Edit geotagged images in QGIS + +1. Right click on layer. +2. **Properties > Field** +3. **Toggle editing mode** +4. Change the name of the `filename` column to `id` +5. Add `Integer (32 bit)` columns titled `screen` and `reference`. +6. Populate the `reference` and `screen` fields. + * `reference` is the value of the correct answer option for the image. + * `screen` determines the order of the images in the tutorial and should start with `1`. +7. Delete any rows representing images that you do not want to use + +## Export as GeoJSON + +1. **Toggle editing mode** +2. **Save** +3. Right click, **Export > Save Features As...** +4. Choose Format GeoJSON, CRS EPSG:4326 - WGS 84 +5. Select only `id`, `reference` and `screen` as fields to export. Deselect all other fields. +6. Choose a file name and location and click OK to save + +## Create tutorial + +1. 
Go to https://dev-managers.mapswipe.org/ +2. Select **Projects** and then **Add New Tutorial**. +3. Check that **Project Type** is set to **Street**. +4. Fill in all the fields, following the instructions. Upload your `GeoJSON` you just created with the scenarios where it says **Scenario Pages**. +5. Submit diff --git a/mapswipe_workers/sample_data/street/street_tutorial_sample_scenario.geojson b/mapswipe_workers/sample_data/street/street_tutorial_sample_scenario.geojson new file mode 100644 index 000000000..8f5b236f0 --- /dev/null +++ b/mapswipe_workers/sample_data/street/street_tutorial_sample_scenario.geojson @@ -0,0 +1,17 @@ +{ + "type": "FeatureCollection", + "name": "cobblestone-scenario", + "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, + "features": [ + { + "type": "Feature", + "properties": { "id": "378811598610667", "reference": 1, "screen": 2 }, + "geometry": { "type": "Point", "coordinates": [ 13.45285, 52.508467, 0.0 ] } + }, + { + "type": "Feature", + "properties": { "id": "1171343450849316", "reference": 0, "screen": 1 }, + "geometry": { "type": "Point", "coordinates": [ 13.4514123, 52.5103378, 0.0 ] } + } + ] +} diff --git a/mapswipe_workers/tests/fixtures/projectDrafts/street.json b/mapswipe_workers/tests/fixtures/projectDrafts/street.json index 1dd5b452a..67d1d8b04 100644 --- a/mapswipe_workers/tests/fixtures/projectDrafts/street.json +++ b/mapswipe_workers/tests/fixtures/projectDrafts/street.json @@ -46,6 +46,5 @@ "requestingOrganisation": "test", "verificationNumber": 3, "groupSize": 25, - "startTimestamp": "2019-07-01T00:00:00.000Z", "samplingThreshold": 0.1 } diff --git a/mapswipe_workers/tests/fixtures/tutorialDrafts/change_detection.json b/mapswipe_workers/tests/fixtures/tutorialDrafts/change_detection.json index 4b857eaa0..8c0f817bd 100644 --- a/mapswipe_workers/tests/fixtures/tutorialDrafts/change_detection.json +++ b/mapswipe_workers/tests/fixtures/tutorialDrafts/change_detection.json @@ -4,6 +4,7 @@ "exampleImage2": "", "lookFor": "damaged buildings", "name": "change_detection_tutorial", + "tutorialDraftId": "test_tile_change_detection", "projectType": 3, "screens": [ null, diff --git a/mapswipe_workers/tests/fixtures/tutorialDrafts/completeness.json b/mapswipe_workers/tests/fixtures/tutorialDrafts/completeness.json index b08c10dd7..0752c71a1 100644 --- a/mapswipe_workers/tests/fixtures/tutorialDrafts/completeness.json +++ b/mapswipe_workers/tests/fixtures/tutorialDrafts/completeness.json @@ -4,6 +4,7 @@ "exampleImage2": "https://firebasestorage.googleapis.com/v0/b/heigit-crowdmap.appspot.com/o/projectImages%2F1686065132355-tutorial-image-2-1x1.png?alt=media&token=bf8e67bc-d34c-4676-ba17-56bffc6b3f2d", "lookFor": "buildings", "name": "completeness_tutorial", + "tutorialDraftId": "test_tile_completeness", "projectType": 4, "screens": { "categories": { diff --git a/mapswipe_workers/tests/fixtures/tutorialDrafts/footprint.json b/mapswipe_workers/tests/fixtures/tutorialDrafts/footprint.json index b8b31a9f9..b4e26e7bd 100644 --- a/mapswipe_workers/tests/fixtures/tutorialDrafts/footprint.json +++ b/mapswipe_workers/tests/fixtures/tutorialDrafts/footprint.json @@ -1,5 +1,6 @@ { "createdBy": "LtCUyou6CnSSc1H0Q0nDrN97x892", + "tutorialDraftId": "test_footprint_tutorial", "customOptions": [ { "description": "the shape does outline a building in the image", diff --git a/mapswipe_workers/tests/fixtures/tutorialDrafts/street.json b/mapswipe_workers/tests/fixtures/tutorialDrafts/street.json new file mode 100644 index 000000000..1385ffb52 
--- /dev/null +++ b/mapswipe_workers/tests/fixtures/tutorialDrafts/street.json @@ -0,0 +1,110 @@ +{ + "createdBy": "atCSosZACaN0qhcVjtMO1tq9d1G3", + "tutorialDraftId": "test_tile_classification", + "informationPages": [ + { + "blocks": [ + { + "blockNumber": 1, + "blockType": "text", + "textDescription": "This is the first information page" + }, + { + "blockNumber": 2, + "blockType": "image", + "image": "https://firebasestorage.googleapis.com/v0/b/dev-mapswipe.appspot.com/o/tutorialImages%2F1739963139725-block-image-2-1x1.png?alt=media&token=ae584dcd-d351-4bfe-be5f-1e0d38547f72" + } + ], + "pageNumber": 1, + "title": "Information page 1" + } + ], + "lookFor": "cobblestone", + "name": "cobblestone-tutorial", + "projectType": 7, + "screens": [ + null, + { + "hint": { + "description": "This seems to be a tarmac surface.", + "icon": "check", + "title": "Tarmac" + }, + "instructions": { + "description": "Check out if the road surface material is cobblestone here", + "icon": "check", + "title": "Is this cobblestone?" + }, + "success": { + "description": "Correct, this is not cobblestone", + "icon": "check", + "title": "Nice!" + } + }, + { + "hint": { + "description": "That surface does look like cobblestone!", + "icon": "heart-outline", + "title": "Cobblestone" + }, + "instructions": { + "description": "Does this look like cobblestone?", + "icon": "egg-outline", + "title": "How about this one?" + }, + "success": { + "description": "Correct", + "icon": "search-outline", + "title": "Correct" + } + } + ], + "tileServer": { + "credits": "© 2019 Microsoft Corporation, Earthstar Geographics SIO", + "name": "bing" + }, + "tutorialTasks": { + "crs": { + "properties": { + "name": "urn:ogc:def:crs:OGC:1.3:CRS84" + }, + "type": "name" + }, + "features": [ + { + "geometry": { + "coordinates": [ + 13.4514123, + 52.5103378, + 0 + ], + "type": "Point" + }, + "properties": { + "id": "1171343450849316", + "reference": 1, + "screen": 1 + }, + "type": "Feature" + }, + { + "geometry": { + "coordinates": [ + 13.45285, + 52.508467, + 0 + ], + "type": "Point" + }, + "properties": { + "id": "378811598610667", + "reference": 0, + "screen": 2 + }, + "type": "Feature" + } + ], + "name": "cobblestone-scenario", + "type": "FeatureCollection" + } +} \ No newline at end of file diff --git a/mapswipe_workers/tests/integration/set_up.py b/mapswipe_workers/tests/integration/set_up.py index 74adc6fda..1c3c0bdf4 100644 --- a/mapswipe_workers/tests/integration/set_up.py +++ b/mapswipe_workers/tests/integration/set_up.py @@ -16,20 +16,28 @@ def set_firebase_test_data( - project_type: str, data_type: str, fixture_name: str, identifier: str + project_type: str, + data_type: str, + fixture_name: str, + identifier: str, + tutorial_id: str = None, ): test_dir = os.path.dirname(__file__) fixture_name = fixture_name + ".json" file_path = os.path.join( test_dir, "fixtures", project_type, data_type, fixture_name ) - upload_file_to_firebase(file_path, data_type, identifier) + upload_file_to_firebase(file_path, data_type, identifier, tutorial_id=tutorial_id) -def upload_file_to_firebase(file_path: str, data_type: str, identifier: str): +def upload_file_to_firebase( + file_path: str, data_type: str, identifier: str, tutorial_id: str = None +): with open(file_path) as test_file: test_data = json.load(test_file) + if tutorial_id: + test_data["tutorialId"] = tutorial_id fb_db = auth.firebaseDB() ref = fb_db.reference(f"/v2/{data_type}/{identifier}") ref.set(test_data) @@ -85,15 +93,20 @@ def create_test_project( 
set_postgres_test_data(project_type, "users", "user") set_firebase_test_data(project_type, "user_groups", "user_group", "") set_firebase_test_data(project_type, "results", fixture_name, project_id) - set_postgres_test_data(project_type, "mapping_sessions", fixture_name, columns=[ - "project_id", - "group_id", - "user_id", - "mapping_session_id", - "start_time", - "end_time", - "items_count", - ]) + set_postgres_test_data( + project_type, + "mapping_sessions", + fixture_name, + columns=[ + "project_id", + "group_id", + "user_id", + "mapping_session_id", + "start_time", + "end_time", + "items_count", + ], + ) set_postgres_test_data(project_type, mapping_sessions_results, fixture_name) if create_user_group_session_data: set_postgres_test_data( @@ -108,7 +121,9 @@ def create_test_project( "created_at", ], ) - set_postgres_test_data(project_type, "mapping_sessions_user_groups", fixture_name) + set_postgres_test_data( + project_type, "mapping_sessions_user_groups", fixture_name + ) time.sleep(5) # Wait for Firebase Functions to complete return project_id @@ -131,12 +146,24 @@ def create_test_user(project_type: str, user_id: str = None) -> str: def create_test_project_draft( - project_type: str, fixture_name: str = "user", identifier: str = "" + project_type: str, + fixture_name: str = "user", + identifier: str = "", + tutorial_id: str = None, ) -> str: """ Create test project drafts in Firebase and return project ids. Project drafts in Firebase are created by project manager using the dashboard. """ + if tutorial_id: + set_firebase_test_data( + project_type, + "projectDrafts", + fixture_name, + identifier, + tutorial_id=tutorial_id, + ) + return identifier if not identifier: identifier = f"test_{fixture_name}" set_firebase_test_data(project_type, "projectDrafts", fixture_name, identifier) diff --git a/mapswipe_workers/tests/integration/set_up_db.sql b/mapswipe_workers/tests/integration/set_up_db.sql index f954d3a8c..b2b23f328 100644 --- a/mapswipe_workers/tests/integration/set_up_db.sql +++ b/mapswipe_workers/tests/integration/set_up_db.sql @@ -1,5 +1,6 @@ -- noinspection SqlNoDataSourceInspectionForFile CREATE EXTENSION IF NOT EXISTS postgis; +CREATE EXTENSION IF NOT EXISTS unaccent; CREATE TABLE IF NOT EXISTS projects ( created timestamp, @@ -30,7 +31,7 @@ CREATE TABLE IF NOT EXISTS groups ( required_count int, progress int, project_type_specifics json, - -- total_area & time_spent_max_allowed are maintaned and used by aggregated module + -- total_area & time_spent_max_allowed are maintained and used by aggregated module total_area float DEFAULT NULL, time_spent_max_allowed float DEFAULT NULL, PRIMARY KEY (project_id, group_id), diff --git a/mapswipe_workers/tests/integration/tear_down.py b/mapswipe_workers/tests/integration/tear_down.py index 61760781c..33af5d52d 100644 --- a/mapswipe_workers/tests/integration/tear_down.py +++ b/mapswipe_workers/tests/integration/tear_down.py @@ -8,7 +8,7 @@ from mapswipe_workers import auth -def delete_test_data(project_id: str) -> None: +def delete_test_data(project_id: str, tutorial_id: str = None) -> None: """ Delete test project indluding groups, tasks and results from Firebase and Postgres @@ -38,6 +38,12 @@ def delete_test_data(project_id: str) -> None: ref = fb_db.reference(f"v2/users/{project_id}") ref.delete() + if tutorial_id is not None: + ref = fb_db.reference(f"v2/projects/{tutorial_id}") + ref.delete() + ref = fb_db.reference(f"v2/tutorialDrafts/{tutorial_id}") + ref.delete() + # Clear out the user-group used in test. 
# XXX: Use a firebase simulator for running test. # For CI/CD, use a real firebase with scope using commit hash, diff --git a/mapswipe_workers/tests/integration/test_create_tutorial.py b/mapswipe_workers/tests/integration/test_create_tutorial.py new file mode 100644 index 000000000..e6db39579 --- /dev/null +++ b/mapswipe_workers/tests/integration/test_create_tutorial.py @@ -0,0 +1,45 @@ +import unittest + +from click.testing import CliRunner + +from mapswipe_workers import auth, mapswipe_workers +from mapswipe_workers.utils.create_directories import create_directories +from tests.integration import set_up, tear_down + + +class TestCreateTileClassificationProject(unittest.TestCase): + def setUp(self): + self.tutorial_id = set_up.create_test_tutorial_draft( + "tile_classification", + "tile_classification", + "test_tile_classification_tutorial", + ) + + self.project_id = set_up.create_test_project_draft( + "tile_classification", + "tile_classification", + "test_tile_classification_tutorial", + tutorial_id=self.tutorial_id, + ) + create_directories() + + def tearDown(self): + tear_down.delete_test_data(self.project_id, self.tutorial_id) + + def test_create_tile_classification_project_and_tutorial(self): + runner = CliRunner() + runner.invoke(mapswipe_workers.run_create_tutorials, catch_exceptions=False) + runner.invoke(mapswipe_workers.run_create_projects, catch_exceptions=False) + + fb_db = auth.firebaseDB() + ref = fb_db.reference(f"/v2/projects/{self.project_id}") + result = ref.get() + self.assertEqual(result["tutorialId"], self.tutorial_id) + + ref = fb_db.reference(f"/v2/projects/{self.tutorial_id}") + result = ref.get(shallow=True) + self.assertIsNotNone(result) + + +if __name__ == "__main__": + unittest.main() diff --git a/mapswipe_workers/tests/unittests/test_process_mapillary.py b/mapswipe_workers/tests/unittests/test_process_mapillary.py index b5b8302f5..341c6a601 100644 --- a/mapswipe_workers/tests/unittests/test_process_mapillary.py +++ b/mapswipe_workers/tests/unittests/test_process_mapillary.py @@ -7,6 +7,7 @@ from shapely import wkt from shapely.geometry import GeometryCollection, MultiPolygon, Point, Polygon +from mapswipe_workers.definitions import CustomError from mapswipe_workers.utils.process_mapillary import ( coordinate_download, create_tiles, @@ -50,6 +51,7 @@ def setUp(self): self.test_polygon = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]) self.empty_polygon = Polygon() self.empty_geometry = GeometryCollection() + self.row = pd.Series({"x": 1, "y": 1, "z": self.level}) def test_create_tiles_with_valid_polygon(self): tiles = create_tiles(self.test_polygon, self.level) @@ -178,26 +180,26 @@ def test_download_and_process_tile_success(self, mock_get, mock_vt2geojson): row = {"x": 1, "y": 1, "z": 14} - result = download_and_process_tile(row) + polygon = wkt.loads("POLYGON ((-1 -1, -1 1, 1 1, 1 -1, -1 -1))") + result = download_and_process_tile(row, polygon, {}) self.assertIsInstance(result, pd.DataFrame) self.assertEqual(len(result), 1) self.assertEqual(result["geometry"][0].wkt, "POINT (0 0)") @patch("mapswipe_workers.utils.process_mapillary.requests.get") def test_download_and_process_tile_failure(self, mock_get): - # Mock a failed response + mock_response = MagicMock() mock_response.status_code = 500 mock_get.return_value = mock_response - row = pd.Series({"x": 1, "y": 1, "z": self.level}) - result = download_and_process_tile(row) + result = download_and_process_tile(self.row, self.test_polygon, {}) self.assertIsNone(result) - 
@patch("mapswipe_workers.utils.process_mapillary.download_and_process_tile") - def test_coordinate_download(self, mock_download_and_process_tile): + @patch("mapswipe_workers.utils.process_mapillary.get_mapillary_data") + def test_download_and_process_tile_spatial_filtering(self, mock_get_mapillary_data): inside_points = [ (0.2, 0.2), (0.5, 0.5), @@ -215,20 +217,20 @@ def test_coordinate_download(self, mock_download_and_process_tile): for x, y in points ] - mock_download_and_process_tile.return_value = pd.DataFrame(data) + mock_get_mapillary_data.return_value = pd.DataFrame(data) - metadata = coordinate_download(self.test_polygon, self.level) + metadata = download_and_process_tile(self.row, self.test_polygon, {}) metadata = metadata.drop_duplicates() self.assertEqual(len(metadata), len(inside_points)) self.assertIsInstance(metadata, pd.DataFrame) - @patch("mapswipe_workers.utils.process_mapillary.download_and_process_tile") - def test_coordinate_download_with_failures(self, mock_download_and_process_tile): - mock_download_and_process_tile.return_value = pd.DataFrame() + @patch("mapswipe_workers.utils.process_mapillary.parallelized_processing") + def test_coordinate_download_with_failures(self, mock_parallelized_processing): + mock_parallelized_processing.return_value = pd.DataFrame() - metadata = coordinate_download(self.test_polygon, self.level) + metadata = coordinate_download(self.test_polygon, self.level, {}) self.assertTrue(metadata.empty) @@ -291,7 +293,7 @@ def test_filter_missing_columns(self): "is_pano", "organization_id", "captured_at", - ] # Add your column names here + ] for column in columns_to_check: df_copy = self.fixture_df.copy() df_copy[column] = None @@ -309,40 +311,13 @@ def test_get_image_metadata(self, mock_coordinate_download): self.assertIn("ids", result) self.assertIn("geometries", result) - @patch("mapswipe_workers.utils.process_mapillary.coordinate_download") - def test_get_image_metadata_filtering(self, mock_coordinate_download): - mock_coordinate_download.return_value = self.fixture_df - - params = { - "is_pano": True, - "start_time": "2016-01-20 00:00:00", - "end_time": "2022-01-21 23:59:59", - } - - result = get_image_metadata(self.fixture_data, **params) - self.assertIsInstance(result, dict) - self.assertIn("ids", result) - self.assertIn("geometries", result) - - @patch("mapswipe_workers.utils.process_mapillary.coordinate_download") - def test_get_image_metadata_no_rows(self, mock_coordinate_download): - mock_coordinate_download.return_value = self.fixture_df - - params = { - "is_pano": True, - "start_time": "1916-01-20 00:00:00", - "end_time": "1922-01-21 23:59:59", - } - with self.assertRaises(ValueError): - get_image_metadata(self.fixture_data, **params) - @patch("mapswipe_workers.utils.process_mapillary.coordinate_download") def test_get_image_metadata_empty_response(self, mock_coordinate_download): df = self.fixture_df.copy() df = df.drop(df.index) mock_coordinate_download.return_value = df - with self.assertRaises(ValueError): + with self.assertRaises(CustomError): get_image_metadata(self.fixture_data) @patch("mapswipe_workers.utils.process_mapillary.filter_results") @@ -350,16 +325,26 @@ def test_get_image_metadata_empty_response(self, mock_coordinate_download): def test_get_image_metadata_size_restriction( self, mock_coordinate_download, mock_filter_results ): - mock_df = pd.DataFrame({"ID": range(1, 100002)}) - mock_df["geometry"] = self.test_polygon - mock_df["captured_at"] = range(1, 100002) - mock_df["sequence_id"] = 1 - 
mock_filter_results.return_value = mock_df - mock_coordinate_download.return_value = self.fixture_df - - with self.assertRaises(ValueError): + mock_df = pd.DataFrame({"id": range(1, 100002), "geometry": range(1, 100002)}) + mock_coordinate_download.return_value = mock_df + with self.assertRaises(CustomError): get_image_metadata(self.fixture_data) + @patch("mapswipe_workers.utils.process_mapillary.coordinate_download") + def test_get_image_metadata_drop_duplicates(self, mock_coordinate_download): + test_df = pd.DataFrame( + { + "id": [1, 2, 2, 3, 4, 4, 5], + "geometry": ["a", "b", "b", "c", "d", "d", "e"], + } + ) + mock_coordinate_download.return_value = test_df + return_dict = get_image_metadata(self.fixture_data) + + return_df = pd.DataFrame(return_dict) + + self.assertNotEqual(len(return_df), len(test_df)) + if __name__ == "__main__": unittest.main() diff --git a/mapswipe_workers/tests/unittests/test_tutorial_arbitrary_geometry_footprint.py b/mapswipe_workers/tests/unittests/test_tutorial_arbitrary_geometry_footprint.py new file mode 100644 index 000000000..3d20b6289 --- /dev/null +++ b/mapswipe_workers/tests/unittests/test_tutorial_arbitrary_geometry_footprint.py @@ -0,0 +1,27 @@ +import os +import unittest + +from mapswipe_workers.project_types import FootprintTutorial +from tests.fixtures import FIXTURE_DIR, get_fixture + + +class TestTutorial(unittest.TestCase): + def test_init_arbitrary_geometry_footprint_project(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "footprint.json") + ) + self.assertIsNotNone(FootprintTutorial(tutorial_draft=tutorial_draft)) + + def test_create_arbitrary_geometry_footprint_tasks(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "footprint.json") + ) + tutorial = FootprintTutorial(tutorial_draft=tutorial_draft) + tutorial.create_tutorial_groups() + tutorial.create_tutorial_tasks() + self.assertTrue(tutorial.groups) + self.assertTrue(tutorial.tasks) + + +if __name__ == "__main__": + unittest.main() diff --git a/mapswipe_workers/tests/unittests/test_tutorial_street.py b/mapswipe_workers/tests/unittests/test_tutorial_street.py new file mode 100644 index 000000000..6dd9b0127 --- /dev/null +++ b/mapswipe_workers/tests/unittests/test_tutorial_street.py @@ -0,0 +1,27 @@ +import os +import unittest + +from mapswipe_workers.project_types import StreetTutorial +from tests.fixtures import FIXTURE_DIR, get_fixture + + +class TestTutorial(unittest.TestCase): + def test_init_street_tutorial(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "street.json") + ) + self.assertIsNotNone(StreetTutorial(tutorial_draft=tutorial_draft)) + + def test_create_street_tasks(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "street.json") + ) + tutorial = StreetTutorial(tutorial_draft=tutorial_draft) + tutorial.create_tutorial_groups() + tutorial.create_tutorial_tasks() + self.assertTrue(tutorial.groups) + self.assertTrue(tutorial.tasks) + + +if __name__ == "__main__": + unittest.main() diff --git a/mapswipe_workers/tests/unittests/test_tutorial_tile_change_detection.py b/mapswipe_workers/tests/unittests/test_tutorial_tile_change_detection.py new file mode 100644 index 000000000..e394fd607 --- /dev/null +++ b/mapswipe_workers/tests/unittests/test_tutorial_tile_change_detection.py @@ -0,0 +1,27 @@ +import os +import unittest + +from mapswipe_workers.project_types import ChangeDetectionTutorial +from tests.fixtures import FIXTURE_DIR, get_fixture 
+ + +class TestTutorial(unittest.TestCase): + def test_init_tile_change_detection_project(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "change_detection.json") + ) + self.assertIsNotNone(ChangeDetectionTutorial(tutorial_draft=tutorial_draft)) + + def test_create_tile_change_detection_tasks(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "change_detection.json") + ) + tutorial = ChangeDetectionTutorial(tutorial_draft=tutorial_draft) + tutorial.create_tutorial_groups() + tutorial.create_tutorial_tasks() + self.assertTrue(tutorial.groups) + self.assertTrue(tutorial.tasks) + + +if __name__ == "__main__": + unittest.main() diff --git a/mapswipe_workers/tests/unittests/test_tutorial.py b/mapswipe_workers/tests/unittests/test_tutorial_tile_classification.py similarity index 100% rename from mapswipe_workers/tests/unittests/test_tutorial.py rename to mapswipe_workers/tests/unittests/test_tutorial_tile_classification.py diff --git a/mapswipe_workers/tests/unittests/test_tutorial_tile_completeness.py b/mapswipe_workers/tests/unittests/test_tutorial_tile_completeness.py new file mode 100644 index 000000000..972c412ca --- /dev/null +++ b/mapswipe_workers/tests/unittests/test_tutorial_tile_completeness.py @@ -0,0 +1,27 @@ +import os +import unittest + +from mapswipe_workers.project_types import CompletenessTutorial +from tests.fixtures import FIXTURE_DIR, get_fixture + + +class TestTutorial(unittest.TestCase): + def test_init_tile_completeness_project(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "completeness.json") + ) + self.assertIsNotNone(CompletenessTutorial(tutorial_draft=tutorial_draft)) + + def test_create_tile_completeness_tasks(self): + tutorial_draft = get_fixture( + os.path.join(FIXTURE_DIR, "tutorialDrafts", "completeness.json") + ) + tutorial = CompletenessTutorial(tutorial_draft=tutorial_draft) + tutorial.create_tutorial_groups() + tutorial.create_tutorial_tasks() + self.assertTrue(tutorial.groups) + self.assertTrue(tutorial.tasks) + + +if __name__ == "__main__": + unittest.main() diff --git a/postgres/initdb.sql b/postgres/initdb.sql index f954d3a8c..b2b23f328 100644 --- a/postgres/initdb.sql +++ b/postgres/initdb.sql @@ -1,5 +1,6 @@ -- noinspection SqlNoDataSourceInspectionForFile CREATE EXTENSION IF NOT EXISTS postgis; +CREATE EXTENSION IF NOT EXISTS unaccent; CREATE TABLE IF NOT EXISTS projects ( created timestamp, @@ -30,7 +31,7 @@ CREATE TABLE IF NOT EXISTS groups ( required_count int, progress int, project_type_specifics json, - -- total_area & time_spent_max_allowed are maintaned and used by aggregated module + -- total_area & time_spent_max_allowed are maintained and used by aggregated module total_area float DEFAULT NULL, time_spent_max_allowed float DEFAULT NULL, PRIMARY KEY (project_id, group_id),