Merge branch 'backend' into 'develop'
Backend See merge request tjohn/cc-data!5
This commit is contained in:
commit
316cb4f663
2
.gitignore
vendored
2
.gitignore
vendored
@ -21,3 +21,5 @@
|
|||||||
# System Files
|
# System Files
|
||||||
.DS_Store
|
.DS_Store
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|
||||||
|
node_modules
|
||||||
40
README.md
40
README.md
@ -2,9 +2,45 @@
|
|||||||
|
|
||||||
Campus Cup AKMC Data Traveloptimizer
|
Campus Cup AKMC Data Traveloptimizer
|
||||||
|
|
||||||
## Installation
|
## Backend
|
||||||
|
|
||||||
|
### Requirements
|
||||||
|
- MariaDB or MySQL
|
||||||
|
- node `10.12` or higher
|
||||||
|
- Configure database in `.env`-file or environment variables. See `.env` for reference
|
||||||
|
- Set API-Key for meteostat.net in `.env`-file or environment variable
|
||||||
|
- import `setup.sql` for sample data
|
||||||
|
|
||||||
|
### Start
|
||||||
|
- Run `$(cd backend && npm run start)`
|
||||||
|
- call http://localhost:3000/v1/update/climate to fetch climate data for sample entries.
|
||||||
|
|
||||||
|
### Search
|
||||||
|
Customize your search with query parameters. For now, only climate parameters are supported. If you omit climate queries, all climate parameters will be randomized.
|
||||||
|
|
||||||
|
The following queries are supported for now:
|
||||||
|
- from=YYYY-MM-DD _(required)_
|
||||||
|
- to=YYYY-MM-DD _(required)_
|
||||||
|
- temperature=NUMBER,NUMBER
|
||||||
|
- raindays=NUMBER,NUMBER
|
||||||
|
- sunhours=NUMBER,NUMBER
|
||||||
|
- percipitation=NUMBER,NUMBER
|
||||||
|
|
||||||
|
__Examples:__
|
||||||
|
http://localhost:3000/v1/search?from=2020-06-14&to=2020-07-29&temperature=27,29&raindays=8,12&sunhours=250,300
|
||||||
|
http://localhost:3000/v1/search?from=2020-06-14&to=2020-07-29
|
||||||
|
|
||||||
|
|
||||||
|
### More
|
||||||
|
To get more search results, add more entries with meteostat station IDs to the `regions` table in the database
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Frontend
|
||||||
|
|
||||||
|
### Installation
|
||||||
- Install node 10.15.3
|
- Install node 10.15.3
|
||||||
- Run "(cd frontend && npm i)"
|
- Run "(cd frontend && npm i)"
|
||||||
|
|
||||||
# Start dev server
|
### Start dev server
|
||||||
- Run "(cd frontend && npm run start)"
|
- Run "(cd frontend && npm run start)"
|
||||||
|
|||||||
@ -14,19 +14,19 @@
|
|||||||
|
|
||||||
-- Exportiere Datenbank Struktur für travopti
|
-- Exportiere Datenbank Struktur für travopti
|
||||||
DROP DATABASE IF EXISTS `travopti`;
|
DROP DATABASE IF EXISTS `travopti`;
|
||||||
CREATE DATABASE IF NOT EXISTS `travopti` /*!40100 DEFAULT CHARACTER SET latin1 COLLATE latin1_german1_ci */;
|
CREATE DATABASE IF NOT EXISTS `travopti` /*!40100 DEFAULT CHARACTER SET latin1 */;
|
||||||
USE `travopti`;
|
USE `travopti`;
|
||||||
|
|
||||||
-- Exportiere Struktur von Tabelle travopti.countries
|
-- Exportiere Struktur von Tabelle travopti.countries
|
||||||
DROP TABLE IF EXISTS `countries`;
|
DROP TABLE IF EXISTS `countries`;
|
||||||
CREATE TABLE IF NOT EXISTS `countries` (
|
CREATE TABLE IF NOT EXISTS `countries` (
|
||||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||||
`country` varchar(255) COLLATE latin1_german1_ci NOT NULL,
|
`country` varchar(255) NOT NULL,
|
||||||
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
||||||
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
||||||
PRIMARY KEY (`id`),
|
PRIMARY KEY (`id`),
|
||||||
UNIQUE KEY `country` (`country`)
|
UNIQUE KEY `country` (`country`)
|
||||||
) ENGINE=InnoDB AUTO_INCREMENT=64 DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB AUTO_INCREMENT=64 DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.countries: ~37 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.countries: ~37 rows (ungefähr)
|
||||||
DELETE FROM `countries`;
|
DELETE FROM `countries`;
|
||||||
@ -75,9 +75,9 @@ INSERT INTO `countries` (`id`, `country`, `created_at`, `updated_at`) VALUES
|
|||||||
DROP TABLE IF EXISTS `regions`;
|
DROP TABLE IF EXISTS `regions`;
|
||||||
CREATE TABLE IF NOT EXISTS `regions` (
|
CREATE TABLE IF NOT EXISTS `regions` (
|
||||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||||
`region` varchar(255) COLLATE latin1_german1_ci NOT NULL,
|
`region` varchar(255) NOT NULL,
|
||||||
`country_id` int(11) DEFAULT NULL,
|
`country_id` int(11) DEFAULT NULL,
|
||||||
`meteostat_id` int(11) DEFAULT NULL,
|
`meteostat_id` varchar(11) DEFAULT NULL,
|
||||||
`lon` double(22,0) DEFAULT NULL,
|
`lon` double(22,0) DEFAULT NULL,
|
||||||
`lat` double(22,0) DEFAULT NULL,
|
`lat` double(22,0) DEFAULT NULL,
|
||||||
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
||||||
@ -85,36 +85,36 @@ CREATE TABLE IF NOT EXISTS `regions` (
|
|||||||
PRIMARY KEY (`id`) USING BTREE,
|
PRIMARY KEY (`id`) USING BTREE,
|
||||||
KEY `FK_regions_countries` (`country_id`) USING BTREE,
|
KEY `FK_regions_countries` (`country_id`) USING BTREE,
|
||||||
CONSTRAINT `FK_regions_countries` FOREIGN KEY (`country_id`) REFERENCES `countries` (`id`) ON UPDATE CASCADE
|
CONSTRAINT `FK_regions_countries` FOREIGN KEY (`country_id`) REFERENCES `countries` (`id`) ON UPDATE CASCADE
|
||||||
) ENGINE=InnoDB AUTO_INCREMENT=127 DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB AUTO_INCREMENT=127 DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.regions: ~47 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.regions: ~47 rows (ungefähr)
|
||||||
DELETE FROM `regions`;
|
DELETE FROM `regions`;
|
||||||
/*!40000 ALTER TABLE `regions` DISABLE KEYS */;
|
/*!40000 ALTER TABLE `regions` DISABLE KEYS */;
|
||||||
INSERT INTO `regions` (`id`, `region`, `country_id`, `meteostat_id`, `lon`, `lat`, `created_at`, `updated_at`) VALUES
|
INSERT INTO `regions` (`id`, `region`, `country_id`, `meteostat_id`, `lon`, `lat`, `created_at`, `updated_at`) VALUES
|
||||||
(1, 'Buenos Aires', 1, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(1, 'Buenos Aires', 1, '87585', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(2, 'Melbourne', 2, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(2, 'Melbourne', 2, '94866', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(3, 'Sydney', 2, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(3, 'Sydney', 2, '94767', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(4, 'Bruessel', 3, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(4, 'Bruessel', 3, '06458', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(5, 'Rio de Janei..', 4, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(5, 'Rio de Janeiro', 4, '83755', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(6, 'Sao Paolo', 4, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(6, 'Sao Paolo', 4, '83780', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(7, 'Toronto', 5, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(7, 'Toronto', 5, '71624', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(8, 'Santiago de ..', 6, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(8, 'Santiago de Chile', 6, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(9, 'Peking', 7, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(9, 'Peking', 7, '54511', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(10, 'Shanghai', 7, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(10, 'Shanghai', 7, '58362', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(11, 'Bogota', 8, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(11, 'Bogota', 8, '80222', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(12, 'Kairo', 9, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(12, 'Kairo', 9, '62366', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(13, 'London', 10, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(13, 'London', 10, '03772', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(14, 'Paris', 11, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(14, 'Paris', 11, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(15, 'Berlin', 12, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(15, 'Berlin', 12, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(16, 'Athen', 13, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(16, 'Athen', 13, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(17, 'Hongkong', 14, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(17, 'Hongkong', 14, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(18, 'Reykjavik', 15, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(18, 'Reykjavik', 15, '04030', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(19, 'Delhi', 16, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(19, 'Delhi', 16, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(20, 'Mumbai', 16, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(20, 'Mumbai', 16, '43002', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(21, 'Dublin', 17, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(21, 'Dublin', 17, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(22, 'Rom', 18, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(22, 'Rom', 18, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(23, 'Tokio', 19, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(23, 'Tokio', 19, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(24, 'Kuala Lumpur', 20, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(24, 'Kuala Lumpur', 20, '48647', NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(25, 'Mexico City', 21, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(25, 'Mexico City', 21, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(26, 'Marrakesch', 22, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(26, 'Marrakesch', 22, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(27, 'Amsterdam', 23, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(27, 'Amsterdam', 23, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
@ -136,7 +136,7 @@ INSERT INTO `regions` (`id`, `region`, `country_id`, `meteostat_id`, `lon`, `lat
|
|||||||
(43, 'Las Vegas', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(43, 'Las Vegas', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(44, 'Miami', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(44, 'Miami', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(45, 'New York', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(45, 'New York', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(46, 'San Francis ..', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
(46, 'San Francisco', 36, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03'),
|
||||||
(47, 'Dubai', 37, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03');
|
(47, 'Dubai', 37, NULL, NULL, NULL, '2020-06-09 22:11:03', '2020-06-09 22:11:03');
|
||||||
/*!40000 ALTER TABLE `regions` ENABLE KEYS */;
|
/*!40000 ALTER TABLE `regions` ENABLE KEYS */;
|
||||||
|
|
||||||
@ -160,7 +160,7 @@ CREATE TABLE IF NOT EXISTS `regions_byt` (
|
|||||||
PRIMARY KEY (`id`),
|
PRIMARY KEY (`id`),
|
||||||
UNIQUE KEY `region_id` (`region_id`,`travelstyle`) USING BTREE,
|
UNIQUE KEY `region_id` (`region_id`,`travelstyle`) USING BTREE,
|
||||||
CONSTRAINT `FK_regions_byt_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT `FK_regions_byt_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
) ENGINE=InnoDB AUTO_INCREMENT=191 DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB AUTO_INCREMENT=191 DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.regions_byt: ~141 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.regions_byt: ~141 rows (ungefähr)
|
||||||
DELETE FROM `regions_byt`;
|
DELETE FROM `regions_byt`;
|
||||||
@ -323,7 +323,7 @@ CREATE TABLE IF NOT EXISTS `regions_trivago` (
|
|||||||
UNIQUE KEY `region_id_year_month` (`region_id`,`year`,`month`),
|
UNIQUE KEY `region_id_year_month` (`region_id`,`year`,`month`),
|
||||||
KEY `FK_regions_trivago_regions` (`region_id`),
|
KEY `FK_regions_trivago_regions` (`region_id`),
|
||||||
CONSTRAINT `FK_regions_trivago_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT `FK_regions_trivago_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
) ENGINE=InnoDB AUTO_INCREMENT=1278 DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB AUTO_INCREMENT=1278 DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.regions_trivago: ~940 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.regions_trivago: ~940 rows (ungefähr)
|
||||||
DELETE FROM `regions_trivago`;
|
DELETE FROM `regions_trivago`;
|
||||||
@ -1278,7 +1278,9 @@ CREATE TABLE IF NOT EXISTS `region_climate` (
|
|||||||
`region_id` int(11) NOT NULL,
|
`region_id` int(11) NOT NULL,
|
||||||
`year` int(11) NOT NULL,
|
`year` int(11) NOT NULL,
|
||||||
`month` int(11) NOT NULL,
|
`month` int(11) NOT NULL,
|
||||||
`temperature` float DEFAULT NULL,
|
`temperature_mean` float DEFAULT NULL,
|
||||||
|
`temperature_mean_min` float DEFAULT NULL,
|
||||||
|
`temperature_mean_max` float DEFAULT NULL,
|
||||||
`percipitation` float DEFAULT NULL,
|
`percipitation` float DEFAULT NULL,
|
||||||
`raindays` int(11) DEFAULT NULL,
|
`raindays` int(11) DEFAULT NULL,
|
||||||
`sunshine` float DEFAULT NULL,
|
`sunshine` float DEFAULT NULL,
|
||||||
@ -1289,7 +1291,7 @@ CREATE TABLE IF NOT EXISTS `region_climate` (
|
|||||||
UNIQUE KEY `region_id_year_month` (`region_id`,`year`,`month`),
|
UNIQUE KEY `region_id_year_month` (`region_id`,`year`,`month`),
|
||||||
KEY `FK_region_climate_regions` (`region_id`),
|
KEY `FK_region_climate_regions` (`region_id`),
|
||||||
CONSTRAINT `FK_region_climate_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT `FK_region_climate_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.region_climate: ~0 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.region_climate: ~0 rows (ungefähr)
|
||||||
DELETE FROM `region_climate`;
|
DELETE FROM `region_climate`;
|
||||||
@ -1300,14 +1302,14 @@ DELETE FROM `region_climate`;
|
|||||||
DROP TABLE IF EXISTS `search_presets`;
|
DROP TABLE IF EXISTS `search_presets`;
|
||||||
CREATE TABLE IF NOT EXISTS `search_presets` (
|
CREATE TABLE IF NOT EXISTS `search_presets` (
|
||||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||||
`parameter` varchar(255) COLLATE latin1_german1_ci NOT NULL,
|
`parameter` varchar(255) NOT NULL,
|
||||||
`name` varchar(255) COLLATE latin1_german1_ci NOT NULL,
|
`name` varchar(255) NOT NULL,
|
||||||
`value_1` int(11) NOT NULL,
|
`value_1` int(11) NOT NULL,
|
||||||
`value_2` int(11) DEFAULT NULL,
|
`value_2` int(11) DEFAULT NULL,
|
||||||
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
||||||
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
||||||
PRIMARY KEY (`id`)
|
PRIMARY KEY (`id`)
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.search_presets: ~0 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.search_presets: ~0 rows (ungefähr)
|
||||||
DELETE FROM `search_presets`;
|
DELETE FROM `search_presets`;
|
||||||
@ -1318,11 +1320,11 @@ DELETE FROM `search_presets`;
|
|||||||
DROP TABLE IF EXISTS `search_tags`;
|
DROP TABLE IF EXISTS `search_tags`;
|
||||||
CREATE TABLE IF NOT EXISTS `search_tags` (
|
CREATE TABLE IF NOT EXISTS `search_tags` (
|
||||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||||
`searchtag` varchar(255) COLLATE latin1_german1_ci NOT NULL,
|
`searchtag` varchar(255) NOT NULL,
|
||||||
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
|
||||||
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
||||||
PRIMARY KEY (`id`)
|
PRIMARY KEY (`id`)
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.search_tags: ~0 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.search_tags: ~0 rows (ungefähr)
|
||||||
DELETE FROM `search_tags`;
|
DELETE FROM `search_tags`;
|
||||||
@ -1343,7 +1345,7 @@ CREATE TABLE IF NOT EXISTS `user_feedback` (
|
|||||||
KEY `FK_user_feedback_search_tags` (`tag_id`),
|
KEY `FK_user_feedback_search_tags` (`tag_id`),
|
||||||
CONSTRAINT `FK_user_feedback_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
|
CONSTRAINT `FK_user_feedback_regions` FOREIGN KEY (`region_id`) REFERENCES `regions` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||||
CONSTRAINT `FK_user_feedback_search_tags` FOREIGN KEY (`tag_id`) REFERENCES `search_tags` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT `FK_user_feedback_search_tags` FOREIGN KEY (`tag_id`) REFERENCES `search_tags` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_german1_ci;
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
-- Exportiere Daten aus Tabelle travopti.user_feedback: ~0 rows (ungefähr)
|
-- Exportiere Daten aus Tabelle travopti.user_feedback: ~0 rows (ungefähr)
|
||||||
DELETE FROM `user_feedback`;
|
DELETE FROM `user_feedback`;
|
||||||
|
|||||||
5
backend/.env
Normal file
5
backend/.env
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
METEOSTAT_API_KEY=LMlDskju
|
||||||
|
DB_HOST=localhost
|
||||||
|
DB_USER=root
|
||||||
|
DB_PASSWORD=devtest
|
||||||
|
DB_PORT=3306
|
||||||
276
backend/app.js
Normal file
276
backend/app.js
Normal file
@ -0,0 +1,276 @@
|
|||||||
|
const express = require('express')
const moment = require('moment')
const _ = require('lodash')
const db = require('./mysql')
const score = require('./score')
const transformer = require('./transformer')
const climate = require('./climate')
const base = require('./base64')

const app = express()
const port = 3000

// Per-parameter weights used when scoring regions (see calculateScores).
const multiplier = {
  temperature_mean: 5,
  percipitation: 3.5,
  raindays: 3,
  sunhours: 2.5,
}

// Hard-coded presets returned by /v1/presets (not yet stored in the database).
const samplePresets = [
  {
    id: 29837,
    parameter: "temperature",
    label: "warm",
    values: [22, 25]
  }
]

// Route table: health check, region list, presets, climate search, climate import.
app.get('/', (req, res) => res.send('Hello Timo!'))
app.get('/v1/regions', (req, res) => getAllRegions().then(x => res.json({ data: x })))
app.get('/v1/presets', (req, res) => res.json({ data: samplePresets}))
app.get('/v1/search', searchHandler)
app.get('/v1/update/climate', climateUpdateHandler)

app.listen(port, () => console.log(`Travopti backend listening at http://localhost:${port}`))
|
|
||||||
|
/**
 * GET /v1/update/climate — triggers a refresh of the region_climate table.
 * Optional query parameters `startDate` / `endDate` narrow the import range;
 * `climate.update` falls back to its own defaults for missing values.
 */
function climateUpdateHandler(req, res) {
  // Pass both values positionally; `undefined` triggers climate.update's
  // parameter defaults. (The previous spread-based call shifted endDate into
  // the startDate slot whenever only endDate was supplied.)
  climate.update(req.query.startDate, req.query.endDate)
    .then((x) => {
      res.send(x)
    })
    .catch((e) => {
      res.send({
        message: 'error during update process. check backend logs.',
        error: e,
      })
    })
}
||||||
|
|
||||||
|
/**
 * GET /v1/search — scores all regions against the requested climate ranges.
 * Accepts either plain query parameters or a base64-encoded object in `q`.
 */
function searchHandler(req, res) {
  // Echoed back to the client for debugging.
  const meta = {
    params: req.params,
    query: req.query,
    headers: req.headers,
  }

  const q = req.query.q ? base.base64ToObj(req.query.q) : req.query
  console.log('Q:', q)

  // Map the public query names onto the internal climate column names.
  const queryObj = {}
  if (q.temperature) queryObj['temperature_mean'] = q.temperature
  if (q.percipitation) queryObj['percipitation'] = q.percipitation
  if (q.raindays) queryObj['raindays'] = q.raindays
  if (q.sunhours) queryObj['sunhours'] = q.sunhours

  scoreAndSearch(q.from, q.to, queryObj)
    .then((searchResults) => {
      res.json({ meta: meta, data: searchResults })
    })
    .catch((e) => {
      console.log(e)
      res.json(e.message)
    })
}
||||||
|
|
||||||
|
/**
 * Scores every region's climate against the requested parameter ranges for
 * the given travel dates and returns the transformed result set.
 *
 * @param from    Travel start — 'YYYY-MM-DD' string (old syntax) or a numeric timestamp.
 * @param to      Travel end — same formats; must be at least one day after `from`.
 * @param queries Map of climate parameter -> "low,high" string or [low, high] array.
 *                If empty, all climate parameters are randomized.
 * @throws Error when the dates are invalid or less than one day apart.
 */
async function scoreAndSearch(from, to, queries) {
  // TODO break function into parts when implementing non-climate queries and modularize (new file)
  console.log('search')

  // Table-wide min/max of every climate parameter; needed both for the
  // randomization bounds and for score normalization.
  const minMax = await getClimateMinMax()

  // Work on a shallow copy so the caller's object is never mutated.
  queries = { ...queries }

  // Randomize the search when the caller supplied no climate queries at all.
  if (_.isEmpty(queries)) {
    const t = _.round(_.random(minMax.min.temperature_mean, minMax.max.temperature_mean - 5), 0)
    const p = _.round(_.random(minMax.min.percipitation, minMax.max.percipitation - 50), 0)
    const r = _.round(_.random(minMax.min.raindays, minMax.max.raindays - 5), 0)
    const s = _.round(_.random(minMax.min.sunhours, minMax.max.sunhours - 50), 0)
    queries.temperature_mean = `${t},${t + 5}`
    queries.percipitation = `${p},${p + 50}`
    queries.raindays = `${r},${r + 5}`
    queries.sunhours = `${s},${s + 50}`
  }
  queries = oldToNewQuerySyntax(queries)
  console.log(queries)

  // TODO simplify and remove support for old query syntax
  let monthFrom = 0
  let monthTo = 0
  let dayFrom = 0
  let dayTo = 0

  if (_.isNumber(from) && _.isNumber(to)) {
    const dateFrom = moment(from).toDate()
    const dateTo = moment(to).toDate()
    // getMonth() is 0-based; add 1 to match the 1-based months used by the
    // string branch below (Number('MM') from 'YYYY-MM-DD') and queried from
    // region_climate.
    monthFrom = dateFrom.getMonth() + 1
    monthTo = dateTo.getMonth() + 1
    // getDate() is the day of the MONTH; the old getDay() returned the day
    // of the week (0-6), which corrupted the travel-period calculation.
    dayFrom = dateFrom.getDate()
    dayTo = dateTo.getDate()
    if (moment(dateFrom).add(23, 'hours').isAfter(moment(dateTo))) throw new Error("ERR: 'to' must be at least one day after 'from'.")
  } else {
    // Old query syntax: plain 'YYYY-MM-DD' strings. Validate before parsing.
    const re = /([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))/i;
    // Template literal so the offending values actually appear in the message
    // (the old Error("msg", from, to) silently discarded them).
    if (!from.match(re) || !to.match(re)) throw new Error(`ERR: invalid parameter: ${from} ${to}`)
    monthFrom = Number(from.split("-")[1])
    monthTo = Number(to.split("-")[1])
    dayFrom = Number(from.split("-")[2])
    dayTo = Number(to.split("-")[2])
    if (moment(from, 'YYYY-MM-DD').add(23, 'hours').isAfter(moment(to, 'YYYY-MM-DD'))) throw new Error("ERR: 'to' must be at least one day after 'from'.")
  }

  // -- Prepare search --
  // One entry per calendar month touched by the trip, so results can be
  // averaged when the travel dates span more than one month.
  const travelPeriods = []
  if (monthFrom === monthTo) {
    travelPeriods.push({
      month: monthFrom,
      days: dayTo - dayFrom,
    })
  } else {
    for (let index = monthFrom; index <= monthTo; index++) {
      let element
      if (index === monthFrom) {
        element = { month: index, days: 32 - dayFrom }
      } else if (index === monthTo) {
        element = { month: index, days: dayTo }
      } else {
        element = { month: index, days: 30 } // rough full-month approximation
      }
      travelPeriods.push(element)
    }
  }

  // Calculate per-parameter detail scores for every period in parallel.
  const detailScores = await Promise.all(travelPeriods.map(async (period) => {
    period.climate = await getAllRegionsWithClimatePerMonth(period.month)
    period.scores = {}
    Object.entries(queries).forEach(([key, value]) => {
      period.scores[key] = calculateScores(key, period.climate, value[0], value[1], minMax)
    });
    return period
  }));

  // Calculate ratios and transform into the response shape.
  return {
    results: transformer.transform(detailScores),
    debug: {
      detailScores: detailScores,
      minMax: minMax
    }
  }
}
||||||
|
|
||||||
|
/**
 * Builds one { region_id, type, value, score } record per region row for a
 * single climate parameter.
 *
 * @param type            Climate column name (e.g. 'temperature_mean').
 * @param regionDataRows  Rows from getAllRegionsWithClimatePerMonth().
 * @param searchLowParam  Lower bound of the requested range.
 * @param searchMaxParam  Upper bound of the requested range.
 * @param minMax          Table-wide { min, max } used for score normalization.
 */
function calculateScores(type, regionDataRows, searchLowParam, searchMaxParam, minMax) {
  console.log('calculateScores for', type)
  return regionDataRows.map((row) => {
    const value = row[type]
    const raw = score.calculateScoreRange(minMax.min[type], minMax.max[type], multiplier[type], value, searchLowParam, searchMaxParam)
    const rounded = Math.round(raw * 100) / 100
    return {
      region_id: row.region_id,
      type: type,
      value: value,
      // Regions with no data for this parameter get a null score.
      score: value === null ? null : rounded,
    }
  })
}
||||||
|
|
||||||
|
/**
 * Fetches the table-wide minimum and maximum of every climate parameter.
 * @returns {{min: Object, max: Object}} one aggregate row each.
 */
async function getClimateMinMax() {
  console.log('getClimateMinMax')
  // Same column list aggregated twice — once with MIN, once with MAX.
  const columns = (fn) => `
    ${fn}(temperature_mean) AS temperature_mean,
    ${fn}(temperature_mean_min) AS temperature_mean_min,
    ${fn}(temperature_mean_max) AS temperature_mean_max,
    ${fn}(percipitation) AS percipitation,
    ${fn}(raindays) AS raindays,
    ${fn}(sunshine) AS sunhours`
  const [minRows, maxRows] = await Promise.all([
    getQueryRows(`SELECT ${columns('MIN')} FROM region_climate`),
    getQueryRows(`SELECT ${columns('MAX')} FROM region_climate`),
  ])
  return { min: minRows[0], max: maxRows[0] }
}
||||||
|
|
||||||
|
/**
 * Runs a SQL statement and returns only the result rows.
 *
 * @param sql    SQL text; may contain `?` placeholders.
 * @param params Optional values bound to the placeholders. Defaults to an
 *               empty list so existing parameterless callers are unaffected.
 * @returns Promise resolving to the row array.
 */
async function getQueryRows(sql, params = []) {
  const [rows] = await db.execute(sql, params)
  return rows
}
||||||
|
|
||||||
|
// Lists every region joined with its country name (serves GET /v1/regions).
function getAllRegions() {
  const sql = `SELECT
    regions.id AS region_id,
    regions.region AS name,
    regions.country_id AS country_id,
    countries.country AS country,
    regions.meteostat_id AS meteostat_id
    FROM regions
    JOIN countries
    ON regions.country_id = countries.id`
  return getQueryRows(sql)
}
||||||
|
|
||||||
|
/**
 * Average climate values for one region in one calendar month.
 *
 * @param regionId Region primary key.
 * @param month    Month number as stored in region_climate.
 * @returns Promise resolving to the aggregate row(s).
 * @throws Error if either argument is not an integer — guards the
 *         string-interpolated SQL below against injection.
 */
function getClimatePerRegionAndMonth(regionId, month) {
  console.log('getClimatePerRegionAndMonth')
  const m = Number(month)
  const r = Number(regionId)
  if (!Number.isInteger(m) || !Number.isInteger(r)) {
    throw new Error('getClimatePerRegionAndMonth: regionId and month must be integers')
  }
  const sql = `SELECT region_id, AVG(temperature_mean), AVG(temperature_mean_min), AVG(temperature_mean_max), AVG(percipitation), AVG(sunshine) FROM region_climate WHERE month = ${m} AND region_id = ${r}`
  return getQueryRows(sql)
}
||||||
|
|
||||||
|
/**
 * Averaged climate values per region for a single calendar month, joined
 * with the region name and country id.
 *
 * @param month Month number as stored in region_climate.
 * @returns Promise resolving to one row per region.
 * @throws Error if `month` is not an integer — guards the
 *         string-interpolated SQL below against injection.
 */
function getAllRegionsWithClimatePerMonth(month) {
  console.log('getAllRegionsWithClimatePerMonth')
  const m = Number(month)
  if (!Number.isInteger(m)) {
    throw new Error('getAllRegionsWithClimatePerMonth: month must be an integer')
  }
  const sql = `SELECT
    region_climate.region_id AS region_id,
    regions.country_id AS country_id,
    regions.region AS name,
    ROUND(AVG(region_climate.temperature_mean), 1) AS temperature_mean,
    ROUND(AVG(region_climate.temperature_mean_min), 1) AS temperature_mean_min,
    ROUND(AVG(region_climate.temperature_mean_max), 1) AS temperature_mean_max,
    ROUND(AVG(region_climate.percipitation), 1) AS percipitation,
    ROUND(AVG(region_climate.raindays), 1) AS raindays,
    ROUND(AVG(region_climate.sunshine), 1) AS sunhours
    FROM region_climate JOIN regions ON region_climate.region_id = regions.id WHERE region_climate.month = ${m} GROUP BY region_id`
  return getQueryRows(sql)
}
||||||
|
|
||||||
|
/**
 * Normalizes query values from the old "low,high" string syntax into the
 * new [low, high] array syntax. Values that are already arrays are kept
 * as-is, so mixed old/new input is handled per key (the previous try/catch
 * version bailed out wholesale on the first non-string value it hit,
 * discarding any keys already transformed).
 *
 * @param queries Map of climate parameter -> "low,high" string or [low, high].
 * @returns New object restricted to the supported climate parameters.
 */
function oldToNewQuerySyntax(queries) {
  const supported = ['temperature_mean', 'percipitation', 'raindays', 'sunhours']
  const res = {}
  for (const key of supported) {
    const value = queries[key]
    if (!value) continue
    if (typeof value === 'string') {
      const parts = value.split(',')
      res[key] = [parts[0], parts[1]]
    } else {
      // Already in the new syntax — pass through unchanged.
      res[key] = value
    }
  }
  console.log('queries successfully transformed');
  return res
}
||||||
23
backend/base64.js
Normal file
23
backend/base64.js
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
/**
|
||||||
|
* Encodes an object as base64 string..
|
||||||
|
* @param obj The object to encode
|
||||||
|
*/
|
||||||
|
exports.objToBase64 = function(obj) {
|
||||||
|
return btoa(JSON.stringify(obj));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decodes a base64 encoded object.
|
||||||
|
* @param base64 Encoded object
|
||||||
|
*/
|
||||||
|
exports.base64ToObj = function(base64) {
|
||||||
|
return JSON.parse(atob(base64));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node shim for the browser atob(): base64 -> binary string.
function atob(base64) {
  const buf = Buffer.from(base64, 'base64');
  return buf.toString('binary');
}
||||||
|
|
||||||
|
// Node shim for the browser btoa(): string -> base64.
function btoa(string) {
  const buf = Buffer.from(string);
  return buf.toString('base64');
}
||||||
124
backend/climate.js
Normal file
124
backend/climate.js
Normal file
@ -0,0 +1,124 @@
|
|||||||
|
require('dotenv').config()
|
||||||
|
const mysql = require('mysql2/promise');
|
||||||
|
const axios = require('axios')
|
||||||
|
|
||||||
|
// Default date window (YYYY-MM) for fetching monthly history data from
// the meteostat API when the caller does not supply a range.
const rangeStartDate = '2010-01'
const rangeEndDate = '2018-12'
|
||||||
|
|
||||||
|
exports.update = async function (startDate = rangeStartDate, endDate = rangeEndDate) {
|
||||||
|
console.log('update climate with:', startDate, endDate);
|
||||||
|
|
||||||
|
const connection = await mysql.createConnection({
|
||||||
|
host: process.env.DB_HOST,
|
||||||
|
user: process.env.DB_USER,
|
||||||
|
password: process.env.DB_PASSWORD,
|
||||||
|
port: process.env.DB_PORT,
|
||||||
|
database: 'travopti'
|
||||||
|
});
|
||||||
|
const [result, fields] = await connection.execute(`SELECT * FROM regions WHERE meteostat_id IS NOT NULL`)
|
||||||
|
|
||||||
|
// let temp = await Promise.all(result.map(x => createClimateObject(x)))
|
||||||
|
// let final = temp.reduce((total, element) => total.concat(element), [])
|
||||||
|
|
||||||
|
// await writeToDatabase(connection, final)
|
||||||
|
|
||||||
|
let temp2 = await Promise.all(result.map(x => createClimateObjectFrom(x, startDate, endDate)))
|
||||||
|
let final2 = temp2.reduce((total, element) => total.concat(element), [])
|
||||||
|
|
||||||
|
await writeToDatabase(connection, final2)
|
||||||
|
|
||||||
|
connection.end();
|
||||||
|
let response = 'database update complete. see backend logs for info.'
|
||||||
|
console.log(response)
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
|
||||||
|
// async function createClimateObject(src) {
|
||||||
|
// let response
|
||||||
|
// try {
|
||||||
|
// response = await axios.get(`https://api.meteostat.net/v1/climate/normals?station=${src.meteostat_id}&key=${process.env.METEOSTAT_API_KEY}`)
|
||||||
|
// } catch (error) {
|
||||||
|
// console.log("skipping: couldn't find results for following region: ")
|
||||||
|
// console.log(src.region + " with meteostat_id " + src.meteostat_id)
|
||||||
|
// return []
|
||||||
|
// }
|
||||||
|
// if (!response.data.data) {
|
||||||
|
// console.log("skipping: no data for station meteostat_id " + src.meteostat_id + " (" + src.region + ")")
|
||||||
|
// return []
|
||||||
|
// }
|
||||||
|
// let results = []
|
||||||
|
// for (let index = 1; index <= 12; index++) {
|
||||||
|
// let result = {
|
||||||
|
// region: src.region,
|
||||||
|
// region_id: src.id,
|
||||||
|
// month: index,
|
||||||
|
// temperature: Object.values(response.data.data.temperature)[index - 1] ? Object.values(response.data.data.temperature)[index - 1] : null,
|
||||||
|
// temperature_min: Object.values(response.data.data.temperature_min)[index - 1] ? Object.values(response.data.data.temperature_min)[index - 1] : null,
|
||||||
|
// temperature_max: Object.values(response.data.data.temperature_max)[index - 1] ? Object.values(response.data.data.temperature_max)[index - 1] : null,
|
||||||
|
// precipitation: Object.values(response.data.data.precipitation)[index - 1] ? Object.values(response.data.data.precipitation)[index - 1] : null,
|
||||||
|
// sunshine: Object.values(response.data.data.sunshine)[index - 1] ? Object.values(response.data.data.sunshine)[index - 1] : null,
|
||||||
|
// }
|
||||||
|
// results.push(result)
|
||||||
|
// }
|
||||||
|
// return results
|
||||||
|
// }
|
||||||
|
|
||||||
|
/**
 * Fetches monthly climate history for one region from the meteostat API
 * and maps it to region_climate row objects.
 * Returns [] (and logs the reason) when the request fails or yields no
 * data, so a single bad station never aborts a bulk update.
 * @param {Object} src region row (needs id, region, meteostat_id)
 * @param {string} startDate inclusive start month, 'YYYY-MM'
 * @param {string} endDate inclusive end month, 'YYYY-MM'
 * @returns {Promise<Array<Object>>} climate rows for writeToDatabase
 */
async function createClimateObjectFrom(src, startDate, endDate) {
    let response
    try {
        response = await axios.get(`https://api.meteostat.net/v1/history/monthly?station=${src.meteostat_id}&start=${startDate}&end=${endDate}&key=${process.env.METEOSTAT_API_KEY}`)
    } catch (error) {
        console.log("skipping createClimateObjectFrom: couldn't find results for following region: ")
        console.log(src.region + " with meteostat_id " + src.meteostat_id)
        console.log(error)
        return []
    }
    if (!response.data.data) {
        console.log("skipping: no data for station meteostat_id " + src.meteostat_id + " (" + src.region + ")")
        return []
    }
    return response.data.data.map(element => {
        // element.month arrives as 'YYYY-MM'; split it once, not twice.
        const [year, month] = element.month.split("-")
        return {
            region: src.region,
            region_id: src.id,
            year: year,
            month: month,
            temperature: element.temperature_mean,
            temperature_min: element.temperature_mean_min,
            temperature_max: element.temperature_mean_max,
            precipitation: element.precipitation,
            raindays: element.raindays,
            sunshine: element.sunshine,
            // Explicit null/undefined check: a humidity of 0 is a valid
            // reading and must not be coerced to null by truthiness.
            humidity: element.humidity != null ? element.humidity : null
        }
    })
}
|
||||||
|
|
||||||
|
/**
 * Persists climate rows via REPLACE INTO region_climate.
 * Rows without a year are stored with year 0 (climate normals); rows with
 * a year carry the full monthly-history column set.
 * Writes run sequentially and are awaited before resolving, so the caller
 * can safely close the connection afterwards (the previous forEach(async)
 * version returned before any write finished).
 * @param dbConnection mysql2 promise connection (or pool)
 * @param {Array<Object>} climateObjArr rows produced by createClimateObjectFrom
 */
async function writeToDatabase(dbConnection, climateObjArr) {
    // Map undefined to NULL so placeholders never receive undefined
    // (missing values previously produced the literal text `undefined`
    // inside the interpolated SQL).
    const v = (x) => (x === undefined ? null : x)

    for (const element of climateObjArr) {
        try {
            if (!element.year) {
                // Placeholders instead of string interpolation: avoids SQL
                // injection and malformed statements. Column name
                // 'percipitation' matches the existing schema.
                await dbConnection.execute(
                    `REPLACE INTO region_climate (region_id, year, month, temperature_mean, temperature_mean_min, temperature_mean_max, percipitation, sunshine)
                     VALUES (?, 0, ?, ?, ?, ?, ?, ?)`,
                    [v(element.region_id), v(element.month), v(element.temperature), v(element.temperature_min), v(element.temperature_max), v(element.precipitation), v(element.sunshine)]
                )
            } else {
                await dbConnection.execute(
                    `REPLACE INTO region_climate (region_id, year, month, temperature_mean, temperature_mean_min, temperature_mean_max, percipitation, sunshine, humidity, raindays)
                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                    [v(element.region_id), v(element.year), v(element.month), v(element.temperature), v(element.temperature_min), v(element.temperature_max), v(element.precipitation), v(element.sunshine), v(element.humidity), v(element.raindays)]
                )
            }
        } catch (error) {
            if (error.code !== 'ER_DUP_ENTRY') {
                console.log("element which causes problems: ")
                console.log(element)
                console.log("query which causes problems: ")
                console.log(error)
            } else {
                console.log(element.region + ": " + error.sqlMessage)
            }
        }
    }
}
|
||||||
92
backend/mysql.js
Normal file
92
backend/mysql.js
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
var mysql = require('mysql2/promise');
|
||||||
|
require('dotenv').config()
|
||||||
|
|
||||||
|
// var connection = mysql.createConnection({
|
||||||
|
// host: process.env.DB_HOST,
|
||||||
|
// user: process.env.DB_USER,
|
||||||
|
// password: process.env.DB_PASSWORD,
|
||||||
|
// port: process.env.DB_PORT,
|
||||||
|
// database: 'travopti'
|
||||||
|
// });
|
||||||
|
|
||||||
|
// Shared connection pool used by the whole backend.
const pool = mysql.createPool({
    connectionLimit: 10,
    host: process.env.DB_HOST,
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    port: process.env.DB_PORT,
    database: 'travopti',
    // Return DECIMAL columns as JS numbers instead of strings.
    decimalNumbers: true
});

// Startup connectivity probe. The probe connection must be released,
// otherwise one of the 10 pooled connections leaks permanently
// (the release call was previously commented out).
pool.getConnection()
    .then(function (connection) {
        console.log(`Connected to database: ${process.env.DB_HOST}`);
        connection.release();
    })
    .catch(function (error) {
        console.error(error.message);
    });

module.exports = pool;
|
||||||
|
|
||||||
|
// let travoptidb = {}
|
||||||
|
// travoptidb.all = () => {
|
||||||
|
// return new Promise((resolve, reject) => {
|
||||||
|
// pool.query(`SELECT * FROM regions`, (err, results) => {
|
||||||
|
// if (err) {
|
||||||
|
// return reject(err)
|
||||||
|
// }
|
||||||
|
// return resolve(results)
|
||||||
|
// })
|
||||||
|
// })
|
||||||
|
// }
|
||||||
|
|
||||||
|
// connection.connect((err) => {
|
||||||
|
// if (err) throw err;
|
||||||
|
// console.log('Database connected!')
|
||||||
|
// });
|
||||||
|
|
||||||
|
|
||||||
|
// exports.getRegions = () => {
|
||||||
|
// let sql = `SELECT * FROM regions`;
|
||||||
|
// console.log(connection.state)
|
||||||
|
// if (connection.state === 'disconnected') {
|
||||||
|
// setTimeout(() => console.log('waiting...'), 1000);
|
||||||
|
// }
|
||||||
|
// console.log('executed')
|
||||||
|
// let res = {}
|
||||||
|
// connection.query(sql, (error, results, fields) => {
|
||||||
|
// if (error) {
|
||||||
|
// return console.error(error.message);
|
||||||
|
// }
|
||||||
|
// console.log('innercallback(1)')
|
||||||
|
// res = results[0]
|
||||||
|
// });
|
||||||
|
// console.log('outsidecallback(2)')
|
||||||
|
// return res;
|
||||||
|
// }
|
||||||
|
|
||||||
|
// exports.getBYTdata = () => {
|
||||||
|
// connection.query(`SELECT * FROM regions_byt`, (error, results, fields) => {
|
||||||
|
// if (error) {
|
||||||
|
// return console.error(error.message);
|
||||||
|
// }
|
||||||
|
// console.log(results[0])
|
||||||
|
// nres = results.map((obj) => {
|
||||||
|
// return obj.region
|
||||||
|
// })
|
||||||
|
// //console.log(nres);
|
||||||
|
// });
|
||||||
|
// }
|
||||||
|
|
||||||
|
// exports.end = () => connection.end();
|
||||||
|
|
||||||
|
// module.exports = connection;
|
||||||
1523
backend/package-lock.json
generated
Normal file
1523
backend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
22
backend/package.json
Normal file
22
backend/package.json
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"name": "cc-data-backend",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "",
|
||||||
|
"main": "app.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "nodemon ./app.js"
|
||||||
|
},
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^0.19.2",
|
||||||
|
"dotenv": "^8.2.0",
|
||||||
|
"express": "^4.17.1",
|
||||||
|
"lodash": "^4.17.15",
|
||||||
|
"moment": "^2.26.0",
|
||||||
|
"mysql2": "^2.1.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^2.0.4"
|
||||||
|
}
|
||||||
|
}
|
||||||
31
backend/score.js
Normal file
31
backend/score.js
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
/**
 * Weight applied to the normalized temperature distance in score
 * calculations (see calculateScore's `multiplier` parameter); larger
 * values make temperature mismatches more punishing.
 * NOTE(review): not referenced within this file — presumably passed in
 * by a caller; verify before removing.
 */
const multiplier_temperature = 5;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @param {...any} scores expects objects which contains score and their weight
|
||||||
|
*/
|
||||||
|
exports.calculateAvgScore = (...scores) => {
|
||||||
|
return avgScore = scores.reduce((total, score) => total += score) / scores.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.calculateScoreRange = (min, max, multiplier, regionVal, sLowVal, sHighVal) => {
|
||||||
|
//console.log('scores.calculateScoreRange:', min, max, multiplier, regionVal, sLowVal, sHighVal)
|
||||||
|
// return full score when in range
|
||||||
|
if (regionVal >= sLowVal && regionVal <= sHighVal) return 10;
|
||||||
|
// choose value with smallest distance
|
||||||
|
let sVal = Math.abs(regionVal - sLowVal) < Math.abs(regionVal - sHighVal) ? sLowVal : sHighVal;
|
||||||
|
//console.log('nearest value',sVal, regionVal)
|
||||||
|
return this.calculateScore(min, max, multiplier, regionVal, sVal);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.calculateScore = (min, max, multiplier, regionVal, searchVal) => {
|
||||||
|
|
||||||
|
let score = 1 - (Math.abs(searchVal - regionVal) / (max - min) * multiplier);
|
||||||
|
return score <= 0 ? 0 : score * 10;
|
||||||
|
}
|
||||||
|
|
||||||
|
// NOTE(review): module-load smoke test — runs on every require of this
// file and relies on CJS module-level `this` === exports; consider moving
// it into a real test suite.
console.log('test score calculation. result: ' + this.calculateScoreRange(-15, 45, 5, 24, 15, 22))
|
||||||
1205
backend/transformer-test.json
Normal file
1205
backend/transformer-test.json
Normal file
File diff suppressed because it is too large
Load Diff
77
backend/transformer.js
Normal file
77
backend/transformer.js
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
const _ = require('lodash')
|
||||||
|
const fs = require('fs')
|
||||||
|
|
||||||
|
|
||||||
|
exports.transform = (data) => {
|
||||||
|
// get data
|
||||||
|
// let data = JSON.parse(fs.readFileSync('transformer-test.json'));
|
||||||
|
const types = Object.keys(data[0].scores)
|
||||||
|
|
||||||
|
// STEP 1 Create Response Array with region names from first climate object
|
||||||
|
let byRegion = data[0].climate.map(el => {
|
||||||
|
return {
|
||||||
|
region_id: el.region_id,
|
||||||
|
country_id: el.country_id,
|
||||||
|
name: el.name,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// STEP 2 Prepare flat scoreobject array and set days property
|
||||||
|
scoreObjs = _.flatten(_.map(data, (period) => {
|
||||||
|
return _.reduce(period.scores, (arr, el) => {
|
||||||
|
return arr.concat(el)
|
||||||
|
}).map(element => {
|
||||||
|
element.days = period.days
|
||||||
|
return element
|
||||||
|
})
|
||||||
|
}))
|
||||||
|
|
||||||
|
// STEP 3 Collect scoreobjects for each region
|
||||||
|
let results = byRegion.map(region => {
|
||||||
|
let scores = []
|
||||||
|
types.forEach(typ => {
|
||||||
|
let tempScores = _.filter(scoreObjs, { 'region_id': region.region_id, 'type': typ })
|
||||||
|
if (_.some(tempScores, { 'score': null })) {
|
||||||
|
console.log("found 'null' scores! skipping...")
|
||||||
|
//console.log(tempScores)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
let averagedScore = {
|
||||||
|
region_id: region.region_id,
|
||||||
|
type: typ,
|
||||||
|
value: 0,
|
||||||
|
score: 0,
|
||||||
|
days: 0
|
||||||
|
}
|
||||||
|
tempScores.forEach(el => {
|
||||||
|
averagedScore.value += (el.value * el.days)
|
||||||
|
averagedScore.score += (el.score * el.days)
|
||||||
|
averagedScore.days += (el.days)
|
||||||
|
})
|
||||||
|
|
||||||
|
averagedScore.value = _.round(averagedScore.value / averagedScore.days, 1)
|
||||||
|
averagedScore.score = _.round(averagedScore.score / averagedScore.days, 1)
|
||||||
|
delete averagedScore.region_id
|
||||||
|
delete averagedScore.days
|
||||||
|
scores.push(averagedScore)
|
||||||
|
|
||||||
|
})
|
||||||
|
region.scores = scores
|
||||||
|
|
||||||
|
// STEP 4 Calculate Average Score
|
||||||
|
region.score = calculateAverage(region.scores)
|
||||||
|
//console.log(region)
|
||||||
|
return region
|
||||||
|
})
|
||||||
|
|
||||||
|
// console.log(results)
|
||||||
|
return _.orderBy(results, 'score', 'desc')
|
||||||
|
//end
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Plain (unweighted) average of the `score` fields, rounded to 2 decimals.
 * @param {Array<{score: number}>} scores
 * @returns {number} mean score; NaN for an empty array
 */
function calculateAverage(scores) {
    const total = scores.reduce((acc, el) => acc + el.score, 0)
    return _.round(total / scores.length, 2)
}
|
||||||
Loading…
Reference in New Issue
Block a user