Compare commits

...

490 Commits

Author SHA1 Message Date
Keith Martin
7856e0ceb8 Setup: Update Ollama service examples in compose.alldbms.yaml file 2025-09-02 11:15:57 +10:00
Keith Martin
ed62352090 Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-09-02 10:24:26 +10:00
Keith Martin
78937c9362 Tests: remove ToDo that is done 2025-09-02 10:20:03 +10:00
Keith Martin
409bb217f0 Tests: Make test TestConfig_PortalPath parallel compatible 2025-08-29 13:24:58 +10:00
Keith Martin
cf6dae3622 CLI: Update usage descriptions for Boolean command flags 2025-08-29 12:02:40 +10:00
Keith Martin
4642780f5e PostgreSQL: remove ToDo where it has been done. 2025-08-29 11:58:56 +10:00
Keith Martin
05fa302661 Docker: Update alldbms to match base compose.yaml 2025-08-29 11:38:33 +10:00
Keith Martin
b86ddf0ec6 Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-08-29 11:35:28 +10:00
Keith Martin
5085189999 Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-08-24 21:48:15 +10:00
Michael Mayer
72628cf27e Build: Add "docker-postgres" target to Makefile (Signed-off-by: Michael Mayer <michael@photoprism.app>) 2025-08-21 14:52:25 +02:00
Keith Martin
e56ab41901 Build: Use postgres for consistency on all make commands 2025-08-21 22:40:06 +10:00
Keith Martin
292d218940 Build: Shorten PostgreSQL to psql in make names 2025-08-21 22:31:31 +10:00
Keith Martin
3f778b3df2 Build: update postgresql yaml file and adjust make names 2025-08-21 22:28:27 +10:00
Keith Martin
b24797d42b Tests: correct assert.Equal order of operators 2025-08-20 22:09:58 +10:00
Keith Martin
805e22d807 Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-08-20 21:18:55 +10:00
Keith Martin
667b683a76 PostgreSQL: remove not required lower 2025-08-20 20:19:35 +10:00
Keith Martin
7328b4575d Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-08-20 20:18:44 +10:00
Keith Martin
77667c8f82 PostgreSQL: use ILIKE for case insensitive like processing 2025-08-20 17:33:28 +10:00
Keith Martin
1370b7589e PostgreSQL: add caseinsensitive collation and update tests 2025-08-20 17:32:58 +10:00
Keith Martin
2af099df72 PostgreSQL: add ability to adjust order by for columns needing case insensitive order 2025-08-20 17:31:16 +10:00
Keith Martin
28eb716f76 Migrations: Add Post step for post create/update processing 2025-08-20 12:01:54 +10:00
Keith Martin
146f06566f PostgreSQL: Handle different ordering of NULL results 2025-08-19 23:16:04 +10:00
Keith Martin
efa164f793 Gorm: Fix missing & on Model and Delete statements 2025-08-18 23:17:54 +10:00
Keith Martin
1f2ced9af2 Tests: Fix reuse of ID causing test failure 2025-08-18 22:50:26 +10:00
Keith Martin
70d1e42a56 Tests: update counts due to fix of album fixture duplicate id 2025-08-18 22:50:00 +10:00
Keith Martin
6f6a1a66dd Tests: make Moment tests survive changes to fixtures 2025-08-18 22:49:17 +10:00
Keith Martin
d4c0c6b3d0 Fixture: Renumber cows to 35 due duplicated id 2025-08-18 22:05:15 +10:00
Keith Martin
f99f0f714c Tests: Update expected for Albums due new fixtures 2025-08-18 21:40:02 +10:00
Keith Martin
dde5070174 Merge: Add missed index from merge 2025-08-18 21:31:26 +10:00
Keith Martin
904b7a94a4 Docker: Update to match compose.yml 2025-08-18 21:30:32 +10:00
Keith Martin
0d0873ae4c Gorm: Fix missing & on Delete statements 2025-08-18 21:30:06 +10:00
Keith Martin
a9855bdfae Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-08-18 17:30:03 +10:00
Keith Martin
b7f6f0f443 Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-07-15 11:33:38 +10:00
Keith Martin
3ae9e776e3 refactor: make fmt-go 2025-07-10 09:33:56 +10:00
Keith Martin
dfe84836c9 Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-07-10 09:32:02 +10:00
Keith Martin
87b84bdd67 make: Add alldbms 2025-07-09 11:37:57 +10:00
Keith Martin
6438e673dc tests: unit test that restore and index work with MediaRestoring 2025-07-08 23:31:59 +10:00
Keith Martin
f9f24a89ad refactor: ensure that start from empty database will restore from yaml files without errors 2025-07-08 16:08:52 +10:00
Keith Martin
efb39fd370 refactor: move queries from entity. 2025-07-01 19:12:50 +10:00
Keith Martin
0308ed82c4 refactor: correct preload to exclude all photo fields 2025-07-01 10:57:48 +10:00
Keith Martin
0f08d8ec2b Merge remote-tracking branch 'origin/develop' into PostgreSQL 2025-07-01 10:26:33 +10:00
Keith Martin
ee8700d61b yaml: handle DeletedAt and other yaml errors 2025-06-30 22:51:29 +10:00
Keith Martin
fb9a16a1c6 Tests: Check more than just DeletedAt yaml errors 2025-06-30 22:50:33 +10:00
Keith Martin
070302d666 Tests: Gorm V1 yaml files 2025-06-30 22:32:09 +10:00
Keith Martin
a26143bfad CLI: improve detection of populated databases 2025-06-30 18:29:21 +10:00
Keith Martin
f7098ba2d2 git: add .vscode to ignore 2025-06-30 13:17:26 +10:00
Keith Martin
c8bc522469 Tests: Detect failed preload of labels 2025-06-30 10:42:09 +10:00
Keith Martin
a7c04f678e photo: correct preload of labels 2025-06-30 10:34:54 +10:00
Keith Martin
91097c17ab Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-06-28 13:08:17 +10:00
Keith Martin
ffca3d8c8e Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-06-28 13:05:03 +10:00
Keith Martin
3f5aa0283c Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-06-27 22:16:13 +10:00
Keith Martin
cff2605cd4 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-06-27 22:12:57 +10:00
Keith Martin
14feed45db Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-06-27 14:41:00 +10:00
Keith Martin
788d1cdee3 Migration: change sqlite3 to sqlite 2025-06-27 14:06:35 +10:00
Keith Martin
99c0dd6602 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-06-27 13:50:41 +10:00
Keith Martin
587b393519 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-06-05 19:14:54 +10:00
Keith Martin
6a827ee55f Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-06-05 16:21:24 +10:00
Keith Martin
8c53ffc49f Merge remote-tracking branch 'upstream/develop' into PostgreSQL 2025-05-18 21:56:30 +10:00
Keith Martin
74e135d9e5 Tests: switch unit tests to testdb database, and keep acceptance tests on acceptance database 2025-05-17 16:00:21 +10:00
Keith Martin
bc23cde659 SQL: refactor postgresql drops to use force 2025-05-17 14:31:13 +10:00
Keith Martin
6c9f079035 Tests: Add tests for album with label filter 2025-05-17 14:19:34 +10:00
Keith Martin
3ef5171424 Backend: refactor remove Debug(). 2025-05-17 12:24:02 +10:00
Keith Martin
b4d1e50bf4 Backend: refactor searchPhotos to cover all cases where frm.Label is changed for PostgreSQL 2025-05-17 12:15:37 +10:00
Keith Martin
33a39d71c8 Backend: fix UserPhotosGeo for PostgreSQL Group By limitation 2025-05-17 12:14:17 +10:00
Keith Martin
361248f245 Backend: fix searchPhotos for frmLabel being reset later in func and not enabling PostgreSQL specific handling 2025-05-16 23:54:36 +10:00
Keith Martin
0424009a84 Backend: fix subjects PostgreSQL query 2025-05-16 20:26:47 +10:00
Keith Martin
a69bf55b20 CLI: refactor migrations transfer to include setting of sequence number for appropriate tables 2025-05-16 19:31:03 +10:00
Keith Martin
50532c5444 Tests: make sure that a create works against a migrated table 2025-05-16 19:29:36 +10:00
Keith Martin
bc0752aeda CLI: refactor migrations transfer to add batch size 2025-05-16 15:41:29 +10:00
Keith Martin
72c8f835c0 Tests: add tests for batch size addition to migrations transfer 2025-05-16 15:40:57 +10:00
Keith Martin
bdbb5e32c3 CLI: refactor migrations to ensure version and migration entities exist (issue with once already being fired) 2025-05-15 21:20:35 +10:00
Keith Martin
9be5ce7e6d Tests: add PostgreSQL tests for migrations transfer 2025-05-15 21:04:39 +10:00
Keith Martin
e97af870db Make: refactor add postgresql for acceptance and tidy up 2025-05-15 16:41:31 +10:00
Keith Martin
a38b2b254e Code: make fmt-go 2025-05-15 16:14:49 +10:00
Keith Martin
fa829cabcf Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-05-15 16:03:46 +10:00
Keith Martin
4ffb76bff1 Merge branch 'transfer' into gorm2 2025-05-15 15:49:08 +10:00
Keith Martin
9a213c8fd9 Code: make fmt-go 2025-05-15 15:42:27 +10:00
Keith Martin
a726ab344d Tests: improve details fixture create to remove messages due to fixtures already existing 2025-05-15 15:39:52 +10:00
Keith Martin
8d74949bc2 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-05-15 15:37:24 +10:00
Keith Martin
440da2e1f2 Make: refactor acceptance test targets to make generic so later DBMS' are easy to add 2025-05-14 22:31:55 +10:00
Keith Martin
c0d46afe10 Make: Add mariadb for acceptance javascript testing 2025-05-13 20:15:49 +10:00
Keith Martin
11036972c9 CLI: refactor away from FindInBatches for structs with composite primary key due to gorm bug. Exclude records with empty user_uid from old databases 2025-05-13 20:05:25 +10:00
Keith Martin
5c1ba53c87 Tests: add SQLite to MySQL and refactor database source files 2025-05-11 22:26:23 +10:00
Keith Martin
41c238fab4 CLI: migrations fix when truncate is done and add missing user_uid affected table deletes 2025-05-11 22:25:25 +10:00
Keith Martin
b87b78884f CLI: add new migrations transfer capability 2025-05-10 23:21:53 +10:00
Keith Martin
1980a5093a Tests: add tests to support new cli migrations transfer 2025-05-10 23:20:54 +10:00
Keith Martin
a264280d5d Tests: refactor comments 2025-05-10 13:23:29 +10:00
Keith Martin
dfae14c303 Tests: refactor to enable cli returning help information 2025-05-10 13:20:41 +10:00
Keith Martin
fe67756f49 Merge remote-tracking branch 'origin/develop' into gorm2 2025-05-09 21:23:07 +10:00
Keith Martin
c4c116f445 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-05-05 21:12:51 +10:00
Keith Martin
74302f64ff Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-05-05 20:52:27 +10:00
Keith Martin
b85e6a590b Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-05-05 19:52:54 +10:00
Keith Martin
52b7f15e50 Tests: Add test DetailsCounts 2025-05-05 19:35:44 +10:00
Keith Martin
25877e04e2 DB: refactor to gorm2 2025-05-05 18:30:32 +10:00
Keith Martin
83eb4b782c Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-05-05 18:26:59 +10:00
Keith Martin
5a0a74e8b7 Tests: refactor Photos test due type change 2025-05-03 21:50:54 +10:00
Keith Martin
9da1964e33 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-05-03 21:28:47 +10:00
Keith Martin
cf1f473f26 Tests: Refactor unit tests for new fixtures 2025-05-03 18:34:23 +10:00
Keith Martin
d2f5f58296 Config: refactor findBin for PostgreSQL 2025-05-03 18:23:34 +10:00
Keith Martin
a2344733e6 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-05-03 18:16:43 +10:00
Keith Martin
2a773a0c42 Tests: Refactor unit tests for new fixtures 2025-05-03 17:49:14 +10:00
Keith Martin
e94d71c335 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-05-03 17:26:13 +10:00
Keith Martin
38a7ad9180 Add time_zone change for postgres 2025-04-28 22:37:37 +10:00
Keith Martin
95eb3acf99 Update compose files for postgres with latest changes 2025-04-28 22:37:11 +10:00
Keith Martin
eb45f8572b Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-04-28 22:04:46 +10:00
Keith Martin
9be886f644 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-04-28 20:27:59 +10:00
Keith Martin
d9d5759244 Parallel test run improvements 2025-04-11 22:08:38 +10:00
Keith Martin
9a1ad5c866 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-04-11 21:12:00 +10:00
Keith Martin
e9ddb8b81a Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-04-11 20:43:20 +10:00
Keith Martin
9079252791 Fix case statement error 2025-04-02 23:12:14 +10:00
Keith Martin
06d9b45c68 Ensure parallel test runs do not clash on file names 2025-04-02 22:52:05 +10:00
Keith Martin
a027ce02d0 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-04-02 22:13:56 +10:00
Keith Martin
ff8cea8c89 Apply foreign_keys to SQLite3 dsn 2025-04-02 22:13:40 +10:00
Keith Martin
8a444bfc81 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-04-02 22:11:16 +10:00
Keith Martin
6ee8fcf9d6 remove tensorflow as per compose.yaml 2025-04-02 22:09:10 +10:00
Keith Martin
356e929f0c Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-04-02 21:57:44 +10:00
Keith Martin
ea89f8828f Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-04-02 20:30:40 +10:00
Keith Martin
206abd0466 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-04-02 20:24:34 +10:00
Keith Martin
29449b7812 Update with latest base compose.yaml 2025-04-01 22:19:56 +10:00
Keith Martin
06de17fd98 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-04-01 21:45:45 +10:00
Keith Martin
1762053ac6 Correct test as per photo fixture data 2025-04-01 21:45:05 +10:00
Keith Martin
6e06451f85 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-04-01 21:08:27 +10:00
Keith Martin
8a738548e3 Alter test to handle different ways of using SQLite. 2025-03-31 22:32:06 +10:00
Keith Martin
9598230142 Flag tests that use InitializeTestData to not run in Short to avoid parallel issues 2025-03-31 20:26:15 +10:00
Keith Martin
3d0d393cda run make fmt-go 2025-03-30 23:36:48 +10:00
Keith Martin
5d74008f0d Move test data into separate folder per DSN_NAME to prevent parallel runs stomping on each other 2025-03-30 23:36:03 +10:00
Keith Martin
62bdb86e95 Set DSN for SQLITE to empty string due to MemoryDB expectations and add new function to derive folder name 2025-03-30 23:34:47 +10:00
Keith Martin
05e6a732d5 Switch to named DSNs for testing 2025-03-30 22:28:12 +10:00
Keith Martin
c96fbcec7d Run "make fmt-go" 2025-03-30 00:00:28 +10:00
Keith Martin
c2a1ce5d58 Correct expected number of matches 2025-03-29 23:57:44 +10:00
Keith Martin
6cacfe81fc Prevent DBMS specific tests running if not testing against that DBMS 2025-03-29 23:56:33 +10:00
Keith Martin
4f90432eb9 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-03-29 22:26:51 +10:00
Keith Martin
c01fd9e667 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-03-29 21:44:26 +10:00
Keith Martin
0aa53bf42f Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-03-28 23:52:30 +10:00
Keith Martin
88ba1bd79c Add MariaDB handling for test 2025-03-28 23:34:37 +10:00
Keith Martin
ca9e4a145b Change to Millisecond as Mariadb is not nano second aware 2025-03-28 23:32:13 +10:00
Keith Martin
ad15c8f44c Fix test to handle legacy users existing 2025-03-28 22:59:49 +10:00
Keith Martin
b5c9d6b8ba Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-03-28 22:04:17 +10:00
Keith Martin
c3eab926ec Add PostgreSQL version detection and handling 2025-03-28 21:45:26 +10:00
Keith Martin
dd4a579df0 Change duration to Microsecond as PostgreSQL is not Nanosecond aware 2025-03-28 21:03:07 +10:00
Keith Martin
eeaecb641e Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-03-27 22:56:59 +10:00
Keith Martin
f0b676acdf Improve testing for legacy users 2025-03-27 22:48:58 +10:00
Keith Martin
6491e51d72 sqlite3 changes for gormv2 2025-03-27 22:41:32 +10:00
Keith Martin
c8ec5d7a87 gorm HasTable move to Migrator 2025-03-27 22:40:49 +10:00
Keith Martin
2cbd214f51 gorm.DeletedAt{} changes 2025-03-27 22:40:26 +10:00
Keith Martin
26007f58e3 Handle additional migration script for auth tables 2025-03-27 21:46:44 +10:00
Keith Martin
087fc4ab81 Update counts expected due to new fixtures 2025-03-27 21:45:19 +10:00
Keith Martin
8669cb512a Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-03-27 20:52:16 +10:00
Keith Martin
4dad472fe9 update testing status. 2025-03-07 20:56:41 +10:00
Keith Martin
92bc8d5c78 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-03-07 18:40:06 +10:00
Keith Martin
8991651c00 add postgres tests 2025-03-07 18:34:34 +10:00
Keith Martin
7616c07ac0 refactor functions and tests to separate files 2025-03-07 18:33:53 +10:00
Keith Martin
46a9a12cc9 allow for Migration and Version structs to already be in database 2025-03-07 18:32:39 +10:00
Keith Martin
bcc9b95320 Add migration testing for postgres 2025-03-07 16:41:51 +10:00
Keith Martin
05f6237706 fix files AUTO_INCREMENT (.sql out of sync with .go file) 2025-03-07 16:39:26 +10:00
Keith Martin
cec96c7bbd Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-03-07 15:41:45 +10:00
Keith Martin
7a61f84f3d add retry logic as there is a small chance that 2 automigrates happen to run at the same time 2025-03-07 15:41:13 +10:00
Keith Martin
7f819fde8a Add new storage mount as per compose.yaml 2025-03-07 15:01:33 +10:00
Keith Martin
0793c564c1 Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-03-07 12:59:30 +10:00
Keith Martin
d99aaf1595 go mod tidy 2025-03-07 12:59:17 +10:00
Keith Martin
c674452a21 add log.info about expected sql errors during migration tests 2025-03-07 11:03:33 +10:00
Keith Martin
ea51083549 Add capability to ignore table missing on truncate 2025-03-07 11:02:54 +10:00
Keith Martin
c2cc24e35d Fix AfterDelete where cause error, and ensure that logic is still safe. 2025-03-06 23:36:16 +10:00
Keith Martin
8f2e7db6c5 fix marker_test to check for correct/nil errors, and add Create so where clause error is not generated 2025-03-06 22:30:58 +10:00
Keith Martin
fdc93ca09d add more logging of when to expect sql errors 2025-03-06 22:13:27 +10:00
Keith Martin
dc6207b402 Correct log level so errors are visible 2025-03-06 21:15:49 +10:00
Keith Martin
8da76c563f Save bypassed, due to new database, migrations so they aren't run later. 2025-03-06 21:15:31 +10:00
Keith Martin
3b2f483f44 Change logging level to Info so it's visible 2025-03-06 21:13:51 +10:00
Keith Martin
25edea8e57 Add NewDatabase Option and Detection 2025-03-06 21:13:03 +10:00
Keith Martin
3e77499619 add log.info messages when database errors are expected 2025-03-06 20:25:43 +10:00
Keith Martin
16bd38c88d Fix incorrect id in test 2025-03-06 19:41:53 +10:00
Keith Martin
f53361418b Remove sql error by checking before creating for Cell 2025-03-06 19:31:07 +10:00
Keith Martin
b42757f0f1 Remove duplicate error from FirstOrCreate 2025-03-06 19:22:04 +10:00
Keith Martin
d7104b4acb Remove foreign key violation due to missing photo record 2025-03-06 19:21:03 +10:00
Keith Martin
cec1f9ccf3 Use GORM Upsert capability for AddDuplicate 2025-03-06 18:19:55 +10:00
Keith Martin
0779a603a6 Correct First to return nil for scenario where there is no PhotoID and PhotoUID 2025-03-06 17:44:50 +10:00
Keith Martin
9c66f0c2fb Fix issue with no photos being found from a related file, removing an SQL error as well 2025-03-06 17:35:48 +10:00
Keith Martin
c0cfd3ad8e Prevent pk violation error if the record already exists for Details 2025-03-06 17:20:36 +10:00
Keith Martin
c7581cbb24 Wrap calls to the legacy user table with checks to ensure that it exists to prevent errors being thrown. 2025-03-06 16:57:12 +10:00
Keith Martin
4031d0112b improve fixture creation to remove errors if records already exist due to gorm2 (switch to save instead of create) 2025-03-06 16:55:37 +10:00
Keith Martin
39d4af7d4f Remove error if legacy table doesn't exist 2025-03-06 16:29:14 +10:00
Keith Martin
37f98b26f4 Fix database backup test that only works if using memory database with sqlite 2025-03-06 15:02:34 +10:00
Keith Martin
56ccaf3254 remove more causes of duplicate key errors for new folders 2025-03-05 22:05:29 +10:00
Keith Martin
f4c2528a7d remove duplicate key error messages from indexing 2025-03-05 21:55:00 +10:00
Keith Martin
4919ba7d0c update tests to support GormV2 correctly 2025-03-04 23:09:50 +10:00
Keith Martin
b3da52f386 Remove SQLErrors due to reuse of UserUID 2025-03-04 23:00:36 +10:00
Keith Martin
1a78f69ba9 update test to support GormV2 correctly 2025-03-04 22:52:03 +10:00
Keith Martin
1e526f4459 move func's that are used outside of tests out of _test.go file to remove VS code compiler error 2025-03-04 22:20:16 +10:00
Keith Martin
91889ec61e remove duplicate key violations, and improve tests for folder creation 2025-03-04 22:18:15 +10:00
Keith Martin
de8941278b Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-03-04 21:57:02 +10:00
Keith Martin
ba48eeaabd Update to 17-alpine 2025-03-03 21:56:17 +10:00
Keith Martin
674cdc1c68 update postgresql-client install to use postgresql.org sources and install latest 2025-03-03 21:47:43 +10:00
Keith Martin
be3d1fdc5e Add PostgreSQL benchmarking 2025-03-03 21:10:06 +10:00
Keith Martin
c241439b0a move func's that are used outside of tests out of _test.go file to remove VS code compiler error 2025-03-03 18:09:50 +10:00
Keith Martin
7c8afc8984 Fix tests that are expecting specific scenarios that are not valid for make test-sqlite 2025-03-03 13:53:36 +10:00
Keith Martin
078d53f4db enforce installing postgresql-client in docker compose file 2025-03-03 11:39:21 +10:00
Keith Martin
31e4c8ce02 Latest test status 2025-03-02 22:43:18 +10:00
Keith Martin
b98d5d6ce4 reverted to alpine-16, with commented out attempted changes 2025-03-02 22:42:46 +10:00
Keith Martin
55a6a3c0b5 Fix PostgreSQL error about bool/integer and case insensitivity 2025-03-02 21:57:58 +10:00
Keith Martin
5c42cc05c6 add execute via chmod 2025-03-02 20:49:48 +10:00
Keith Martin
6092ea5cc5 try and install psql into docker containers 2025-03-02 20:46:23 +10:00
Keith Martin
702ed8afd8 Fix subquery returning more than 1 row issue 2025-03-02 20:14:13 +10:00
Keith Martin
b54f5d5d8c Update dsn Parse to support PostgreSQL 2025-03-02 16:07:34 +10:00
Keith Martin
ff7701a630 Add postgres to Usage 2025-03-02 16:06:58 +10:00
Keith Martin
bf33b605ad fix numeric instead of true/false 2025-03-02 14:57:50 +10:00
Keith Martin
c0733d44a5 postgresql lower values and like 2025-03-02 14:10:51 +10:00
Keith Martin
d0c11c502a postgresql lower values and likes 2025-03-02 13:44:00 +10:00
Keith Martin
df11369464 postgresql lower vaules for photo_title 2025-03-02 13:34:45 +10:00
Keith Martin
7a36182c9b Fix test Correct Local to UTC 2025-03-02 13:27:17 +10:00
Keith Martin
6dbecccb36 postgresql lower values for subjects 2025-03-02 13:26:51 +10:00
Keith Martin
1df405f799 postgresql lower varchar on likes 2025-03-02 13:10:01 +10:00
Keith Martin
1518a2242f Fix panic in test 2025-03-02 12:53:12 +10:00
Keith Martin
2da042916b postgresql lower varchar on likes 2025-03-02 12:25:33 +10:00
Keith Martin
7867b5f1ac Fix some of the case insensitivity issues using lower 2025-03-01 23:11:09 +10:00
Keith Martin
4f44b520f2 test status update after internal/entity tests passed on sqlite, mysql and postgresql 2025-03-01 18:15:50 +10:00
Keith Martin
89136f4fb2 Fix unstable test (assumed order of rows returned without an order by) 2025-03-01 18:04:26 +10:00
Keith Martin
1136b55259 Update test status and flag a couple that failed on retest of all in internal/entity 2025-03-01 18:03:47 +10:00
Keith Martin
368321269d Update connections for postgresql to use pgxpool 2025-03-01 17:38:55 +10:00
Keith Martin
e2da30b660 Fix postgres and gorm timezone handling 2025-03-01 15:09:38 +10:00
Keith Martin
44bfe71b93 Show tests fixed by previous fixes. 2025-02-28 23:59:52 +10:00
Keith Martin
855a016c58 Match latest from develop branch 2025-02-28 18:48:25 +10:00
Keith Martin
37cea68025 Fix SetPrimary using number instead of true/false 2025-02-28 18:44:27 +10:00
Keith Martin
75e5d6e3ec Fix test SQL error 2025-02-28 18:38:27 +10:00
Keith Martin
ffa50ab84c Remove Debug() gorm statements 2025-02-28 18:30:52 +10:00
Keith Martin
61f29950cc Merge remote-tracking branch 'keif888/gorm2' into PostgreSQL 2025-02-28 18:26:46 +10:00
Keith Martin
4a25a6b611 Fix EstimateCountry not supporting PostgreSQL 2025-02-28 17:34:30 +10:00
Keith Martin
46b0f7b4b7 go mod tidy 2025-02-28 17:32:17 +10:00
Keith Martin
41ea19d1ca Fix complex ORDER BY not generating correctly 2025-02-28 17:31:24 +10:00
Keith Martin
aa8ff8ac46 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-28 16:58:55 +10:00
Keith Martin
5fc8864092 Fix marker_invalid not using FALSE 2025-02-28 15:27:30 +10:00
Keith Martin
c08426513d Fix UpdateLabelCounts and UpdateSubjectCounts 2025-02-27 23:10:06 +10:00
Keith Martin
8b962da919 Others fixed by reset of sequence 2025-02-27 22:43:33 +10:00
Keith Martin
83d213baa9 Fix reset of sequence for auth_users 2025-02-27 22:37:29 +10:00
Keith Martin
0eba2bdbb8 Add PostgreSQL backup and restore 2025-02-27 22:13:35 +10:00
Keith Martin
5d2e3fdfd2 Fix searchPhotos for PostgreSQL 2025-02-27 17:00:04 +10:00
Keith Martin
920e16994c Add test for data returned for flower test 2025-02-27 16:59:34 +10:00
Keith Martin
70a15db9d3 FixPostgre SQL errors 2025-02-27 12:40:41 +10:00
Keith Martin
8687ed448d Fix TestFaces 2025-02-27 12:39:49 +10:00
Keith Martin
d096a82603 Update with tests not run due to panic 2025-02-27 12:09:53 +10:00
Keith Martin
fa0cd8e0c9 Fix tests so they do not panic when expected results are not there 2025-02-27 11:57:02 +10:00
Keith Martin
16caeab36c Fix tests so they do not panic when expected results are not there 2025-02-27 11:36:22 +10:00
Keith Martin
44376ee1f6 Update status and issue with case sensitivity 2025-02-27 00:04:56 +10:00
Keith Martin
4980918571 Fix FixPrimaries 2025-02-26 23:32:00 +10:00
Keith Martin
69e370b815 Add PostgreSQL to SelectedPhotos 2025-02-26 23:27:40 +10:00
Keith Martin
533ef9f6f3 Fix MomentsLabels 2025-02-26 23:22:29 +10:00
Keith Martin
4d72032532 Fix MomentsStates 2025-02-26 23:20:05 +10:00
Keith Martin
dd06557beb Fix MomentsCountries 2025-02-26 23:18:01 +10:00
Keith Martin
d9c9a50296 Fix MomentsTime 2025-02-26 23:16:24 +10:00
Keith Martin
c7e8b17d0f Fix AlbumFolders 2025-02-26 23:12:40 +10:00
Keith Martin
a0d7bc7bec Fix SetPhotoPrimary 2025-02-26 23:04:15 +10:00
Keith Martin
c7a9a5b391 Update test to support PostgreSQL 2025-02-26 23:00:24 +10:00
Keith Martin
2a3777270c Fix SetDownloadFileID 2025-02-26 22:55:50 +10:00
Keith Martin
eae2c6aa72 Add PostgreSQL to SelectedFiles 2025-02-26 22:50:51 +10:00
Keith Martin
e108c54ba1 Change UpdateLabelCovers to individual SQLs 2025-02-26 22:34:30 +10:00
Keith Martin
50ad74fb51 Revert to each DBMS having it's own SQL code to avoid unintended consequences 2025-02-26 22:29:06 +10:00
Keith Martin
90a492e7cc Fix UpdateSubjectCovers 2025-02-26 22:22:35 +10:00
Keith Martin
0a3465a93b Fix UpdateLabelCovers 2025-02-26 21:45:32 +10:00
Keith Martin
d2943ae895 Fix UpdateAlbumMonthCovers 2025-02-26 21:31:31 +10:00
Keith Martin
f6c37e2766 Fix UpdateAlbumFolderCovers 2025-02-26 21:24:08 +10:00
Keith Martin
60ccc38a04 Fix UpdateAlbumDefaultCovers 2025-02-26 21:11:18 +10:00
Keith Martin
10f78e9a2a Initial unit test status for PostreSQL 2025-02-26 16:12:49 +10:00
Keith Martin
eecf746c16 Add handling for PostgreSQL abort on failure for transactions 2025-02-26 14:28:53 +10:00
Keith Martin
482beddd12 Improve test for rerunning on PostgreSQL/MariaDB 2025-02-26 14:18:12 +10:00
Keith Martin
ad9b22df38 Add lock timeouts to match MariaDB (testing = 5s, running = 50s) 2025-02-26 14:16:02 +10:00
Keith Martin
189ce679c0 Exclude bytea types for postgresql 2025-02-26 13:37:19 +10:00
Keith Martin
297ff2bc60 RegenerateIndex updated for postgresql 2025-02-26 13:27:06 +10:00
Keith Martin
837a230c26 remove Debug(). from tests 2025-02-26 13:25:26 +10:00
Keith Martin
98041fbe79 Add postregsql make commands 2025-02-26 13:19:02 +10:00
Keith Martin
94a657d298 Improve testing for RegenerateIndex 2025-02-26 13:18:39 +10:00
Keith Martin
1ae8a28257 Add all postgresql make commands and reset sql scripts 2025-02-26 11:06:40 +10:00
Keith Martin
e7b062ce7b Add some postgresql make targets 2025-02-25 23:36:35 +10:00
Keith Martin
6ce7bda065 keycloak database create and preload with data 2025-02-25 23:23:26 +10:00
Keith Martin
61036c49d7 update compose and init for postgresql 2025-02-25 21:18:50 +10:00
Keith Martin
3d352f2314 Gorm v1 vs Gorm v2 test 2025-02-25 12:43:56 +10:00
Keith Martin
1e693a5d0b database ddl for generator 2025-02-25 12:43:22 +10:00
Keith Martin
3add6e2d5c Latest test results and Makefile used to generate them. (Makefile has fix to handle issue with -experimental-multiple-windows 2025-02-25 12:41:37 +10:00
Keith Martin
29ce4f49fa go mod tidy 2025-02-24 22:14:40 +10:00
Keith Martin
b2113804da Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-24 22:13:09 +10:00
Keith Martin
78c55ab9fa Enable Error Logging before release. 2025-02-24 20:43:54 +10:00
Keith Martin
e0ae19b2c2 Remove archived performance tests 2025-02-24 20:27:51 +10:00
Keith Martin
546f450168 Remove old benchmark 2025-02-24 20:27:18 +10:00
Keith Martin
e3beaa5056 Add benchmark tests and latest execution 2025-02-24 20:22:19 +10:00
Keith Martin
7181b38058 Ensure that error messages are captured 2025-02-24 19:55:48 +10:00
Keith Martin
c3845e0558 Hide all but errors being logged 2025-02-24 16:05:58 +10:00
Keith Martin
d72c4e7839 Remove sqlite3 strings for gorm2 2025-02-23 22:51:06 +10:00
Keith Martin
8ee65846b0 Address gorm2 differences in benchmark tests 2025-02-23 22:44:57 +10:00
Keith Martin
c1f40b8742 Merge remote-tracking branch 'origin/Benchmarking' into gorm2 2025-02-23 22:23:55 +10:00
Keith Martin
e3ca1dbcd9 Change to Init to avoid truncates 2025-02-23 22:23:38 +10:00
Keith Martin
1298a73cbb Merge remote-tracking branch 'origin/Benchmarking' into gorm2 2025-02-23 22:04:16 +10:00
Keith Martin
696d22e66b Add more benchmarks 2025-02-23 22:02:25 +10:00
Keith Martin
800c275a99 Increase allowed time for 100k migration 2025-02-22 23:01:46 +10:00
Keith Martin
9be8d9376f Merge remote-tracking branch 'origin/Benchmarking' into gorm2 2025-02-22 22:58:38 +10:00
Keith Martin
da389e4747 Apply GormV2 to benchmarks 2025-02-22 22:58:30 +10:00
Keith Martin
56c04b0137 Exclude the restore/copy time from the benchmark 2025-02-22 22:57:03 +10:00
Keith Martin
337994c18a Merge remote-tracking branch 'origin/Benchmarking' into gorm2 2025-02-22 22:27:08 +10:00
Keith Martin
efaeb0f818 Migration Benchmark Tests for sqlite and mysql 2025-02-22 22:18:31 +10:00
Keith Martin
d42c2aaf09 Merge remote-tracking branch 'origin/develop' into Benchmarking 2025-02-22 11:26:50 +10:00
Keith Martin
05a43d59ae go mod tidy 2025-02-21 22:36:55 +10:00
Keith Martin
ce206cf75f initial version of doco 2025-02-21 22:36:16 +10:00
Keith Martin
105fc5552b Cleanup some gorm attributes 2025-02-21 18:23:10 +10:00
Keith Martin
fe0d0e8c75 Fix benchmark last row in table 2025-02-21 16:58:25 +10:00
Keith Martin
ae57efeeb5 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-21 16:57:05 +10:00
Keith Martin
e9e3efaa14 Update testing status 2025-02-21 16:55:13 +10:00
Keith Martin
a4de6ddfed Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-21 15:02:40 +10:00
Keith Martin
c4474a98e5 remove Debug() 2025-02-21 13:15:02 +10:00
Keith Martin
9b0d6ee399 Add AutoIncrement testing, and complete min/max set of tables 2025-02-21 13:11:07 +10:00
Keith Martin
85c6a54762 add missing AUTO_INCREMENT 2025-02-21 13:09:27 +10:00
Keith Martin
8833b1a9e4 Update detected errors for MySQL invalid data test 2025-02-20 22:41:59 +10:00
Keith Martin
1c87e12738 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-20 22:28:21 +10:00
Keith Martin
6762aefe2c auto generated go files 2025-02-20 22:27:02 +10:00
Keith Martin
3669bf8733 Add initial testing to confirm that data types have migrated successfully. 2025-02-20 22:13:46 +10:00
Keith Martin
6dc262d0fc Add drop of all tables to ensure clean slate for migration tests 2025-02-20 21:24:57 +10:00
Keith Martin
0060d1168c Add ignore of tables not existing (mysql) when delete/update. 2025-02-20 21:23:02 +10:00
Keith Martin
cae323dfb1 Apply gorm type generics 2025-02-18 22:47:47 +10:00
Keith Martin
b113224266 Apply gorm type generics 2025-02-18 22:17:37 +10:00
Keith Martin
bf2c0138f8 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-16 23:03:03 +10:00
Keith Martin
e1085a978c Add 10k and 100k mysql tests, improve code 2025-02-16 23:02:29 +10:00
Keith Martin
386e2549a0 Move migration performance tests 2025-02-16 20:59:13 +10:00
Keith Martin
a5212ab39a address ImageJpeg case change 2025-02-16 20:38:40 +10:00
Keith Martin
194957490d Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-15 23:16:48 +10:00
Keith Martin
e4ebcc5f8b Add mariadb database create script for migration tests 2025-02-15 23:15:32 +10:00
Keith Martin
8f8bdafcdf Correct spelling of PostgreSQL 2025-02-15 23:14:41 +10:00
Keith Martin
58fffb7480 Add MySQL invalid data and 1k migration tests 2025-02-15 23:12:42 +10:00
Keith Martin
bd41750b03 Check that there are the right number of error messages from migration 2025-02-15 21:49:23 +10:00
Keith Martin
29d629d4d4 Revert album_path to varbinary(1024) 2025-02-13 22:26:58 +10:00
Keith Martin
780bcfbb8c More data type changes which gorm is not applying 2025-02-13 22:12:17 +10:00
Keith Martin
aebe7c86e9 Add foreign key/primary key type changes that GORM migration doesn't handle. 2025-02-12 23:57:27 +10:00
Keith Martin
f5d141cf05 Add MySQL migration initial test 2025-02-12 23:56:43 +10:00
Keith Martin
02160dd6ba Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-12 20:48:00 +10:00
Keith Martin
3439f079e0 Update with latest test results 2025-02-12 20:47:25 +10:00
Keith Martin
51310399c9 Address instability of Scroll to top test 2025-02-12 20:47:05 +10:00
Keith Martin
2e3ec4bc9b Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-11 21:52:12 +10:00
Keith Martin
929c8abea9 go automated changes 2025-02-11 21:48:54 +10:00
Keith Martin
8cb378dca8 Merge remote-tracking branch 'origin/develop' into Benchmarking 2025-02-06 21:23:43 +10:00
Keith Martin
f7cf1dc89b Merge remote-tracking branch 'origin/Benchmarking' into gorm2 2025-02-06 20:48:39 +10:00
Keith Martin
42c68fba98 Address Jpeg case change issue 2025-02-06 20:42:12 +10:00
Keith Martin
d8fa21e622 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-02-06 20:41:06 +10:00
Keith Martin
8ff0146847 Add mariadb benchmarks 2025-02-05 21:11:04 +10:00
Keith Martin
83bf238aa0 Address change in case of Jpeg 2025-02-05 20:26:29 +10:00
Keith Martin
9b4cbb9d83 Merge remote-tracking branch 'origin/develop' into Benchmarking 2025-02-05 15:12:51 +10:00
Keith Martin
77c2bb4479 Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-01-31 23:08:27 +10:00
Keith Martin
47e8e49b11 Add Benchmarking in 2025-01-30 20:55:00 +10:00
Keith Martin
4e4a734bb0 Merge remote-tracking branch 'origin/develop' into Benchmarking 2025-01-30 14:07:22 +10:00
Keith Martin
6bf42958fa Merge remote-tracking branch 'origin/develop' into Benchmarking 2025-01-15 20:54:26 +10:00
Keith Martin
df0eb510cb Add some entity create/delete bench tests 2025-01-12 18:47:54 +10:00
Keith Martin
e15240ae54 Add Benchmark tests to Makefile 2025-01-11 22:01:28 +10:00
Keith Martin
c60847f9ae Merge remote-tracking branch 'photoprism/develop' into gorm2 2025-01-05 20:27:43 +10:00
Keith Martin
9be1969ce6 Add tests for 1k, 10k and 100k sqlite migration to GormV2 2024-11-23 23:17:30 +10:00
Keith Martin
8c09390fec Fix error on truncate of Labels 2024-11-23 20:31:35 +10:00
Keith Martin
ead033385a Merge remote-tracking branch 'photoprism/develop' into gorm2 2024-11-23 15:46:07 +10:00
Keith Martin
f8cdc2c9aa Correct tests lost in merge 2024-10-26 23:23:08 +10:00
Keith Martin
603fc6901e Revert incorrectly changed parameter order 2024-10-26 22:26:52 +10:00
Keith Martin
484176ed9b Change test result verification due to album_fixture updates. 2024-10-26 22:13:00 +10:00
Keith Martin
ad982dce8d DeletedAt, counts due new fixtures and ID changes to match origin 2024-10-26 21:54:31 +10:00
Keith Martin
5adb6dd402 Merge remote-tracking branch 'photoprism/develop' into gorm2 2024-10-26 20:21:45 +10:00
Keith Martin
41d4f322cd Add sqlite stored tests 2024-10-26 20:04:32 +10:00
Keith Martin
ca767fdada command line program to generate a database with configurable number of photos 2024-10-23 21:31:10 +10:00
Keith Martin
bc5698cb90 1st cut of Performance Testing DB creation 2024-10-21 22:11:15 +10:00
Keith Martin
6540ebbec8 Add Gormv2 DeletedAt handling 2024-10-19 19:41:53 +10:00
Keith Martin
d16ee5e909 Merge remote-tracking branch 'photoprism/develop' into gorm2 2024-10-19 19:36:50 +10:00
Keith Martin
3c8a793699 MySQL foreign key cleanups 2024-10-14 19:58:11 +10:00
Keith Martin
b5226429d8 Enable ordered execution of migration based on foreign keys 2024-10-14 19:57:47 +10:00
Keith Martin
f0793936ce Correct data type mismatch to folders 2024-10-14 16:01:33 +10:00
Keith Martin
2e93baca89 Add additional migration type detections 2024-10-13 22:09:29 +10:00
Keith Martin
99c4575e52 Ensure that the Gorm2 version is marked done 2024-10-13 22:07:08 +10:00
Keith Martin
69654d13ed Fix issue when using SQLite file backed database 2024-10-13 22:06:08 +10:00
Keith Martin
4c1d6f9e1c sqlite migration to gorm 2 and referential integrity cleanup 2024-10-11 23:58:15 +10:00
Keith Martin
1d0fee8a70 Correct invalid file_fixture data and update tests to match. 2024-10-10 21:10:28 +10:00
Keith Martin
649945d976 Fix acceptance test inconsistent fail on scroll 2024-10-07 19:46:14 +10:00
Keith Martin
cb56effe87 Update status of tests 2024-10-07 15:15:21 +10:00
Keith Martin
acb9f9f20f Fix cleanup which had timing issue on MariaDB 2024-10-06 23:38:02 +10:00
Keith Martin
9ec1b94f8c Fix incorrect Nil expectation 2024-10-06 23:07:38 +10:00
Keith Martin
c613d57989 Add check for save inconsistencies on all db tests. 2024-10-06 23:04:35 +10:00
Keith Martin
9cc486a48d Add ability to detect inconsitencies in Photo 2024-10-06 22:59:22 +10:00
Keith Martin
b33883412c Change update to use new ModelValues which can strip undesirable values from the Struct 2024-10-06 22:57:30 +10:00
Keith Martin
0a10428335 Remove testing records from db 2024-10-06 19:44:04 +10:00
Keith Martin
be3b3e010d Add Search function and tests 2024-10-06 19:36:40 +10:00
Keith Martin
ec59b0dd6a remove extra Debug() 2024-10-06 19:23:13 +10:00
Keith Martin
a864db86c2 Update tests to detect errors from db calls 2024-10-06 19:22:58 +10:00
Keith Martin
bca366c257 Uplift to use Preloaded Find 2024-10-05 22:06:38 +10:00
Keith Martin
eacc0f09dc Add Preloaded Find and First 2024-10-05 22:02:16 +10:00
Keith Martin
faa416738b Add Preloaded Find and First 2024-10-05 21:59:34 +10:00
Keith Martin
c6c2849e40 Add tests for Find and First searching with Preload 2024-10-05 21:59:10 +10:00
Keith Martin
b1c7bd791d Fix test so it cleans up before due to other tests 2024-10-05 21:57:13 +10:00
Keith Martin
98f02bbb0e Correct AlbumUID after update to album_fixtures 2024-10-05 20:51:01 +10:00
Keith Martin
e1ec2d7619 Remove excess log statement 2024-10-05 16:01:41 +10:00
Keith Martin
4565bc1636 Add Primary and Secondary file load test 2024-10-05 16:00:32 +10:00
Keith Martin
76e5fea93f Add a GormV1 behaviour test 2024-10-05 14:43:17 +10:00
Keith Martin
c2382b95ba Add test for Save with Unknown in Struct vs Valid ID 2024-10-05 13:06:49 +10:00
Keith Martin
db0e6bd549 Merge remote-tracking branch 'photoprism/develop' into gorm2 2024-10-03 19:32:06 +10:00
Keith Martin
cea7e27f04 Add call to Create if there are constraint issues so Gorm will insert in correct order 2024-10-03 16:45:02 +10:00
Keith Martin
24fa378249 add test for save with not in database embedded struct record 2024-10-03 16:24:36 +10:00
Keith Martin
10ef4467f5 Revert to save instead of create 2024-10-03 16:18:56 +10:00
Keith Martin
ff7e9e7605 Remove db logging 2024-10-03 16:18:18 +10:00
Keith Martin
9cbc249ad3 Improve error detection and reporting 2024-10-03 16:11:54 +10:00
Keith Martin
dd226ae37f Tests to detect issues with Save vs Create 2024-10-03 14:21:49 +10:00
Keith Martin
c380e79569 Add db locking testing to cover errors from entity_update and entity_save. 2024-10-02 23:36:26 +10:00
Keith Martin
87dc497aaf Improve File.Create to allow collection of Photo.ID as PhotoID 2024-10-02 20:48:00 +10:00
Keith Martin
01cf5d7c23 Merge remote-tracking branch 'photoprism/develop' into gorm2 2024-10-02 17:04:20 +10:00
Keith Martin
0367f1558f Implement Preload for PhotoLabel.Label and tests 2024-10-02 16:57:38 +10:00
Keith Martin
daf2c68377 Update expected error string 2024-10-02 15:40:40 +10:00
Keith Martin
3227537f60 Improve error message, tests, and fix bug found 2024-10-02 15:37:21 +10:00
Keith Martin
03b5fe1351 Update test report 2024-10-01 23:17:12 +10:00
Keith Martin
f36a5ceadc remove sqlite3 has been renamed to sqlite 2024-10-01 23:16:54 +10:00
Keith Martin
48fc58b1df Add db mutex for missed test suite that truncates db tables 2024-10-01 23:16:28 +10:00
Keith Martin
2f55b7f2e3 Update VARCHAR to gorm generic 2024-10-01 21:40:10 +10:00
Keith Martin
dd6b505e4a Replace auto_preload 2024-10-01 20:54:09 +10:00
Keith Martin
487a2f947e go mod tidy 2024-10-01 19:43:36 +10:00
Keith Martin
f3b924378f Add db controlled mutex to prevent database init occuring during another test as this breaks the tests on MariaDB 2024-10-01 19:35:54 +10:00
Keith Martin
d256744058 remove hard coded dbname for MariaDB 2024-10-01 00:03:05 +10:00
Keith Martin
4aaf64a3b5 sqlite foreign_keys 2024-09-30 23:46:59 +10:00
Keith Martin
3d3a83800b Improve backup testing 2024-09-30 22:57:56 +10:00
Keith Martin
709efa7831 Ensure that sqlite has foreign keys enabled in connection string 2024-09-30 22:56:59 +10:00
Keith Martin
25b9603743 Revert test changes due to non reset of auto increment 2024-09-30 16:30:10 +10:00
Keith Martin
981a1d6271 Reset auto increment on truncate 2024-09-30 16:18:39 +10:00
Keith Martin
c0cc672419 Change truncate to individual deletes in foreign key order 2024-09-30 00:04:33 +10:00
Keith Martin
20d47febc4 Skip if not sqlite 2024-09-30 00:03:46 +10:00
Keith Martin
9a205558cb Fix tests to handle foreign keys on test setup 2024-09-30 00:03:22 +10:00
Keith Martin
565382ae4a Add ability to recreate MariaDB database if env PHOTOPRISM_TEST_DBDROP is true 2024-09-29 17:20:29 +10:00
Keith Martin
12b5636926 Fix casing issue on File_luminance which causes migration issues. 2024-09-29 16:56:04 +10:00
Keith Martin
fbc2d0ef51 Fix database version detection so it works with MariaDB V11+ 2024-09-29 16:29:20 +10:00
Keith Martin
dd8b029428 update tests to detect sqlite vs mariadb 2024-09-29 00:12:41 +10:00
Keith Martin
f74a528901 Fix order by not generating correctly 2024-09-29 00:12:08 +10:00
Keith Martin
a29ef74580 Change expected record numbers due to removal of duplicate ID in photo_fixtures 2024-09-29 00:11:38 +10:00
Keith Martin
48c0a8a016 add sqlite exclusion for negative limit 2024-09-29 00:10:45 +10:00
Keith Martin
4f37d68521 fk's enabled on sqlite so revert exclusion 2024-09-29 00:10:08 +10:00
Keith Martin
30bccb9e08 Fix test to work on MariaDB 2024-09-29 00:09:30 +10:00
Keith Martin
a1a98d9f5d Correct Preload issue with casing of column name 2024-09-29 00:09:07 +10:00
Keith Martin
c1c6c980ce Handle save not calling BeforeCreate 2024-09-29 00:08:41 +10:00
Keith Martin
b98a371968 Bring the Photo.ID across when not provided 2024-09-29 00:08:03 +10:00
Keith Martin
69725d79da Match gorm1 functionality for record not found 2024-09-29 00:07:19 +10:00
Keith Martin
0293f1641d Make sure the entity is there when running the AutoMigrator 2024-09-29 00:06:19 +10:00
Keith Martin
fabb64101b Add creation of required records to enable tests to work with foreign keys. 2024-09-29 00:05:40 +10:00
Keith Martin
55fdabeb37 Handle nullable FileID for filesync 2024-09-29 00:01:35 +10:00
Keith Martin
9d05b40e50 Enable foreign keys for sqlite as per mariadb 2024-09-28 16:23:22 +10:00
Keith Martin
ac26bafe02 Correct duplicate PK error 2024-09-28 15:39:56 +10:00
Keith Martin
6c8a7390e9 Correct counts to gorm2 numbers 2024-09-28 15:39:04 +10:00
Keith Martin
22b86fe05f Add new PhotoAlbums entry so that RemoveDuplicateMoments doesnt break other tests 2024-09-28 13:56:08 +10:00
Keith Martin
68f7a3ce4f Add PreLoad for User back 2024-09-28 12:51:18 +10:00
Keith Martin
8d86fd0870 Fix MariaDB foreign key violation error due to records in PhotoAlbums when deleting Albums. 2024-09-27 22:17:31 +10:00
Keith Martin
ab7d102b18 mod and sum files, and latest test status 2024-09-25 23:08:59 +10:00
Keith Martin
194a5043ad Fix Postgres Database Driver string 2024-09-25 23:02:59 +10:00
Keith Martin
f685920f7f Fix sqlite back to sqlite3 2024-09-25 23:02:36 +10:00
Keith Martin
668ab4df50 Fix Struct column names passed to db 2024-09-25 23:02:02 +10:00
Keith Martin
caa098bf6a Raise error on missing PK values on update 2024-09-25 23:01:16 +10:00
Keith Martin
f533d4155b Workaround data diff due to gorm1 nulling fields on updates 2024-09-25 23:00:37 +10:00
Keith Martin
b208ea831d Fix limit test 2024-09-25 22:59:55 +10:00
Keith Martin
23ccb8e6fa Fix soft delete detection 2024-09-25 22:59:30 +10:00
Keith Martin
8371cadd5e Revert to use of Create on Test Fixture data creation 2024-09-25 20:43:58 +10:00
Keith Martin
622d801f81 Enable SoftDelete on PhotoResult 2024-09-25 18:20:26 +10:00
Keith Martin
78508fabfa Enable SoftDelete on User 2024-09-25 18:16:55 +10:00
Keith Martin
234cf9c3c3 Enable SoftDelete on Subject 2024-09-25 18:13:02 +10:00
Keith Martin
3bc56a2e78 Enable SoftDelete on Service 2024-09-25 18:07:59 +10:00
Keith Martin
92ae756794 Enable SoftDelete on Photo 2024-09-25 17:59:06 +10:00
Keith Martin
62ccd13f79 Enable SoftDelete on Lens 2024-09-25 14:59:00 +10:00
Keith Martin
ed92d82e9a Enable SoftDelete on Label 2024-09-25 14:56:18 +10:00
Keith Martin
05db6b2d3f Enable SoftDelete on Folder 2024-09-25 12:19:59 +10:00
Keith Martin
0feda2037f Enable SoftDelete on Camera 2024-09-25 12:12:43 +10:00
Keith Martin
34994d68c4 Fix missing Where on Update for places 2024-09-24 23:38:27 +10:00
Keith Martin
86077284a6 Fix missing Where on Update for Keyword 2024-09-24 23:37:40 +10:00
Keith Martin
a2d30b94c7 Enable SoftDelete on Album 2024-09-24 22:23:37 +10:00
Keith Martin
fa58e7cf21 Fix missing Where on Update for sqlite 2024-09-24 22:14:07 +10:00
Keith Martin
2f2718b741 Enable DeletedAt for User and add handling for Create on Save 2024-09-24 20:39:03 +10:00
Keith Martin
e2a98f4031 Correct Hook signature 2024-09-24 14:12:12 +10:00
Keith Martin
fe00886528 Merge remote-tracking branch 'origin/gorm2' into gorm2 2024-09-23 20:52:05 +10:00
Keith Martin
4a7eb59816 Add test status doco 2024-09-23 20:50:32 +10:00
Keith Martin
10c37f5b12 Correct FileCounts due to fixture duplicate correction 2024-09-23 20:38:37 +10:00
Keith Martin
4cbc307507 Enable SoftDelete on File 2024-09-20 23:39:49 +10:00
Keith Martin
d5d5075f32 Fix panic on returning Error text instead of Error object 2024-09-20 21:52:18 +10:00
Keith Martin
fd531b967d Fix not passing pointer to Save 2024-09-20 20:35:51 +10:00
Keith Martin
ff2b0889e3 Correct order of execution of Fixtures to match foreign keys 2024-09-20 20:10:45 +10:00
Keith Martin
bfcbe7261a Correct errors in KeywordID values 2024-09-20 20:10:05 +10:00
Keith Martin
ed0757ed04 Tests of the basic structure to match fixture files as GORM1 would 2024-09-20 20:08:59 +10:00
Keith Martin
9ea550c0ef Turn off auto inserting Associations to match GORM1 2024-09-20 20:05:04 +10:00
Keith Martin
1b140fd91c Add type:bytes; tag to all varbinary columns in structs 2024-09-20 14:25:07 +10:00
Keith Martin
80af799f7d Remove errors about missing where clauses on save 2024-09-19 17:02:13 +10:00
Keith Martin
d26e65fab2 Update duplicate IDs 2024-09-19 16:24:55 +10:00
Keith Martin
52586bff19 Change to Save from Create to bypass issues with already loaded data 2024-09-19 16:23:22 +10:00
Keith Martin
4ee5d99c60 Move init tests so they dont fail due to other tests 2024-09-19 12:53:14 +10:00
Keith Martin
ce5b1ce044 Correct assert.Equal wrong way round 2024-09-19 12:28:30 +10:00
Keith Martin
9abf7a60b7 Add test to check that the right number of records are there after test init 2024-09-19 12:27:02 +10:00
Keith Martin
d83c873e93 Correct Hooks interfaces 2024-09-18 22:43:53 +10:00
Keith Martin
6246affd58 Remove type Map as GORM2 throws unsupported data when using it 2024-09-18 22:36:44 +10:00
Keith Martin
d9175d2297 Merge remote-tracking branch 'upstream/develop' into gorm2 2024-09-18 20:08:01 +10:00
Keith Martin
c61c8f4241 albrechtf 05dc86d1 Fix gorm relations; make most SQL more generic 2024-09-18 20:07:37 +10:00
Keith Martin
993339bd10 albrechtf 20dad85a Replace GORM with GORMv2 2024-09-18 17:22:23 +10:00
Keith Martin
1eea63051a Merge remote-tracking branch 'upstream/develop' into gorm2 2024-09-18 11:11:16 +10:00
Keith Martin
0404e7befe 1st attempt at finding bool's as 0 or 1 2024-09-17 23:53:25 +10:00
Keith Martin
e358f0ae29 Remove all compiler errors related to Gorm2 2024-09-17 23:38:17 +10:00
Keith Martin
74d1257b35 Change imports to gorm2 2024-09-17 14:52:51 +10:00
438 changed files with 38404 additions and 4400 deletions

.gitignore (1 line changed)

@@ -80,3 +80,4 @@ Thumbs.db
 .settings
 .swp
 AGENTS.md
+.vscode

Makefile (152 lines changed)

@@ -80,14 +80,32 @@ test-entity: reset-sqlite run-test-entity
test-commands: reset-sqlite run-test-commands
test-photoprism: reset-sqlite run-test-photoprism
test-short: reset-sqlite run-test-short
test-mariadb: reset-acceptance run-test-mariadb
acceptance-run-chromium: storage/acceptance acceptance-auth-sqlite-restart wait acceptance-auth acceptance-auth-sqlite-stop acceptance-sqlite-restart wait-2 acceptance acceptance-sqlite-stop
acceptance-run-chromium-short: storage/acceptance acceptance-auth-sqlite-restart wait acceptance-auth-short acceptance-auth-sqlite-stop acceptance-sqlite-restart wait-2 acceptance-short acceptance-sqlite-stop
acceptance-auth-run-chromium: storage/acceptance acceptance-auth-sqlite-restart wait acceptance-auth acceptance-auth-sqlite-stop
acceptance-public-run-chromium: storage/acceptance acceptance-sqlite-restart wait acceptance acceptance-sqlite-stop
wait:
sleep 20
wait-2:
test-mariadb: reset-mariadb-testdb run-test-mariadb
test-postgres: reset-postgres-testdb run-test-postgres
test-sqlite: reset-sqlite-unit run-test-sqlite
# SQLite acceptance tests - These setup, configure and then call the actual tests.
acceptance-run-chromium: storage/acceptance storage/sqlite acceptance-exec-chromium
acceptance-run-chromium-short: storage/acceptance storage/sqlite acceptance-exec-chromium-short
acceptance-auth-run-chromium: storage/acceptance storage/sqlite acceptance-auth-exec-chromium
acceptance-public-run-chromium: storage/acceptance storage/sqlite acceptance-public-exec-chromium
# MariaDB acceptance tests - These setup, configure and then call the actual tests.
acceptance-mariadb-run-chromium: storage/acceptance storage/mariadb acceptance-exec-chromium
acceptance-mariadb-run-chromium-short: storage/acceptance storage/mariadb acceptance-exec-chromium-short
acceptance-mariadb-auth-run-chromium: storage/acceptance storage/mariadb acceptance-auth-exec-chromium
acceptance-mariadb-public-run-chromium: storage/acceptance storage/mariadb acceptance-public-exec-chromium
# PostgreSQL acceptance tests - These setup, configure and then call the actual tests.
acceptance-postgres-run-chromium: storage/acceptance storage/postgres acceptance-exec-chromium
acceptance-postgres-run-chromium-short: storage/acceptance storage/postgres acceptance-exec-chromium-short
acceptance-postgres-auth-run-chromium: storage/acceptance storage/postgres acceptance-auth-exec-chromium
acceptance-postgres-public-run-chromium: storage/acceptance storage/postgres acceptance-public-exec-chromium
# The actual tests that are called for acceptance tests. Don't call these directly, use the ones with run in the name.
acceptance-exec-chromium: acceptance-file-reset acceptance-database-reset-1 acceptance-auth-start wait-1 acceptance-auth acceptance-auth-stop acceptance-database-reset-2 acceptance-public-start wait-2 acceptance acceptance-public-stop
acceptance-exec-chromium-short: acceptance-file-reset acceptance-database-reset-1 acceptance-auth-start wait-1 acceptance-auth-short acceptance-auth-stop acceptance-database-reset-2 acceptance-public-start wait-2 acceptance-short acceptance-public-stop
acceptance-auth-exec-chromium: acceptance-file-reset acceptance-database-reset-1 acceptance-auth-start wait-1 acceptance-auth acceptance-auth-stop
acceptance-public-exec-chromium: acceptance-file-reset acceptance-database-reset-1 acceptance-public-start wait-1 acceptance acceptance-public-stop
wait-%:
sleep 20
show-rev:
@git rev-parse HEAD
@@ -189,9 +207,7 @@ install-tensorflow:
sudo scripts/dist/install-tensorflow.sh
install-darktable:
sudo scripts/dist/install-darktable.sh
acceptance-sqlite-restart:
cp -f storage/acceptance/backup.db storage/acceptance/index.db
cp -f storage/acceptance/config-sqlite/settingsBackup.yml storage/acceptance/config-sqlite/settings.yml
acceptance-file-reset:
rm -rf storage/acceptance/sidecar/2020
rm -rf storage/acceptance/sidecar/2011
rm -rf storage/acceptance/originals/2010
@@ -199,15 +215,34 @@ acceptance-sqlite-restart:
rm -rf storage/acceptance/originals/2011
rm -rf storage/acceptance/originals/2013
rm -rf storage/acceptance/originals/2017
./photoprism --auth-mode="public" -c "./storage/acceptance/config-sqlite" start -d
acceptance-sqlite-stop:
./photoprism --auth-mode="public" -c "./storage/acceptance/config-sqlite" stop
acceptance-auth-sqlite-restart:
cp -f storage/acceptance/backup.db storage/acceptance/index.db
cp -f storage/acceptance/config-sqlite/settingsBackup.yml storage/acceptance/config-sqlite/settings.yml
./photoprism --auth-mode="password" -c "./storage/acceptance/config-sqlite" start -d
acceptance-auth-sqlite-stop:
./photoprism --auth-mode="password" -c "./storage/acceptance/config-sqlite" stop
acceptance-database-reset-%:
@if [ -f storage/acceptance/config-active/dbms.sqlite ]; then \
echo "resetting sqlite"; \
cp -f storage/acceptance/backup.db storage/acceptance/index.db; \
cp -f storage/acceptance/config-active/settingsBackup.yml storage/acceptance/config-active/settings.yml; \
fi
@if [ -f storage/acceptance/config-active/dbms.mariadb ]; then \
echo "resetting mariadb"; \
cp -f storage/acceptance/backup.db storage/acceptance/index.db; \
mysql < scripts/sql/reset-acceptance.sql; \
./photoprism --database-driver sqlite --database-dsn "storage/acceptance/index.db?_busy_timeout=5000&_foreign_keys=on" --transfer-driver mysql --transfer-dsn "$(subst testdb,acceptance,$(PHOTOPRISM_TEST_DSN_MARIADB))" migrations transfer -force; \
cp -f storage/acceptance/config-active/settingsBackup.yml storage/acceptance/config-active/settings.yml; \
fi
@if [ -f storage/acceptance/config-active/dbms.postgresql ]; then \
echo "resetting postgresql"; \
cp -f storage/acceptance/backup.db storage/acceptance/index.db; \
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-acceptance.sql; \
./photoprism --database-driver sqlite --database-dsn "storage/acceptance/index.db?_busy_timeout=5000&_foreign_keys=on" --transfer-driver postgres --transfer-dsn "$(subst testdb,acceptance,$(PHOTOPRISM_TEST_DSN_POSTGRES))" migrations transfer -force; \
cp -f storage/acceptance/config-active/settingsBackup.yml storage/acceptance/config-active/settings.yml; \
fi
acceptance-public-start:
./photoprism --auth-mode="public" -c "./storage/acceptance/config-active" start -d
acceptance-public-stop:
./photoprism --auth-mode="public" -c "./storage/acceptance/config-active" stop
acceptance-auth-start:
./photoprism --auth-mode="password" -c "./storage/acceptance/config-active" start -d
acceptance-auth-stop:
./photoprism --auth-mode="password" -c "./storage/acceptance/config-active" stop
start:
./photoprism start -d
stop:
@@ -216,6 +251,8 @@ terminal:
$(DOCKER_COMPOSE) exec -u $(UID) photoprism bash
mariadb:
$(DOCKER_COMPOSE) exec mariadb mariadb -uroot -pphotoprism photoprism
postgres:
$(DOCKER_COMPOSE) exec postgres psql -uphotoprism -pphotoprism photoprism
root: root-terminal
root-terminal:
$(DOCKER_COMPOSE) exec -u root photoprism bash
@@ -259,6 +296,20 @@ dep-tensorflow:
dep-acceptance: storage/acceptance
storage/acceptance:
[ -f "./storage/acceptance/index.db" ] || (cd storage && rm -rf acceptance && wget -c https://dl.photoprism.app/qa/acceptance.tar.gz -O - | tar -xz)
storage/sqlite:
rm -rf storage/acceptance/config-active
cp storage/acceptance/config-sqlite/ storage/acceptance/config-active -r
echo sqlite > storage/acceptance/config-active/dbms.sqlite
storage/mariadb:
rm -rf storage/acceptance/config-active
cp storage/acceptance/config-sqlite/ storage/acceptance/config-active -r
sed "s/DatabaseDriver: sqlite/DatabaseDriver: mysql/;s/DatabaseDsn[: a-z./]\+/DatabaseDsn: $(subst &,\&,$(subst /,\/,$(PHOTOPRISM_TEST_DSN_MARIADB)))/" storage/acceptance/config-sqlite/options.yml | sed "s/testdb/acceptance/g" > storage/acceptance/config-active/options.yml
echo mariadb > storage/acceptance/config-active/dbms.mariadb
storage/postgres:
rm -rf storage/acceptance/config-active
cp storage/acceptance/config-sqlite/ storage/acceptance/config-active -r
sed "s/DatabaseDriver: sqlite/DatabaseDriver: postgres/;s/DatabaseDsn[: a-z./]\+/DatabaseDsn: $(subst &,\&,$(subst /,\/,$(PHOTOPRISM_TEST_DSN_POSTGRES)))/" storage/acceptance/config-sqlite/options.yml | sed "s/testdb/acceptance/g" > storage/acceptance/config-active/options.yml
echo postgresql > storage/acceptance/config-active/dbms.postgresql
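For illustration, the sed pipeline in storage/postgres above leaves config-active/options.yml with entries along these lines, assuming PHOTOPRISM_TEST_DSN_POSTGRES as set in the compose files (the second sed rewrites testdb to acceptance):

    DatabaseDriver: postgres
    DatabaseDsn: postgresql://acceptance:acceptance@postgres:5432/acceptance?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable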
zip-facenet:
(cd assets && zip -r facenet.zip facenet -x "*/.*" -x "*/version.txt")
zip-nasnet:
@@ -337,7 +388,7 @@ test-js:
(cd frontend && env TZ=UTC BUILD_ENV=development NODE_ENV=development BABEL_ENV=test npm run test)
acceptance:
$(info Running public-mode tests in Chrome...)
(cd frontend && npm run testcafe -- "chrome --headless=new" --test-grep "^(Multi-Window)\:*" --test-meta mode=public --config-file ./testcaferc.json --experimental-multiple-windows "tests/acceptance" && npm run testcafe -- "chrome --headless=new" --test-grep "^(Common|Core)\:*" --test-meta mode=public --config-file ./testcaferc.json "tests/acceptance")
(cd frontend && find ./tests/acceptance -type f -name "*.js" | xargs -i perl -0777 -ne 'while(/(?:mode: \"auth[^,]*\,)|(Multi-Window\:[A-Za-z 0-9\-_]*)/g){print "$$1\n" if ($$1);}' {} | xargs -I testname bash -c 'npm run testcafe -- "chrome --headless=new" --experimental-multiple-windows --test-meta mode=public --config-file ./testcaferc.json --test "testname" "tests/acceptance"' && npm run testcafe -- "chrome --headless=new" --test-grep "^(Common|Core)\:*" --test-meta mode=public --config-file ./testcaferc.json "tests/acceptance")
acceptance-short:
$(info Running JS acceptance tests in Chrome...)
(cd frontend && npm run testcafe -- "chrome --headless=new" --test-grep "^(Multi-Window)\:*" --test-meta mode=public --config-file ./testcaferc.json --experimental-multiple-windows "tests/acceptance" && npm run testcafe -- "chrome --headless=new" --test-grep "^(Common|Core)\:*" --test-meta mode=public,type=short --config-file ./testcaferc.json "tests/acceptance")
@@ -346,7 +397,7 @@ acceptance-firefox:
(cd frontend && npm run testcafe -- firefox:headless --test-grep "^(Common|Core)\:*" --test-meta mode=public --config-file ./testcaferc.json --disable-native-automation "tests/acceptance")
acceptance-auth:
$(info Running JS acceptance-auth tests in Chrome...)
(cd frontend && npm run testcafe -- "chrome --headless=new" --test-grep "^(Multi-Window)\:*" --test-meta mode=auth --config-file ./testcaferc.json --experimental-multiple-windows "tests/acceptance" && npm run testcafe -- "chrome --headless=new" --test-grep "^(Common|Core)\:*" --test-meta mode=auth --config-file ./testcaferc.json "tests/acceptance")
(cd frontend && find ./tests/acceptance -type f -name "*.js" | xargs -i perl -0777 -ne 'while(/(?:mode: \"public[^,]*\,)|(Multi-Window\:[A-Za-z 0-9\-_]*)/g){print "$$1\n" if ($$1);}' {} | xargs -I testname bash -c 'npm run testcafe -- "chrome --headless=new" --experimental-multiple-windows --test-meta mode=auth --config-file ./testcaferc.json --test "testname" "tests/acceptance"' && npm run testcafe -- "chrome --headless=new" --test-grep "^(Common|Core)\:*" --test-meta mode=auth --config-file ./testcaferc.json "tests/acceptance")
acceptance-auth-short:
$(info Running JS acceptance-auth tests in Chrome...)
(cd frontend && npm run testcafe -- "chrome --headless=new" --test-grep "^(Multi-Window)\:*" --test-meta mode=auth --config-file ./testcaferc.json --experimental-multiple-windows "tests/acceptance" && npm run testcafe -- "chrome --headless=new" --test-grep "^(Common|Core)\:*" --test-meta mode=auth,type=short --config-file ./testcaferc.json "tests/acceptance")
@@ -380,9 +431,26 @@ reset-mariadb-local:
reset-mariadb-acceptance:
$(info Resetting acceptance database...)
mysql < scripts/sql/reset-acceptance.sql
reset-sqlite-unit:
$(info Resetting SQLite unit database...)
rm --force ./storage/testdata/unit.test.db
cp ./internal/entity/migrate/testdata/migrate_sqlite3 ./storage/testdata/unit.test.db
reset-mariadb-all: reset-mariadb-testdb reset-mariadb-local reset-mariadb-acceptance reset-mariadb-photoprism
reset-testdb: reset-sqlite reset-mariadb-testdb
reset-acceptance: reset-mariadb-acceptance
reset-postgres:
$(info Resetting photoprism database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-photoprism.sql
reset-postgres-testdb:
$(info Resetting testdb database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-testdb.sql
reset-postgres-local:
$(info Resetting local database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-local.sql
reset-postgres-acceptance:
$(info Resetting acceptance database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-acceptance.sql
reset-postgres-all: reset-postgres-testdb reset-postgres-local reset-postgres-acceptance reset-postgres-photoprism
reset-testdb: reset-sqlite reset-mariadb-testdb reset-postgres-testdb
# reset-acceptance: reset-mariadb-acceptance
reset-sqlite:
$(info Removing test database files...)
find ./internal -type f -name ".test.*" -delete
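The reset-postgres-* targets above each feed a SQL file from scripts/sql/postgresql/ to psql against the postgres maintenance database. Those scripts are not part of this diff; a hypothetical reset-acceptance.sql would simply drop and recreate the database, for example:

    -- illustrative only; the real scripts/sql/postgresql/reset-acceptance.sql is not shown in this diff
    DROP DATABASE IF EXISTS acceptance WITH (FORCE);
    CREATE DATABASE acceptance OWNER acceptance;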
@@ -394,7 +462,13 @@ run-test-go:
$(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-mariadb:
$(info Running all Go tests on MariaDB...)
PHOTOPRISM_TEST_DRIVER="mysql" PHOTOPRISM_TEST_DSN="root:photoprism@tcp(mariadb:4001)/acceptance?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
PHOTOPRISM_TEST_DSN_NAME="mariadb" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-postgres:
$(info Running all Go tests on PostgreSQL...)
PHOTOPRISM_TEST_DSN_NAME="postgres" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-sqlite:
$(info Running all Go tests on SQLite...)
PHOTOPRISM_TEST_DSN_NAME="sqlitefile" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags "slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-pkg:
$(info Running all Go tests in "/pkg"...)
$(GOTEST) -parallel 2 -count 1 -cpu 2 -tags="slow,develop" -timeout 20m ./pkg/...
@@ -430,6 +504,24 @@ test-coverage:
go test -parallel 1 -count 1 -cpu 1 -failfast -tags="slow,develop" -timeout 30m -coverprofile coverage.txt -covermode atomic ./pkg/... ./internal/...
go tool cover -html=coverage.txt -o coverage.html
go tool cover -func coverage.txt | grep total:
test-sqlite-benchmark10x:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && go test -skip Test -parallel 4 -count 10 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 1s -bench=.'
test-sqlite-benchmark10s:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && go test -skip Test -parallel 4 -count 1 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 10s -bench=.'
test-mariadb-benchmark10x:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="mariadb" go test -skip Test -parallel 4 -count 10 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 1s -bench=.'
test-mariadb-benchmark10s:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="mariadb" go test -skip Test -parallel 4 -count 1 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 10s -bench=.'
test-postgres-benchmark10x:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="postgres" go test -skip Test -parallel 4 -count 10 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 1s -bench=.'
test-postgres-benchmark10s:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="postgres" go test -skip Test -parallel 4 -count 1 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 10s -bench=.'
docker-pull:
$(DOCKER_COMPOSE) --profile=all pull --ignore-pull-failures
$(DOCKER_COMPOSE) -f compose.latest.yaml pull --ignore-pull-failures
@@ -891,5 +983,15 @@ dummy-ldap:
$(DOCKER_COMPOSE) stop dummy-ldap
$(DOCKER_COMPOSE) up -d -V --force-recreate dummy-ldap
# PostgreSQL-specific targets:
start-alldbms:
$(DOCKER_COMPOSE) -f compose.alldbms.yaml up
start-postgres:
$(DOCKER_COMPOSE) -f compose.postgres.yaml up
docker-postgres:
docker pull --platform=amd64 photoprism/develop:plucky
docker pull --platform=amd64 photoprism/develop:plucky-slim
scripts/docker/buildx-multi.sh photoprism linux/amd64 postgres /plucky
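A typical PostgreSQL development session with the new targets might look like this (sketch; assumes $(DOCKER_COMPOSE) resolves to a compose file that defines the postgres service):

    make start-postgres    # or: make start-alldbms to run MariaDB and PostgreSQL side by side
    make postgres          # open a psql shell in the postgres container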
# Declare all targets as "PHONY", see https://www.gnu.org/software/make/manual/html_node/Phony-Targets.html.
MAKEFLAGS += --always-make

430
compose.alldbms.yaml Normal file
View File

@@ -0,0 +1,430 @@
## FOR TEST AND DEVELOPMENT ONLY, DO NOT USE IN PRODUCTION ##
## Setup: https://docs.photoprism.app/developer-guide/setup/ ##
services:
## PhotoPrism (Development Environment, All DBMS)
photoprism:
build: .
image: photoprism/photoprism:develop
depends_on:
- postgres
- mariadb
- dummy-webdav
- dummy-oidc
stop_grace_period: 15s
security_opt:
- seccomp:unconfined
- apparmor:unconfined
## Expose HTTP and debug ports
ports:
- "2342:2342" # Default HTTP port (host:container)
- "2443:2443" # Default TLS port (host:container)
- "2343:2343" # Acceptance Test HTTP port (host:container)
- "40000:40000" # Go Debugger (host:container)
shm_size: "2gb"
## Set links and labels for use with Traefik reverse proxy
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:vision.localssl.dev"
- "traefik:qdrant.localssl.dev"
- "traefik:keycloak.localssl.dev"
- "traefik:dummy-oidc.localssl.dev"
- "traefik:dummy-webdav.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.docker.network=photoprism"
- "traefik.http.services.photoprism.loadbalancer.server.port=2342"
- "traefik.http.services.photoprism.loadbalancer.server.scheme=http"
- "traefik.http.routers.photoprism.entrypoints=websecure"
- "traefik.http.routers.photoprism.rule=Host(`localssl.dev`) || HostRegexp(`^.+\\.localssl\\.dev`)"
- "traefik.http.routers.photoprism.priority=2"
- "traefik.http.routers.photoprism.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.photoprism.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.photoprism.tls=true"
## Override variables with optional env file, see https://docs.docker.com/reference/compose-file/services/#required
env_file:
- path: ".env"
required: false
## Configure development environment
environment:
## Run as a non-root user after initialization (supported: 0, 33, 50-99, 500-600, and 900-1200):
PHOTOPRISM_UID: ${UID:-1000} # user id, should match your host user id
PHOTOPRISM_GID: ${GID:-1000} # group id
## Access Management:
PHOTOPRISM_ADMIN_USER: "admin" # admin login username
PHOTOPRISM_ADMIN_PASSWORD: "photoprism" # initial admin password (8-72 characters)
PHOTOPRISM_AUTH_MODE: "password" # authentication mode (public, password)
PHOTOPRISM_REGISTER_URI: "https://keycloak.localssl.dev/admin/"
PHOTOPRISM_PASSWORD_RESET_URI: "https://keycloak.localssl.dev/realms/master/login-actions/reset-credentials"
PHOTOPRISM_USAGE_INFO: "true"
PHOTOPRISM_FILES_QUOTA: "100"
## Customization:
PHOTOPRISM_DEFAULT_LOCALE: "en" # default user interface language, e.g. "en" or "de"
PHOTOPRISM_PLACES_LOCALE: "local" # location details language, e.g. "local", "en", or "de"
## OpenID Connect (pre-configured for local tests):
## see https://keycloak.localssl.dev/realms/master/.well-known/openid-configuration
PHOTOPRISM_OIDC_URI: "https://keycloak.localssl.dev/realms/master"
PHOTOPRISM_OIDC_CLIENT: "photoprism-develop"
PHOTOPRISM_OIDC_SECRET: "9d8351a0-ca01-4556-9c37-85eb634869b9"
PHOTOPRISM_OIDC_PROVIDER: "Keycloak"
PHOTOPRISM_OIDC_REGISTER: "true"
PHOTOPRISM_OIDC_WEBDAV: "true"
PHOTOPRISM_DISABLE_OIDC: "false"
## LDAP Authentication (pre-configured for local tests):
PHOTOPRISM_LDAP_URI: "ldap://dummy-ldap:389"
PHOTOPRISM_LDAP_INSECURE: "true"
PHOTOPRISM_LDAP_SYNC: "true"
PHOTOPRISM_LDAP_BIND: "simple"
PHOTOPRISM_LDAP_BIND_DN: "cn"
PHOTOPRISM_LDAP_BASE_DN: "dc=localssl,dc=dev"
PHOTOPRISM_LDAP_ROLE: ""
PHOTOPRISM_LDAP_ROLE_DN: "ou=photoprism-*,ou=groups,dc=localssl,dc=dev"
PHOTOPRISM_LDAP_WEBDAV_DN: "ou=photoprism-webdav,ou=groups,dc=localssl,dc=dev"
## HTTPS/TLS Options:
## see https://docs.photoprism.app/getting-started/using-https/
PHOTOPRISM_DISABLE_TLS: "true"
PHOTOPRISM_DEFAULT_TLS: "true"
## Site Information:
PHOTOPRISM_SITE_URL: "https://app.localssl.dev/" # server URL in the format "http(s)://domain.name(:port)/(path)"
PHOTOPRISM_SITE_CAPTION: "AI-Powered Photos App"
PHOTOPRISM_SITE_DESCRIPTION: "Tags and finds pictures without getting in your way!"
PHOTOPRISM_SITE_AUTHOR: "@photoprism_app"
PHOTOPRISM_DEBUG: "true"
PHOTOPRISM_READONLY: "false"
PHOTOPRISM_EXPERIMENTAL: "true"
PHOTOPRISM_HTTP_MODE: "debug"
PHOTOPRISM_HTTP_HOST: "0.0.0.0"
PHOTOPRISM_HTTP_PORT: 2342
PHOTOPRISM_HTTP_COMPRESSION: "gzip" # improves transfer speed and bandwidth utilization (none or gzip)
PHOTOPRISM_DATABASE_DRIVER: "postgres"
PHOTOPRISM_DATABASE_SERVER: "postgres:5432"
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "photoprism"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"
PHOTOPRISM_ORIGINALS_LIMIT: 128000 # sets originals file size limit to 128 GB
PHOTOPRISM_IMPORT_PATH: "/go/src/github.com/photoprism/photoprism/storage/import"
PHOTOPRISM_DISABLE_CHOWN: "false" # disables updating storage permissions via chmod and chown on startup
PHOTOPRISM_DISABLE_BACKUPS: "false" # disables backing up albums and photo metadata to YAML files
PHOTOPRISM_DISABLE_WEBDAV: "false" # disables built-in WebDAV server
PHOTOPRISM_DISABLE_SETTINGS: "false" # disables settings UI and API
PHOTOPRISM_DISABLE_PLACES: "false" # disables reverse geocoding and maps
PHOTOPRISM_DISABLE_EXIFTOOL: "false" # disables creating JSON metadata sidecar files with ExifTool
PHOTOPRISM_DISABLE_TENSORFLOW: "false" # disables all features depending on TensorFlow
PHOTOPRISM_DISABLE_RAW: "false" # disables indexing and conversion of RAW images
PHOTOPRISM_RAW_PRESETS: "false" # enables applying user presets when converting RAW images (reduces performance)
PHOTOPRISM_DETECT_NSFW: "false" # automatically flags photos as private that MAY be offensive (requires TensorFlow)
PHOTOPRISM_UPLOAD_NSFW: "false" # allows uploads that MAY be offensive (no effect without TensorFlow)
PHOTOPRISM_UPLOAD_ALLOW: "" # restricts uploads to these file types (comma-separated list of EXTENSIONS; leave blank to allow all)
PHOTOPRISM_UPLOAD_ARCHIVES: "true" # allows upload of zip archives (will be extracted before import)
PHOTOPRISM_THUMB_LIBRARY: "auto" # image processing library to be used for generating thumbnails (auto, imaging, vips)
PHOTOPRISM_THUMB_FILTER: "auto" # downscaling filter (imaging best to worst: blackman, lanczos, cubic, linear, nearest)
PHOTOPRISM_THUMB_UNCACHED: "true" # enables on-demand thumbnail rendering (high memory and cpu usage)
TF_CPP_MIN_LOG_LEVEL: 1 # show TensorFlow log messages for development
## Video Transcoding (https://docs.photoprism.app/getting-started/advanced/transcoding/):
# PHOTOPRISM_FFMPEG_ENCODER: "software" # H.264/AVC encoder (software, intel, nvidia, apple, raspberry, or vaapi)
# LIBVA_DRIVER_NAME: "i965" # For Intel architectures Haswell and older which do not support QSV yet but use VAAPI instead
PHOTOPRISM_FFMPEG_SIZE: "1920" # video size limit in pixels (720-7680) (default: 3840)
# PHOTOPRISM_FFMPEG_BITRATE: "64" # video bitrate limit in Mbps (default: 60)
## Run/install on first startup (options: update tensorflow https intel gpu davfs yt-dlp):
PHOTOPRISM_INIT: "https postgresql"
## Computer Vision API (https://docs.photoprism.app/getting-started/config-options/#computer-vision):
PHOTOPRISM_VISION_API: "true" # server: enables service API endpoints under /api/v1/vision (requires access token)
PHOTOPRISM_VISION_URI: "" # client: service URI, e.g. http://hostname/api/v1/vision (leave blank to disable)
PHOTOPRISM_VISION_KEY: "" # client: service access token (for authentication)
## Shared devices for video hardware transcoding (optional):
# devices:
# - "/dev/dri:/dev/dri" # Required Intel QSV or VAAPI hardware transcoding
# - "/dev/video11:/dev/video11" # Video4Linux Video Encode Device (h264_v4l2m2m)
working_dir: "/go/src/github.com/photoprism/photoprism"
volumes:
- ".:/go/src/github.com/photoprism/photoprism"
- "./storage:/photoprism"
- "go-mod:/go/pkg/mod"
## PostgreSQL Database Server
## Docs: https://www.postgresql.org/docs/
postgres:
image: postgres:17-alpine
# image: postgres:16-bookworm
expose:
- "5432"
ports:
- "5432:5432" # database port (host:container)
volumes:
- "postgresql:/var/lib/postgresql"
- "./scripts/sql/postgresql-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
# POSTGRES_INITDB_ARGS: "--locale-provider=icu --icu-locale=und-u-ks-level2"
# these additional options fail: --lc-collate=und-u-ks-level2 --lc-ctype=und-u-ks-level2 --lc-messages=und-u-ks-level2
# POSTGRES_INITDB_ARGS: "--encoding=UTF8"
POSTGRES_DB: photoprism
POSTGRES_USER: photoprism
POSTGRES_PASSWORD: photoprism
## MariaDB (Database Server)
## Docs: https://mariadb.com/docs/reference/
## Release Notes: https://mariadb.com/kb/en/changes-improvements-in-mariadb-1011/
mariadb:
image: mariadb:11
security_opt: # see https://github.com/MariaDB/mariadb-docker/issues/434#issuecomment-1136151239
- seccomp:unconfined
- apparmor:unconfined
command: --port=4001 --innodb-strict-mode=1 --innodb-buffer-pool-size=256M --transaction-isolation=READ-COMMITTED --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --max-connections=512 --innodb-rollback-on-timeout=OFF --innodb-lock-wait-timeout=120
expose:
- "4001"
ports:
- "4001:4001" # database port (host:container)
volumes:
- "mariadb:/var/lib/mysql"
- "./scripts/sql/mariadb-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
MARIADB_AUTO_UPGRADE: "1"
MARIADB_INITDB_SKIP_TZINFO: "1"
MARIADB_DATABASE: "photoprism"
MARIADB_USER: "photoprism"
MARIADB_PASSWORD: "photoprism"
MARIADB_ROOT_PASSWORD: "photoprism"
## Qdrant (Vector Database)
## Docs: https://qdrant.tech/documentation/guides/installation/#docker-compose
## Release Notes: https://github.com/qdrant/qdrant/releases
## Web UI: https://qdrant.localssl.dev/dashboard
qdrant:
image: qdrant/qdrant:latest
profiles: ["all", "qdrant"]
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:vision.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.qdrant.loadbalancer.server.port=6333"
- "traefik.http.services.qdrant.loadbalancer.server.scheme=http"
- "traefik.http.routers.qdrant.entrypoints=websecure"
- "traefik.http.routers.qdrant.rule=Host(`qdrant.localssl.dev`)"
- "traefik.http.routers.qdrant.priority=3"
- "traefik.http.routers.qdrant.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.qdrant.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.qdrant.tls=true"
expose:
- 6333
- 6334
- 6335
volumes:
- ./.qdrant.yaml:/qdrant/config/production.yaml
- ./storage/qdrant:/qdrant/storage
## PhotoPrism® Computer Vision API
## See: https://github.com/photoprism/photoprism-vision
photoprism-vision:
image: photoprism/vision:latest
entrypoint: [ "/app/venv/bin/flask" ]
command: [ "--app", "app", "run", "--debug", "--host", "0.0.0.0" ]
profiles: ["all", "vision"]
stop_grace_period: 15s
working_dir: "/app"
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:qdrant.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.qdrant.loadbalancer.server.port=5000"
- "traefik.http.services.qdrant.loadbalancer.server.scheme=http"
- "traefik.http.routers.qdrant.entrypoints=websecure"
- "traefik.http.routers.qdrant.rule=Host(`vision.localssl.dev`)"
- "traefik.http.routers.qdrant.priority=3"
- "traefik.http.routers.qdrant.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.qdrant.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.qdrant.tls=true"
expose:
- 5000
environment:
TF_CPP_MIN_LOG_LEVEL: 2
## Ollama client configuration (for the service, see below):
OLLAMA_ENABLED: "true"
OLLAMA_HOST: "http://ollama:11434"
## Ollama Large-Language Model Runner (optional)
## Run "ollama pull [name]:[version]" to download a vision model
## listed at <https://ollama.com/search?c=vision>, for example:
## docker compose exec ollama ollama pull qwen2.5vl:3b
ollama:
image: ollama/ollama:latest
restart: unless-stopped
stop_grace_period: 15s
## Only starts this service if the "vision" or "all" profile is specified:
## docker compose --profile vision up -d
profiles: ["all", "vision"]
## Insecurely exposes the Ollama service on port 11434
## without authentication (for private networks only):
# ports:
# - "11434:11434"
environment:
## Ollama Configuration Options:
OLLAMA_HOST: "0.0.0.0:11434"
OLLAMA_MODELS: "/root/.ollama" # model storage path (see volumes section below)
OLLAMA_MAX_QUEUE: "100" # maximum number of queued requests
OLLAMA_NUM_PARALLEL: "1" # maximum number of parallel requests
OLLAMA_MAX_LOADED_MODELS: "1" # maximum number of loaded models per GPU
OLLAMA_LOAD_TIMEOUT: "5m" # maximum time for loading models (default "5m")
OLLAMA_KEEP_ALIVE: "5m" # duration that models stay loaded in memory (default "5m")
OLLAMA_CONTEXT_LENGTH: "4096" # maximum input context length
OLLAMA_MULTIUSER_CACHE: "false" # optimize prompt caching for multi-user scenarios
OLLAMA_NOPRUNE: "false" # disables pruning of model blobs at startup
OLLAMA_NOHISTORY: "true" # disables readline history
OLLAMA_FLASH_ATTENTION: "false" # enables the experimental flash attention feature
OLLAMA_KV_CACHE_TYPE: "f16" # cache quantization (f16, q8_0, or q4_0)
OLLAMA_SCHED_SPREAD: "false" # allows scheduling models across all GPUs.
OLLAMA_NEW_ENGINE: "true" # enables the new Ollama engine
# OLLAMA_DEBUG: "true" # shows additional debug information
# OLLAMA_INTEL_GPU: "true" # enables experimental Intel GPU detection
## NVIDIA GPU Hardware Acceleration (optional):
# NVIDIA_VISIBLE_DEVICES: "all"
# NVIDIA_DRIVER_CAPABILITIES: "compute,utility"
volumes:
- "./storage/ollama:/root/.ollama"
## NVIDIA GPU Hardware Acceleration (optional):
# deploy:
# resources:
# reservations:
# devices:
# - driver: "nvidia"
# capabilities: [ gpu ]
# count: "all"
## Traefik v3 (Reverse Proxy)
## includes "*.localssl.dev" SSL certificate for test environments
## Docs: https://doc.traefik.io/traefik/
traefik:
image: photoprism/traefik:latest
security_opt:
- no-new-privileges:true
ports:
- "80:80" # HTTP (redirects to HTTPS)
- "443:443" # HTTPS (required)
labels:
- "traefik.enable=true"
volumes:
- "/var/run/docker.sock:/var/run/docker.sock" # enables Traefik to watch services
## Dummy WebDAV Server
dummy-webdav:
image: photoprism/dummy-webdav:240627
environment:
WEBDAV_USERNAME: admin
WEBDAV_PASSWORD: photoprism
labels:
- "traefik.enable=true"
- "traefik.http.services.dummy-webdav.loadbalancer.server.port=80"
- "traefik.http.routers.dummy-webdav.entrypoints=websecure"
- "traefik.http.routers.dummy-webdav.rule=Host(`dummy-webdav.localssl.dev`)"
- "traefik.http.routers.dummy-webdav.priority=3"
- "traefik.http.routers.dummy-webdav.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.dummy-webdav.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.dummy-webdav.tls=true"
## Dummy OIDC Identity Provider
dummy-oidc:
image: photoprism/dummy-oidc:240627
labels:
- "traefik.enable=true"
- "traefik.http.services.dummy-oidc.loadbalancer.server.port=9998"
- "traefik.http.routers.dummy-oidc.entrypoints=websecure"
- "traefik.http.routers.dummy-oidc.rule=Host(`dummy-oidc.localssl.dev`)"
- "traefik.http.routers.dummy-oidc.priority=3"
- "traefik.http.routers.dummy-oidc.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.dummy-oidc.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.dummy-oidc.tls=true"
## Dummy LDAP Directory Server
## Docs: https://glauth.github.io/docs/
dummy-ldap:
image: glauth/glauth-plugins:latest
ports:
- "127.0.0.1:389:389"
labels:
- "traefik.enable=true"
- "traefik.http.services.ldap.loadbalancer.server.port=5555"
- "traefik.http.routers.dummy-ldap.entrypoints=websecure"
- "traefik.http.routers.dummy-ldap.rule=Host(`dummy-ldap.localssl.dev`)"
- "traefik.http.routers.dummy-ldap.priority=3"
- "traefik.http.routers.dummy-ldap.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.dummy-ldap.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.dummy-ldap.tls=true"
volumes:
- "./.ldap.cfg:/app/config/config.cfg"
## Keycloak (OIDC Identity Provider)
## Docs: https://www.keycloak.org/docs/latest/server_admin/
## Login with "user / photoprism" and "admin / photoprism".
keycloak:
image: quay.io/keycloak/keycloak:25.0
command: "start-dev" # development mode, do not use this in production!
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.keycloak.loadbalancer.server.port=8080"
- "traefik.http.routers.keycloak.entrypoints=websecure"
- "traefik.http.routers.keycloak.rule=Host(`keycloak.localssl.dev`)"
- "traefik.http.routers.keycloak.priority=3"
- "traefik.http.routers.keycloak.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.keycloak.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.keycloak.tls=true"
environment: # see https://www.keycloak.org/server/all-config
KEYCLOAK_ADMIN: "admin"
KEYCLOAK_ADMIN_PASSWORD: "photoprism"
KC_METRICS_ENABLED: "false"
KC_HOSTNAME: "keycloak.localssl.dev"
KC_HOSTNAME_STRICT: "false"
KC_PROXY: "edge"
KC_DB: "postgres"
KC_DB_URL: "jdbc:postgresql://postgres:5432/keycloak"
KC_DB_USERNAME: "keycloak"
KC_DB_PASSWORD: "keycloak"
## Run "docker compose --profile prometheus up" to start your development environment with Prometheus.
## Docs: https://prometheus.io/docs/prometheus/latest/configuration/configuration/#oauth2
## The following grants API access to Prometheus with the preconfigured client credentials (adjust flags as needed):
## ./photoprism client add --id=cs5cpu17n6gj2qo5 --secret=xcCbOrw6I0vcoXzhnOmXhjpVSyFq0l0e -s metrics -n Prometheus -e 60 -t 1
prometheus:
image: prom/prometheus:latest
profiles: ["all", "auth", "prometheus"]
labels:
- "traefik.enable=true"
- "traefik.http.services.prometheus.loadbalancer.server.port=9090"
- "traefik.http.routers.prometheus.entrypoints=websecure"
- "traefik.http.routers.prometheus.rule=Host(`prometheus.localssl.dev`)"
- "traefik.http.routers.prometheus.priority=3"
- "traefik.http.routers.prometheus.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.prometheus.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.prometheus.tls=true"
volumes:
- "./prometheus.yml:/etc/prometheus/prometheus.yml"
## Create named volume for Go module cache
volumes:
go-mod:
driver: local
postgresql:
driver: local
mariadb:
driver: local
## Create shared "photoprism" network for connecting with services in other compose.yaml files
networks:
default:
name: photoprism
driver: bridge
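The postgres service above mounts scripts/sql/postgresql-init.sql into docker-entrypoint-initdb.d, but that script is not included in this diff. Given the testdb DSN and the Keycloak KC_DB_* settings, an illustrative init script would need to create roles and databases along these lines (hypothetical):

    -- hypothetical init.sql; the real scripts/sql/postgresql-init.sql is not shown here
    CREATE USER testdb WITH PASSWORD 'testdb';
    CREATE DATABASE testdb OWNER testdb;
    CREATE USER keycloak WITH PASSWORD 'keycloak';
    CREATE DATABASE keycloak OWNER keycloak;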

View File

@@ -39,9 +39,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN: ".test.db"
PHOTOPRISM_TEST_DSN_NAME: "sqlitefile"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: ".test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View File

@@ -93,8 +93,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View File

@@ -96,8 +96,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View File

@@ -3,33 +3,90 @@
services:
## PhotoPrism Development Environment (PostgreSQL)
# ATTENTION: PostgreSQL is NOT supported yet as Gorm (our ORM library) needs to be upgraded first.
# The current Gorm version does NOT support compatible general data types:
# https://github.com/photoprism/photoprism/issues/47
photoprism:
build: .
image: photoprism/photoprism:develop
depends_on:
- postgres
- dummy-webdav
- dummy-oidc
stop_grace_period: 15s
security_opt:
- seccomp:unconfined
- apparmor:unconfined
## Expose HTTP and debug ports
ports:
- "2342:2342" # default HTTP port (host:container)
- "2343:2343" # acceptance Test HTTP port (host:container)
working_dir: "/go/src/github.com/photoprism/photoprism"
volumes:
- ".:/go/src/github.com/photoprism/photoprism"
- "go-mod:/go/pkg/mod"
- "2342:2342" # Default HTTP port (host:container)
- "2443:2443" # Default TLS port (host:container)
- "2343:2343" # Acceptance Test HTTP port (host:container)
- "40000:40000" # Go Debugger (host:container)
shm_size: "2gb"
## Set links and labels for use with Traefik reverse proxy
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:vision.localssl.dev"
- "traefik:qdrant.localssl.dev"
- "traefik:keycloak.localssl.dev"
- "traefik:dummy-oidc.localssl.dev"
- "traefik:dummy-webdav.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.photoprism.loadbalancer.server.port=2342"
- "traefik.http.services.photoprism.loadbalancer.server.scheme=http"
- "traefik.http.routers.photoprism.entrypoints=websecure"
- "traefik.http.routers.photoprism.rule=Host(`localssl.dev`) || HostRegexp(`^.+\\.localssl\\.dev`)"
- "traefik.http.routers.photoprism.priority=2"
- "traefik.http.routers.photoprism.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.photoprism.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.photoprism.tls=true"
## Override variables with optional env file, see https://docs.docker.com/reference/compose-file/services/#required
env_file:
- path: ".env"
required: false
## Configure development environment
environment:
## Run as a non-root user after initialization (supported: 0, 33, 50-99, 500-600, and 900-1200):
PHOTOPRISM_UID: ${UID:-1000} # user id, should match your host user id
PHOTOPRISM_GID: ${GID:-1000} # group id
## Access Management:
PHOTOPRISM_ADMIN_USER: "admin" # admin login username
PHOTOPRISM_ADMIN_PASSWORD: "photoprism" # initial admin password (8-72 characters)
PHOTOPRISM_AUTH_MODE: "password" # authentication mode (public, password)
PHOTOPRISM_SITE_URL: "http://localhost:2342/"
PHOTOPRISM_REGISTER_URI: "https://keycloak.localssl.dev/admin/"
PHOTOPRISM_PASSWORD_RESET_URI: "https://keycloak.localssl.dev/realms/master/login-actions/reset-credentials"
PHOTOPRISM_USAGE_INFO: "true"
PHOTOPRISM_FILES_QUOTA: "100"
## Customization:
PHOTOPRISM_DEFAULT_LOCALE: "en" # default user interface language, e.g. "en" or "de"
PHOTOPRISM_PLACES_LOCALE: "local" # location details language, e.g. "local", "en", or "de"
## OpenID Connect (pre-configured for local tests):
## see https://keycloak.localssl.dev/realms/master/.well-known/openid-configuration
PHOTOPRISM_OIDC_URI: "https://keycloak.localssl.dev/realms/master"
PHOTOPRISM_OIDC_CLIENT: "photoprism-develop"
PHOTOPRISM_OIDC_SECRET: "9d8351a0-ca01-4556-9c37-85eb634869b9"
PHOTOPRISM_OIDC_PROVIDER: "Keycloak"
PHOTOPRISM_OIDC_REGISTER: "true"
PHOTOPRISM_OIDC_WEBDAV: "true"
PHOTOPRISM_DISABLE_OIDC: "false"
## LDAP Authentication (pre-configured for local tests):
PHOTOPRISM_LDAP_URI: "ldap://dummy-ldap:389"
PHOTOPRISM_LDAP_INSECURE: "true"
PHOTOPRISM_LDAP_SYNC: "true"
PHOTOPRISM_LDAP_BIND: "simple"
PHOTOPRISM_LDAP_BIND_DN: "cn"
PHOTOPRISM_LDAP_BASE_DN: "dc=localssl,dc=dev"
PHOTOPRISM_LDAP_ROLE: ""
PHOTOPRISM_LDAP_ROLE_DN: "ou=photoprism-*,ou=groups,dc=localssl,dc=dev"
PHOTOPRISM_LDAP_WEBDAV_DN: "ou=photoprism-webdav,ou=groups,dc=localssl,dc=dev"
## HTTPS/TLS Options:
## see https://docs.photoprism.app/getting-started/using-https/
PHOTOPRISM_DISABLE_TLS: "true"
PHOTOPRISM_DEFAULT_TLS: "true"
## Site Information:
PHOTOPRISM_SITE_URL: "https://app.localssl.dev/" # server URL in the format "http(s)://domain.name(:port)/(path)"
PHOTOPRISM_SITE_CAPTION: "AI-Powered Photos App"
PHOTOPRISM_SITE_DESCRIPTION: "Open-Source Photo Management"
PHOTOPRISM_SITE_DESCRIPTION: "Tags and finds pictures without getting in your way!"
PHOTOPRISM_SITE_AUTHOR: "@photoprism_app"
PHOTOPRISM_DEBUG: "true"
PHOTOPRISM_READONLY: "false"
@@ -37,56 +94,308 @@ services:
PHOTOPRISM_HTTP_MODE: "debug"
PHOTOPRISM_HTTP_HOST: "0.0.0.0"
PHOTOPRISM_HTTP_PORT: 2342
PHOTOPRISM_HTTP_COMPRESSION: "gzip" # improves transfer speed and bandwidth utilization (none or gzip)
PHOTOPRISM_HTTP_COMPRESSION: "gzip" # improves transfer speed and bandwidth utilization (none or gzip)
PHOTOPRISM_DATABASE_DRIVER: "postgres"
PHOTOPRISM_DATABASE_SERVER: "postgres:5432"
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "photoprism"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"
PHOTOPRISM_ORIGINALS_LIMIT: 128000 # sets originals file size limit to 128 GB
PHOTOPRISM_IMPORT_PATH: "/go/src/github.com/photoprism/photoprism/storage/import"
PHOTOPRISM_DISABLE_CHOWN: "false" # disables updating storage permissions via chmod and chown on startup
PHOTOPRISM_DISABLE_BACKUPS: "false" # disables backing up albums and photo metadata to YAML files
PHOTOPRISM_DISABLE_WEBDAV: "false" # disables built-in WebDAV server
PHOTOPRISM_DISABLE_SETTINGS: "false" # disables settings UI and API
PHOTOPRISM_DISABLE_PLACES: "false" # disables reverse geocoding and maps
PHOTOPRISM_DISABLE_EXIFTOOL: "false" # disables creating JSON metadata sidecar files with ExifTool
PHOTOPRISM_DISABLE_TENSORFLOW: "false" # disables all features depending on TensorFlow
PHOTOPRISM_DETECT_NSFW: "false" # automatically flags photos as private that MAY be offensive (requires TensorFlow)
PHOTOPRISM_UPLOAD_NSFW: "false" # allows uploads that MAY be offensive (no effect without TensorFlow)
PHOTOPRISM_DISABLE_CHOWN: "false" # disables updating storage permissions via chmod and chown on startup
PHOTOPRISM_DISABLE_BACKUPS: "false" # disables backing up albums and photo metadata to YAML files
PHOTOPRISM_DISABLE_WEBDAV: "false" # disables built-in WebDAV server
PHOTOPRISM_DISABLE_SETTINGS: "false" # disables settings UI and API
PHOTOPRISM_DISABLE_PLACES: "false" # disables reverse geocoding and maps
PHOTOPRISM_DISABLE_EXIFTOOL: "false" # disables creating JSON metadata sidecar files with ExifTool
PHOTOPRISM_DISABLE_TENSORFLOW: "false" # disables all features depending on TensorFlow
PHOTOPRISM_DISABLE_RAW: "false" # disables indexing and conversion of RAW images
PHOTOPRISM_RAW_PRESETS: "false" # enables applying user presets when converting RAW images (reduces performance)
PHOTOPRISM_DETECT_NSFW: "false" # automatically flags photos as private that MAY be offensive (requires TensorFlow)
PHOTOPRISM_UPLOAD_NSFW: "false" # allows uploads that MAY be offensive (no effect without TensorFlow)
PHOTOPRISM_UPLOAD_ALLOW: "" # restricts uploads to these file types (comma-separated list of EXTENSIONS; leave blank to allow all)
PHOTOPRISM_UPLOAD_ARCHIVES: "true" # allows upload of zip archives (will be extracted before import)
PHOTOPRISM_RAW_PRESETS: "false" # enables applying user presets when converting RAW images (reduces performance)
PHOTOPRISM_THUMB_FILTER: "lanczos" # resample filter, best to worst: blackman, lanczos, cubic, linear
PHOTOPRISM_THUMB_UNCACHED: "true" # enables on-demand thumbnail rendering (high memory and cpu usage)
PHOTOPRISM_THUMB_SIZE: 1920 # pre-rendered thumbnail size limit (default 1920, min 720, max 7680)
# PHOTOPRISM_THUMB_SIZE: 4096 # Retina 4K, DCI 4K (requires more storage); 7680 for 8K Ultra HD
PHOTOPRISM_THUMB_SIZE_UNCACHED: 7680 # on-demand rendering size limit (default 7680, min 720, max 7680)
PHOTOPRISM_JPEG_SIZE: 7680 # size limit for converted image files in pixels (720-30000)
TF_CPP_MIN_LOG_LEVEL: 1 # show TensorFlow log messages for development
## Run/install on first startup (options: update tensorflow https intel gpu davfs yt-dlp):
PHOTOPRISM_INIT: "https"
PHOTOPRISM_THUMB_LIBRARY: "auto" # image processing library to be used for generating thumbnails (auto, imaging, vips)
PHOTOPRISM_THUMB_FILTER: "auto" # downscaling filter (imaging best to worst: blackman, lanczos, cubic, linear, nearest)
PHOTOPRISM_THUMB_UNCACHED: "true" # enables on-demand thumbnail rendering (high memory and cpu usage)
TF_CPP_MIN_LOG_LEVEL: 1 # show TensorFlow log messages for development
## Video Transcoding (https://docs.photoprism.app/getting-started/advanced/transcoding/):
# PHOTOPRISM_FFMPEG_ENCODER: "software" # H.264/AVC encoder (software, intel, nvidia, apple, raspberry, or vaapi)
# LIBVA_DRIVER_NAME: "i965" # For Intel architectures Haswell and older which do not support QSV yet but use VAAPI instead
PHOTOPRISM_FFMPEG_SIZE: "1920" # video size limit in pixels (720-7680) (default: 3840)
# PHOTOPRISM_FFMPEG_BITRATE: "64" # video bitrate limit in Mbps (default: 60)
## Run/install on first startup (options: update https gpu ffmpeg tensorflow davfs clitools clean):
PHOTOPRISM_INIT: "https postgresql"
## Computer Vision API (https://docs.photoprism.app/getting-started/config-options/#computer-vision):
PHOTOPRISM_VISION_API: "true" # server: enables service API endpoints under /api/v1/vision (requires access token)
PHOTOPRISM_VISION_URI: "" # client: service URI, e.g. http://hostname/api/v1/vision (leave blank to disable)
PHOTOPRISM_VISION_KEY: "" # client: service access token (for authentication)
## Shared devices for video hardware transcoding (optional):
# devices:
# - "/dev/dri:/dev/dri" # Intel QSV (Broadwell and later) or VAAPI (Haswell and earlier)
# - "/dev/video11:/dev/video11" # Video4Linux Video Encode Device (h264_v4l2m2m)
working_dir: "/go/src/github.com/photoprism/photoprism"
volumes:
- ".:/go/src/github.com/photoprism/photoprism"
- "./storage:/photoprism"
- "go-mod:/go/pkg/mod"
## PostgreSQL Database Server
## Docs: https://www.postgresql.org/docs/
postgres:
image: postgres:12-alpine
image: postgres:17-alpine
# image: postgres:16-bookworm
expose:
- "5432"
ports:
- "5432:5432" # database port (host:container)
volumes:
- "postgresql:/var/lib/postgresql"
- "./scripts/sql/postgresql-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
# POSTGRES_INITDB_ARGS: "--locale-provider=icu --icu-locale=und-u-ks-level2"
# these additional options fail: --lc-collate=und-u-ks-level2 --lc-ctype=und-u-ks-level2 --lc-messages=und-u-ks-level2
# POSTGRES_INITDB_ARGS: "--encoding=UTF8"
POSTGRES_DB: photoprism
POSTGRES_USER: photoprism
POSTGRES_PASSWORD: photoprism
## Qdrant (Vector Database)
## Docs: https://qdrant.tech/documentation/guides/installation/#docker-compose
## Release Notes: https://github.com/qdrant/qdrant/releases
## Web UI: https://qdrant.localssl.dev/dashboard
qdrant:
image: qdrant/qdrant:latest
profiles: ["all", "qdrant"]
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:vision.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.qdrant.loadbalancer.server.port=6333"
- "traefik.http.services.qdrant.loadbalancer.server.scheme=http"
- "traefik.http.routers.qdrant.entrypoints=websecure"
- "traefik.http.routers.qdrant.rule=Host(`qdrant.localssl.dev`)"
- "traefik.http.routers.qdrant.priority=3"
- "traefik.http.routers.qdrant.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.qdrant.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.qdrant.tls=true"
expose:
- 6333
- 6334
- 6335
volumes:
- ./.qdrant.yaml:/qdrant/config/production.yaml
- ./storage/qdrant:/qdrant/storage
## PhotoPrism® Computer Vision API
## See: https://github.com/photoprism/photoprism-vision
photoprism-vision:
image: photoprism/vision:latest
entrypoint: [ "/app/venv/bin/flask" ]
command: [ "--app", "app", "run", "--debug", "--host", "0.0.0.0" ]
profiles: ["all", "vision"]
stop_grace_period: 15s
working_dir: "/app"
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:qdrant.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.qdrant.loadbalancer.server.port=5000"
- "traefik.http.services.qdrant.loadbalancer.server.scheme=http"
- "traefik.http.routers.qdrant.entrypoints=websecure"
- "traefik.http.routers.qdrant.rule=Host(`vision.localssl.dev`)"
- "traefik.http.routers.qdrant.priority=3"
- "traefik.http.routers.qdrant.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.qdrant.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.qdrant.tls=true"
expose:
- 5000
environment:
TF_CPP_MIN_LOG_LEVEL: 2
## Ollama client configuration (for the service, see below):
OLLAMA_ENABLED: "true"
OLLAMA_HOST: "http://ollama:11434"
## Ollama Large-Language Model Runner (optional)
## Run "ollama pull [name]:[version]" to download a vision model
## listed at <https://ollama.com/search?c=vision>, for example:
## docker compose exec ollama ollama pull qwen2.5vl:3b
ollama:
image: ollama/ollama:latest
restart: unless-stopped
stop_grace_period: 15s
## Only starts this service if the "vision" or "all" profile is specified:
## docker compose --profile vision up -d
profiles: ["all", "vision"]
## Insecurely exposes the Ollama service on port 11434
## without authentication (for private networks only):
# ports:
# - "11434:11434"
environment:
## Ollama Configuration Options:
OLLAMA_HOST: "0.0.0.0:11434"
OLLAMA_MODELS: "/root/.ollama" # model storage path (see volumes section below)
OLLAMA_MAX_QUEUE: "100" # maximum number of queued requests
OLLAMA_NUM_PARALLEL: "1" # maximum number of parallel requests
OLLAMA_MAX_LOADED_MODELS: "1" # maximum number of loaded models per GPU
OLLAMA_LOAD_TIMEOUT: "5m" # maximum time for loading models (default "5m")
OLLAMA_KEEP_ALIVE: "10m" # duration that models stay loaded in memory (default "5m")
OLLAMA_CONTEXT_LENGTH: "4096" # maximum input context length
OLLAMA_MULTIUSER_CACHE: "1" # optimize prompt caching for multi-user scenarios
# OLLAMA_DEBUG: "1" # shows additional debug information
# OLLAMA_NOPRUNE: "1" # disables pruning of model blobs at startup
# OLLAMA_NOHISTORY: "1" # disables readline history
# OLLAMA_FLASH_ATTENTION: "1" # enables the experimental flash attention feature
# OLLAMA_SCHED_SPREAD: "1" # allows scheduling models across all GPUs.
# OLLAMA_GPU_OVERHEAD: "0" # reserves a portion of VRAM per GPU (bytes)
# OLLAMA_INTEL_GPU: "1" # enables experimental Intel GPU detection
## NVIDIA GPU Hardware Acceleration (optional):
# NVIDIA_VISIBLE_DEVICES: "all"
# NVIDIA_DRIVER_CAPABILITIES: "compute,utility"
volumes:
- "./storage/ollama:/root/.ollama"
## NVIDIA GPU Hardware Acceleration (optional):
# deploy:
# resources:
# reservations:
# devices:
# - driver: "nvidia"
# capabilities: [ gpu ]
# count: "all"
## Traefik v3 (Reverse Proxy)
## includes "*.localssl.dev" SSL certificate for test environments
## Docs: https://doc.traefik.io/traefik/
traefik:
image: photoprism/traefik:latest
security_opt:
- no-new-privileges:true
ports:
- "80:80" # HTTP (redirects to HTTPS)
- "443:443" # HTTPS (required)
labels:
- "traefik.enable=true"
volumes:
- "/var/run/docker.sock:/var/run/docker.sock" # enables Traefik to watch services
## Dummy WebDAV Server
dummy-webdav:
image: photoprism/dummy-webdav:231015
image: photoprism/dummy-webdav:240627
environment:
WEBDAV_USERNAME: admin
WEBDAV_PASSWORD: photoprism
labels:
- "traefik.enable=true"
- "traefik.http.services.dummy-webdav.loadbalancer.server.port=80"
- "traefik.http.routers.dummy-webdav.entrypoints=websecure"
- "traefik.http.routers.dummy-webdav.rule=Host(`dummy-webdav.localssl.dev`)"
- "traefik.http.routers.dummy-webdav.priority=3"
- "traefik.http.routers.dummy-webdav.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.dummy-webdav.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.dummy-webdav.tls=true"
## Dummy OIDC Identity Provider
dummy-oidc:
image: photoprism/dummy-oidc:240627
labels:
- "traefik.enable=true"
- "traefik.http.services.dummy-oidc.loadbalancer.server.port=9998"
- "traefik.http.routers.dummy-oidc.entrypoints=websecure"
- "traefik.http.routers.dummy-oidc.rule=Host(`dummy-oidc.localssl.dev`)"
- "traefik.http.routers.dummy-oidc.priority=3"
- "traefik.http.routers.dummy-oidc.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.dummy-oidc.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.dummy-oidc.tls=true"
## Dummy LDAP Directory Server
## Docs: https://glauth.github.io/docs/
dummy-ldap:
image: glauth/glauth-plugins:latest
ports:
- "127.0.0.1:389:389"
labels:
- "traefik.enable=true"
- "traefik.http.services.ldap.loadbalancer.server.port=5555"
- "traefik.http.routers.dummy-ldap.entrypoints=websecure"
- "traefik.http.routers.dummy-ldap.rule=Host(`dummy-ldap.localssl.dev`)"
- "traefik.http.routers.dummy-ldap.priority=3"
- "traefik.http.routers.dummy-ldap.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.dummy-ldap.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.dummy-ldap.tls=true"
volumes:
- "./.ldap.cfg:/app/config/config.cfg"
## Keycloak (OIDC Identity Provider)
## Docs: https://www.keycloak.org/docs/latest/server_admin/
## Login with "user / photoprism" and "admin / photoprism".
keycloak:
image: quay.io/keycloak/keycloak:25.0
command: "start-dev" # development mode, do not use this in production!
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.keycloak.loadbalancer.server.port=8080"
- "traefik.http.routers.keycloak.entrypoints=websecure"
- "traefik.http.routers.keycloak.rule=Host(`keycloak.localssl.dev`)"
- "traefik.http.routers.keycloak.priority=3"
- "traefik.http.routers.keycloak.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.keycloak.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.keycloak.tls=true"
environment: # see https://www.keycloak.org/server/all-config
KEYCLOAK_ADMIN: "admin"
KEYCLOAK_ADMIN_PASSWORD: "photoprism"
KC_METRICS_ENABLED: "false"
KC_HOSTNAME: "keycloak.localssl.dev"
KC_HOSTNAME_STRICT: "false"
KC_PROXY: "edge"
KC_DB: "postgres"
KC_DB_URL: "jdbc:postgresql://postgres:5432/keycloak"
KC_DB_USERNAME: "keycloak"
KC_DB_PASSWORD: "keycloak"
## Run "docker compose --profile prometheus up" to start your development environment with Prometheus.
## Docs: https://prometheus.io/docs/prometheus/latest/configuration/configuration/#oauth2
## The following grants API access to Prometheus with the preconfigured client credentials (adjust flags as needed):
## ./photoprism client add --id=cs5cpu17n6gj2qo5 --secret=xcCbOrw6I0vcoXzhnOmXhjpVSyFq0l0e -s metrics -n Prometheus -e 60 -t 1
prometheus:
image: prom/prometheus:latest
profiles: ["all", "auth", "prometheus"]
labels:
- "traefik.enable=true"
- "traefik.http.services.prometheus.loadbalancer.server.port=9090"
- "traefik.http.routers.prometheus.entrypoints=websecure"
- "traefik.http.routers.prometheus.rule=Host(`prometheus.localssl.dev`)"
- "traefik.http.routers.prometheus.priority=3"
- "traefik.http.routers.prometheus.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.prometheus.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.prometheus.tls=true"
volumes:
- "./prometheus.yml:/etc/prometheus/prometheus.yml"
## Create named volume for Go module cache
volumes:
go-mod:
driver: local
postgresql:
driver: local
## Create shared "photoprism-develop" network for connecting with services in other compose.yaml files
networks:
default:
name: photoprism
driver: bridge

View File

@@ -101,8 +101,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View File

@@ -78,6 +78,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
chromium-driver \
chromium-sandbox \
mariadb-client \
postgresql-client \
sqlite3 \
libc6-dev \
libssl-dev \

View File

@@ -75,6 +75,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
gettext \
firefox \
mariadb-client \
postgresql-client \
davfs2 \
chrpath \
libc6-dev \

View File

@@ -56,6 +56,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
va-driver-all libva2 iputils-ping dnsutils libmagic-mgc \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-yt-dlp.sh && \
/scripts/install-libheif.sh && \

View File

@@ -72,6 +72,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
&& \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-darktable.sh && \
/scripts/install-yt-dlp.sh && \

View File

@@ -52,6 +52,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
x264 x265 libde265-dev libaom-dev libvpx-dev libwebm-dev libjpeg-dev libmatroska-dev libdvdread-dev \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View File

@@ -68,6 +68,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
&& \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \

View File

@@ -52,6 +52,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
x264 x265 libde265-dev libaom-dev libvpx-dev libwebm-dev libjpeg-dev libmatroska-dev libdvdread-dev \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View File

@@ -68,6 +68,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
&& \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \

View File

@@ -54,6 +54,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
iputils-ping dnsutils \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View File

@@ -70,6 +70,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
&& \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \

View File

@@ -57,6 +57,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
iputils-ping dnsutils \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View File

@@ -73,6 +73,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
&& \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \

View File

@@ -110,6 +110,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
sudo \
bash \
mariadb-client \
postgresql-client \
sqlite3 \
tzdata \
libc6 \

View File

@@ -109,6 +109,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
sudo \
bash \
mariadb-client \
postgresql-client \
sqlite3 \
tzdata \
libc6 \

22
go.mod
View File

@@ -18,14 +18,12 @@ require (
github.com/google/open-location-code/go v0.0.0-20250620134813-83986da0156b
github.com/gorilla/websocket v1.5.3
github.com/gosimple/slug v1.15.0
github.com/jinzhu/gorm v1.9.16
github.com/jinzhu/inflection v1.0.0
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 // indirect
github.com/karrick/godirwalk v1.17.0
github.com/klauspost/cpuid/v2 v2.3.0
github.com/leandro-lugaresi/hub v1.1.1
github.com/leonelquinteros/gotext v1.7.2
github.com/lib/pq v1.10.9 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0
github.com/mandykoh/prism v0.35.3
github.com/manifoldco/promptui v0.9.0
@@ -91,11 +89,14 @@ require (
golang.org/x/mod v0.27.0
golang.org/x/sys v0.35.0
google.golang.org/protobuf v1.36.8
gorm.io/driver/mysql v1.5.7
gorm.io/driver/postgres v1.5.9
gorm.io/driver/sqlite v1.5.6
gorm.io/gorm v1.25.12
)
require (
filippo.io/edwards25519 v1.1.0 // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/boombuler/barcode v1.0.2 // indirect
@@ -122,6 +123,10 @@ require (
github.com/goccy/go-json v0.10.5 // indirect
github.com/gorilla/securecookie v1.1.2 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgx/v5 v5.7.2
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/jonboulle/clockwork v0.5.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
@@ -148,6 +153,11 @@ require (
github.com/ugorji/go/codec v1.2.14 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
github.com/zitadel/logging v0.6.2 // indirect
)
require (
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/zitadel/schema v1.3.1 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/otel v1.37.0 // indirect
@@ -168,11 +178,7 @@ require (
golang.org/x/arch v0.18.0 // indirect
)
require (
github.com/emersion/go-webdav v0.6.0
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
)
require github.com/emersion/go-webdav v0.6.0
go 1.23.7

54
go.sum
View File

@@ -17,20 +17,14 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/IGLOU-EU/go-wildcard v1.0.3 h1:r8T46+8/9V1STciXJomTWRpPEv4nGJATDbJkdU0Nou0=
github.com/IGLOU-EU/go-wildcard v1.0.3/go.mod h1:/qeV4QLmydCbwH0UMQJmXDryrFKJknWi/jjO8IiuQfY=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
github.com/abema/go-mp4 v1.4.1 h1:YoS4VRqd+pAmddRPLFf8vMk74kuGl6ULSjzhsIqwr6M=
github.com/abema/go-mp4 v1.4.1/go.mod h1:vPl9t5ZK7K0x68jh12/+ECWBCXoWuIDtNgPtU2f04ws=
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI=
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bmatcuk/doublestar/v4 v4.9.0 h1:DBvuZxjdKkRP/dr4GVV4w2fnmrk5Hxc90T51LZjv0JA=
@@ -67,8 +61,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davidbyttow/govips/v2 v2.16.0 h1:1nH/Rbx8qZP1hd+oYL9fYQjAnm1+KorX9s07ZGseQmo=
github.com/davidbyttow/govips/v2 v2.16.0/go.mod h1:clH5/IDVmG5eVyc23qYpyi7kmOT0B/1QNTKtci4RkyM=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
@@ -116,8 +108,6 @@ github.com/emersion/go-webdav v0.6.0 h1:rbnBUEXvUM2Zk65Him13LwJOBY0ISltgqM5k6T5L
github.com/emersion/go-webdav v0.6.0/go.mod h1:mI8iBx3RAODwX7PJJ7qzsKAKs/vY429YfS2/9wKnDbQ=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
github.com/esimov/pigo v1.4.6 h1:wpB9FstbqeGP/CZP+nTR52tUJe7XErq8buG+k4xCXlw=
github.com/esimov/pigo v1.4.6/go.mod h1:uqj9Y3+3IRYhFK071rxz1QYq0ePhA6+R9jrUZavi46M=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
@@ -170,7 +160,7 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
github.com/go-sql-driver/mysql v1.9.0 h1:Y0zIbQXhQKmQgTp44Y1dp3wTXcn804QoTptLZT1vtvo=
github.com/go-sql-driver/mysql v1.9.0/go.mod h1:pDetrLJeA3oMujJuvXc8RJoasr589B6A9fwzD3QMrqw=
github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
@@ -178,8 +168,6 @@ github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUW
github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
@@ -235,28 +223,21 @@ github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg=
github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=
github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8=
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI=
github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jdeng/goheif v0.0.0-20200323230657-a0d6a8b3e68f/go.mod h1:G7IyA3/eR9IFmUIPdyP3c0l4ZaqEvXAk876WfaQ8plc=
github.com/jeremija/gosubmit v0.2.8 h1:mmSITBz9JxVtu8eqbN+zmmwX7Ij2RidQxhcwRVI4wqA=
github.com/jeremija/gosubmit v0.2.8/go.mod h1:Ui+HS073lCFREXBbdfrJzMB57OI/bdxTiLtrDHHhFPI=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/jinzhu/gorm v1.9.16 h1:+IyIjPEABKRpsu/F8OvDPy9fyQlgsg2luMV2ZIH5i5o=
github.com/jinzhu/gorm v1.9.16/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I=
@@ -290,9 +271,6 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/leonelquinteros/gotext v1.7.2 h1:bDPndU8nt+/kRo1m4l/1OXiiy2v7Z7dfPQ9+YP7G1Mc=
github.com/leonelquinteros/gotext v1.7.2/go.mod h1:9/haCkm5P7Jay1sxKDGJ5WIg4zkz8oZKw4ekNpALob8=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
@@ -309,7 +287,6 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.0/go.mod h1:JIl7NbARA7phWnGvh0LKTyg7S9BA+6gx71ShQilpsus=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -443,11 +420,9 @@ go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEb
golang.org/x/arch v0.18.0 h1:WN9poc33zL4AzGxqf8VtpKUnGvMi8O9lhNyBMF/85qc=
golang.org/x/arch v0.18.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
@@ -493,7 +468,6 @@ golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ=
golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -509,7 +483,6 @@ golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200320220750-118fecf932d8/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
@@ -694,6 +667,15 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo=
gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
gorm.io/driver/postgres v1.5.9 h1:DkegyItji119OlcaLjqN11kHoUgZ/j13E0jkJZgD6A8=
gorm.io/driver/postgres v1.5.9/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/driver/sqlite v1.5.6 h1:fO/X46qn5NUEEOZtnjJRWRzZMe8nqJiQ9E+0hi+hKQE=
gorm.io/driver/sqlite v1.5.6/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

218
gorm2upgrade.md Normal file
View File

@@ -0,0 +1,218 @@
# Gorm V2 Upgrade Documentation
This document covers what needs to be done to use Gorm V2, which differs from Gorm V1, and what has changed to enable the upgrade from Gorm V1 to Gorm V2.
# Ongoing Development
As development continues, existing columns are changed and new columns are added to the structs that support PhotoPrism.
These structs are turned into tables in the DBMSs that Gorm supports. At the time of writing PhotoPrism supports SQLite and MariaDB, with requests to support PostgreSQL.
Given those requests, the way the Gorm annotations on the structs are used needed to change.
## type/size annotation
For all future development the type/size Gorm annotation must only use the default types that Gorm supports.
Do not use a database-specific datatype such as VARBINARY, VARCHAR, or MEDIUMBLOB.
The following tables give an overview of the database type, the Go type, and the required Gorm annotation; not all types are listed. An illustrative struct using these annotations follows the translation tables below.
If you want the complete set, check the [go-gorm source](https://github.com/go-gorm/) for DataTypeOf for each DBMS.
### MariaDB translation
| DBMS Type | Go Type | Gorm annotation |
|----------------------|---------|-----------------------|
| SMALLINT | int | type:int;size:16; |
| MEDIUMINT | int | type:int;size:24; |
| INT | int | type:int;size:32; |
| BIGINT | int | |
| SMALLINT UNSIGNED | uint | type:uint;size:16; |
| MEDIUMINT UNSIGNED | uint | type:uint;size:24; |
| INT UNSIGNED | uint | type:uint;size:32; |
| BIGINT UNSIGNED | uint | |
| FLOAT | float32 | |
| DOUBLE | float64 | |
| VARBINARY(125) | string | type:byte;size:125; |
| VARCHAR(60) | string | size:60; |
| BLOB | as required | type:byte;size:65535; |
| MEDIUMBLOB | as required | type:byte;size:66666; |
| LONGBLOB | as required | type:byte;size:16777216; |
| DATETIME | time.Time | |
| DECIMAL(16,2) | float64 | precision:16;scale:2; |
### SQLite translation
| DBMS Type | Go Type | Gorm annotation |
|----------------------|---------|-----------------------|
| INTEGER (1) | int | |
| TEXT (2) | string | |
| BLOB (3) | as required | type:byte; |
| REAL (4) | float64 | |
| NUMERIC (5) | time.Time | |
|----------------------|---------|-----------------------|
| SMALLINT (1) | int | type:int;size:16; |
| MEDIUMINT (1) | int | type:int;size:24; |
| INT (1) | int | type:int;size:32; |
| BIGINT (1) | int | |
| SMALLINT UNSIGNED (1) | uint | type:uint;size:16; |
| MEDIUMINT UNSIGNED (1) | uint | type:uint;size:24; |
| INT UNSIGNED (1) | uint | type:uint;size:32; |
| BIGINT UNSIGNED (1) | uint | |
| FLOAT (4) | float32 | |
| DOUBLE (4) | float64 | |
| VARBINARY(125) (2) | string | type:byte;size:125; |
| VARCHAR(60) (2) | string | size:60; |
| BLOB (3) | as required | type:byte;size:65535; |
| MEDIUMBLOB (3) | as required | type:byte;size:66666; |
| LONGBLOB (3) | as required | type:byte;size:16777216; |
| DATETIME (5) | time.Time | |
| DECIMAL(16,2) (5) | float64 | precision:16;scale:2; |
The number in brackets is the "affinity" that SQLite uses to map a foreign DBMS type onto its base set of five types, listed at the top of the table above.
### PostgreSQL translation
| DBMS Type | Go Type | Gorm annotation |
|----------------------|---------|-----------------------|
| SMALLSERIAL | int | size:16;autoIncrement; |
| SERIAL | int | size:32;autoIncrement; |
| BIGSERIAL | int | autoIncrement; |
| SMALLINT | int | size:16; |
| INTEGER | int | size:32; |
| BIGINT | int | |
| SMALLSERIAL | uint | size:15;autoIncrement; |
| SERIAL | uint | size:31;autoIncrement; |
| BIGSERIAL | uint | autoIncrement; |
| SMALLINT | uint | size:15; |
| INTEGER | uint | size:31; |
| BIGINT | uint | |
| NUMERIC(16,2) (5) | float64 | precision:16;scale:2; |
| DECIMAL | float64 | |
| VARCHAR(60) | string | size:60; |
| TEXT | string | |
| TIMESTAMPTZ(4) | time.Time | precision:4; |
| TIMESTAMPTZ | time.Time | |
| BYTEA | Bytes | |
| BYTEA | String | type:byte;size:125; |
| BYTEA | as required | type:byte;size:66666; |
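As an illustration of the guidance above, here is a minimal sketch of a struct that uses only the portable annotations from these tables. The struct and field names are invented for this example and are not part of PhotoPrism.
```
package entity

import (
	"time"

	"gorm.io/gorm"
)

// ExampleRecord is hypothetical; each field corresponds to one row of the translation tables.
type ExampleRecord struct {
	ID        uint           `gorm:"primaryKey;autoIncrement;"` // BIGINT UNSIGNED / BIGSERIAL / INTEGER
	Count     int            `gorm:"type:int;size:16;"`         // SMALLINT on MariaDB and PostgreSQL
	Name      string         `gorm:"size:60;"`                  // VARCHAR(60); TEXT affinity on SQLite
	Payload   []byte         `gorm:"type:bytes;size:66666;"`    // MEDIUMBLOB on MariaDB, BYTEA on PostgreSQL, BLOB on SQLite
	Ratio     float64        `gorm:"precision:16;scale:2;"`     // DECIMAL(16,2) / NUMERIC(16,2)
	TakenAt   time.Time      // DATETIME on MariaDB, TIMESTAMPTZ on PostgreSQL
	DeletedAt gorm.DeletedAt // soft delete support, see the Soft Delete section below
}
```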
## Foreign Keys
The Gorm V2 implementation introduces foreign keys at the database level. This ensures that the relationship between parent and child records is maintained, but it also means that you can't create a child record unless the parent has already been committed to the database (or added earlier in the same transaction).
For example, you can't call the Create function on a Details struct until the Create function on the Photo struct has already been called. This is NOT a change to the way PhotoPrism is already developed.
It is possible to create an instance of a struct that has child structs (e.g. Photo and Details) by including the content of the child struct in the parent struct. Gorm then takes care of creating both records when photo.Create() is called, e.g.
```
photo := Photo{
    TakenAt:  time.Date(2020, 11, 11, 9, 7, 18, 0, time.UTC),
    TakenSrc: SrcMeta,
    Details: &Details{
        Keywords: "nature, frog",
        Notes:    "notes",
    },
}
```
## Queries
Queries must no longer use 0 to represent FALSE and 1 to represent TRUE. Use TRUE/FALSE as appropriate, as in the sketch below.
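A minimal sketch of the rule, assuming an entity with a boolean photo_private column; the package and helper name are invented for this example.
```
package queries

import (
	"gorm.io/gorm"

	"github.com/photoprism/photoprism/internal/entity"
)

// CountPrivate counts private photos. Note the TRUE literal instead of the
// old "photo_private = 1" comparison, so the same SQL is valid on MariaDB,
// SQLite and PostgreSQL.
func CountPrivate(db *gorm.DB) (n int64, err error) {
	err = db.Model(&entity.Photo{}).
		Where("photo_private = TRUE").
		Count(&n).Error

	return n, err
}
```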
## Managing tables
Gorm V2 uses the Migrator for all changes to table structure. DropTableIfExists and CreateTable are replaced with Migrator().DropTable and Migrator().CreateTable. See internal/commands/auth_reset.go for an example; a condensed sketch follows.
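A condensed sketch of that pattern, with error handling reduced to the essentials (the helper name is illustrative):
```
package commands

import (
	"gorm.io/gorm"

	"github.com/photoprism/photoprism/internal/entity"
)

// resetSessions drops and re-creates the auth_sessions table with the Gorm V2
// Migrator, replacing the old DropTableIfExists and CreateTable calls.
func resetSessions(db *gorm.DB) error {
	if err := db.Migrator().DropTable(entity.Session{}); err != nil {
		return err
	}

	return db.Migrator().CreateTable(entity.Session{})
}
```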
## Soft Delete
Gorm V2 has changed how structs support soft deletion. The DeletedAt field now uses the type gorm.DeletedAt, which holds a Time (time.Time) and a Valid boolean indicating whether the record is deleted. The column in the database has not changed.
Valid is true when a record is soft deleted, and Time is populated at the same time, as the sketch below shows.
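A minimal sketch of the new field type and how deletion is detected; the struct is illustrative only.
```
package entity

import "gorm.io/gorm"

// exampleModel only illustrates the change: DeletedAt was *time.Time in
// Gorm V1 and is gorm.DeletedAt in Gorm V2.
type exampleModel struct {
	ID        uint `gorm:"primaryKey;"`
	DeletedAt gorm.DeletedAt
}

// Deleted reports whether the record has been soft deleted: Valid is true and
// Time is populated once the record has been soft deleted.
func (m *exampleModel) Deleted() bool {
	return m.DeletedAt.Valid
}
```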
# Changes made to support Gorm V2
The following provides an overview of the changes that have been made to PhotoPrism to enable Gorm V2.
## Breaking Changes
There is only one known visible change as a result of the implementation of Gorm V2.
It affects the PhotoPrism CLI, where output that previously returned a DeletedDate now differs as follows:
1. Any command that returns a DeletedDate will not return a column for DeletedAt if the record is not deleted.
2. Any command that returns a DeletedDate will return a gorm.DeletedAt structure if the record is deleted.
## Connection Strings
The connection string for SQLite has been changed: &_foreign_keys=on is added to ensure that foreign keys are enabled within SQLite as they are on MariaDB.
## Migrator Changes
The migration has moved from a map to an ordered list to ensure that migrations run in an order that supports foreign keys, instead of randomly.
In addition, the Truncate function has been updated to execute in foreign key order when removing all records from all tables. This process also resets the initial auto increment value to one.
__Newly added tables need to be added to these lists.__
## Structs
The following changes have been made to all Gorm-related PhotoPrism structs.
The definition of a primary key has changed from primary_key to primaryKey.
The definition of auto increment has changed from auto_increment to autoIncrement.
The definition of a foreign key's source has changed from foreignkey to foreignKey.
The definition of a foreign key's target field has changed from association_foreignkey to references.
The definition of a many-to-many relationship has changed from association_jointable_foreignkey to a combination of foreignKey, joinForeignKey, references and joinReferences.
The definition of associations has been removed.
The definition of a unique index has changed from unique_index to uniqueIndex.
The definition of the type SMALLINT has changed from type:SMALLINT to type:int;size:16;
The definition of the type VARBINARY has changed from type:VARBINARY(nn) to type:bytes;size:nn.
The definition of the type VARCHAR has changed from type:VARCHAR(nn) to size:nn.
The definition of the field DeletedAt has changed from *time.Time to gorm.DeletedAt.
The definition of PRELOAD has been removed.
The use of the gorm definition type:DATETIME has been removed (not required).
A before/after example of these tag changes is shown below.
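A before/after sketch of the most common tag renames; the field names are hypothetical and only illustrate the list above.
```
package entity

// exampleEntity shows Gorm V2 tags; the corresponding Gorm V1 form is given in the comments.
type exampleEntity struct {
	// Gorm V1: `gorm:"primary_key;auto_increment;"`
	ID uint `gorm:"primaryKey;autoIncrement;"`

	// Gorm V1: `gorm:"type:VARBINARY(42);unique_index;"`
	EntityUID string `gorm:"type:bytes;size:42;uniqueIndex;"`

	// Gorm V1: `gorm:"type:SMALLINT;"`
	EntityYear int `gorm:"type:int;size:16;"`

	// Gorm V1: `gorm:"type:VARCHAR(200);"`
	EntityName string `gorm:"size:200;"`
}
```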
### Album
The column Photos type has changed from PhotoAlbums to []PhotoAlbum.
### User
The column UserShares type has changed from UserShares to []UserShare.
The columns UserDetails and UserSettings are no longer automatically preloaded.
### Cell
The column Place is no longer automatically preloaded.
### Country
The column CountryPhotoID is no longer a required field. A migration script has been created to change the number 0 to a NULL in the database.
### Face
The column EmbeddingJSON has had its gorm-specific type changed from type:MEDIUMBLOB to type:bytes;size:66666. This supports PostgreSQL and SQLite, which use unsized blob types, whilst the size ensures that MariaDB uses a MEDIUMBLOB type.
### Marker
The columns EmbeddingsJSON and LandmarksJSON have had their gorm-specific types changed from type:MEDIUMBLOB to type:bytes;size:66666. This supports PostgreSQL and SQLite, which use unsized blob types, whilst the size ensures that MariaDB uses a MEDIUMBLOB type.
### Photo
The columns PhotoLat, PhotoLng and PhotoFNumber have had their gorm specific types removed.
The columns Details, Camera, Lens, Cell and Place have had their explicit associations removed.
The columns Keywords and Albums have had many2many relationships defined.
### PhotoAlbum
The columns Photo and Album have been removed. The gorm function SetupJoinTable is used to populate the foreign key into the model because this table is not using the primary keys of Photo and Album.
### PhotoLabel
The columns Photo and Label have had their preload status removed and replaced with foreign key definitions.
### Many to Many joins
The structs Photo and Album are connected via PhotoAlbum by SetupJoinTable.
The structs Photo and Keyword are connected via PhotoKeyword by SetupJoinTable.
The structs Label and LabelCategory are connected via Category by SetupJoinTable.
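A sketch of how these join tables are registered; the association field names ("Albums", "Keywords") are assumptions for this example, while the join structs are the ones named above.
```
package entity

import "gorm.io/gorm"

// setupJoinTables registers the existing join structs so that Gorm V2 uses
// them instead of generating its own join tables.
func setupJoinTables(db *gorm.DB) error {
	// Photo <-> Album via PhotoAlbum.
	if err := db.SetupJoinTable(&Photo{}, "Albums", &PhotoAlbum{}); err != nil {
		return err
	}

	// Photo <-> Keyword via PhotoKeyword; Label <-> LabelCategory via Category
	// follows the same pattern.
	return db.SetupJoinTable(&Photo{}, "Keywords", &PhotoKeyword{})
}
```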
## Queries
Gorm V1 code assumed that 0 = FALSE and 1 = TRUE for boolean values. All such cases have been changed to use TRUE/FALSE as appropriate.

View File

@@ -7,11 +7,13 @@ import (
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
"github.com/photoprism/photoprism/pkg/fs"
)
func TestOptions(t *testing.T) {
var configPath = fs.Abs("testdata")
var configPath = fs.Abs("testdata/" + functions.PhotoPrismTestToFolderName())
_ = os.Mkdir(configPath, os.ModePerm)
var configFile = filepath.Join(configPath, "vision.yml")
t.Run("Save", func(t *testing.T) {
@@ -27,4 +29,5 @@ func TestOptions(t *testing.T) {
err := options.Load(filepath.Join(configPath, "invalid.yml"))
assert.Error(t, err)
})
_ = os.RemoveAll(configPath)
}

View File

@@ -6,6 +6,7 @@ import (
"os"
"strings"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/sirupsen/logrus"
@@ -16,6 +17,7 @@ import (
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/internal/photoprism/get"
"github.com/photoprism/photoprism/internal/server/limiter"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/media/http/header"
)
@@ -38,6 +40,14 @@ func TestMain(m *testing.M) {
log.SetLevel(logrus.TraceLevel)
event.AuditLog = log
caller := "internal/api/api_test.go/TestMain"
dbc, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
// Init test config.
c := config.TestConfig()
get.SetConfig(c)
@@ -47,8 +57,11 @@ func TestMain(m *testing.M) {
limiter.Login = limiter.NewLimit(1, 10000)
// Run unit tests.
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(dbc.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
os.Exit(code)
}

View File

@@ -8,7 +8,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/config"
@@ -257,8 +257,8 @@ func BatchPhotosPrivate(router *gin.RouterGroup) {
log.Infof("photos: updating private flag for %s", clean.Log(frm.String()))
if err := entity.Db().Model(entity.Photo{}).Where("photo_uid IN (?)", frm.Photos).UpdateColumn("photo_private",
gorm.Expr("CASE WHEN photo_private > 0 THEN 0 ELSE 1 END")).Error; err != nil {
if err := entity.Db().Model(&entity.Photo{}).Where("photo_uid IN (?)", frm.Photos).UpdateColumn("photo_private",
gorm.Expr("CASE WHEN photo_private THEN false ELSE true END")).Error; err != nil {
log.Errorf("private: %s", err)
AbortSaveFailed(c)
return

View File

@@ -2,6 +2,8 @@ package api
import (
"encoding/json"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
@@ -17,6 +19,13 @@ func TestGetFoldersOriginals(t *testing.T) {
_ = conf.CreateDirectories()
expected, err := fs.Dirs(conf.OriginalsPath(), false, true)
if len(expected) == 0 {
// create something so that the test does some work.
newpath := filepath.Join(conf.OriginalsPath(), "2025/01")
os.MkdirAll(newpath, os.ModePerm)
expected, err = fs.Dirs(conf.OriginalsPath(), false, true)
}
if err != nil {
t.Fatal(err)
}

View File

@@ -150,7 +150,7 @@ func PhotoUnstack(router *gin.RouterGroup) {
}
if err := entity.UnscopedDb().Exec(`UPDATE files
SET photo_id = ?, photo_uid = ?, file_name = ?, file_missing = 0
SET photo_id = ?, photo_uid = ?, file_name = ?, file_missing = FALSE
WHERE file_name = ? AND file_root = ?`,
newPhoto.ID, newPhoto.PhotoUID, r.RootRelName(),
relName, relRoot).Error; err != nil {

View File

@@ -3,11 +3,13 @@ package session
import (
"os"
"testing"
"time"
"github.com/sirupsen/logrus"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/testextras"
)
func TestMain(m *testing.M) {
@@ -15,10 +17,22 @@ func TestMain(m *testing.M) {
log.SetLevel(logrus.TraceLevel)
event.AuditLog = log
caller := "internal/auth/session/session_test.go/TestMain"
dbc, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
c := config.TestConfig()
defer c.CloseDb()
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(dbc.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
os.Exit(code)
}

View File

@@ -71,7 +71,7 @@ func TestAuthListCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.Empty(t, output)
assert.Contains(t, output, "Incorrect Usage: flag provided but not defined: -xyz")
assert.Error(t, err)
})
}

View File

@@ -57,12 +57,12 @@ func authResetAction(ctx *cli.Context) error {
db := conf.Db()
// Drop existing sessions table.
if err := db.DropTableIfExists(entity.Session{}).Error; err != nil {
if err := db.Migrator().DropTable(entity.Session{}); err != nil {
return err
}
// Re-create auth_sessions.
if err := db.CreateTable(entity.Session{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Session{}); err != nil {
return err
}

View File

@@ -60,7 +60,7 @@ func TestClientsListCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.Empty(t, output)
assert.Contains(t, output, "Incorrect Usage: flag provided but not defined: -xyz")
assert.Error(t, err)
})
}

View File

@@ -53,12 +53,12 @@ func clientsResetAction(ctx *cli.Context) error {
db := conf.Db()
// Drop existing auth_clients table.
if err := db.DropTableIfExists(entity.Client{}).Error; err != nil {
if err := db.Migrator().DropTable(entity.Client{}); err != nil {
return err
}
// Re-create auth_clients.
if err := db.CreateTable(entity.Client{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Client{}); err != nil {
return err
}

View File

@@ -1,9 +1,11 @@
package commands
import (
"bytes"
"flag"
"os"
"testing"
"time"
"github.com/sirupsen/logrus"
"github.com/urfave/cli/v2"
@@ -11,6 +13,7 @@ import (
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/photoprism/get"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/capture"
)
@@ -21,6 +24,14 @@ func TestMain(m *testing.M) {
log.SetLevel(logrus.TraceLevel)
event.AuditLog = log
caller := "internal/commands/commands_test.go/TestMain"
dbc, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
c := config.NewTestConfig("commands")
get.SetConfig(c)
@@ -33,7 +44,11 @@ func TestMain(m *testing.M) {
}
// Run unit tests.
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(dbc.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
os.Exit(code)
}
@@ -67,7 +82,7 @@ func NewTestContext(args []string) *cli.Context {
LogErr(flagSet.Parse(args))
// Create and return new test context.
return cli.NewContext(app, flagSet, nil)
return cli.NewContext(app, flagSet, cli.NewContext(app, flagSet, nil))
}
// RunWithTestContext executes a command with a test context and returns its output.
@@ -75,14 +90,72 @@ func RunWithTestContext(cmd *cli.Command, args []string) (output string, err err
// Create test context with flags and arguments.
ctx := NewTestContext(args)
// TODO: Help output can currently not be generated in test mode due to
// a nil pointer panic in the "github.com/urfave/cli/v2" package.
cmd.HideHelp = true
cmd.HideHelp = false
// Redirect the output from cli to buffer for transfer to output for testing
var catureOutput bytes.Buffer
oldWriter := ctx.App.Writer
ctx.App.Writer = &catureOutput
// Run command with test context.
output = capture.Output(func() {
err = cmd.Run(ctx, args...)
})
ctx.App.Writer = oldWriter
output += catureOutput.String()
return output, err
}
// NewTestContextWithParse creates a new CLI test context with the flags and arguments provided.
func NewTestContextWithParse(appArgs []string, cmdArgs []string) *cli.Context {
// Create new command-line test app.
app := cli.NewApp()
app.Name = "photoprism"
app.Usage = "PhotoPrism®"
app.Description = ""
app.Version = "test"
app.Copyright = "(c) 2018-2025 PhotoPrism UG. All rights reserved."
app.Flags = config.Flags.Cli()
app.Commands = PhotoPrism
app.HelpName = app.Name
app.CustomAppHelpTemplate = ""
app.HideHelp = true
app.HideHelpCommand = true
app.Action = func(*cli.Context) error { return nil }
app.EnableBashCompletion = false
app.Metadata = map[string]interface{}{
"Name": "PhotoPrism",
"About": "PhotoPrism®",
"Edition": "ce",
"Version": "test",
}
// Parse photoprism command arguments.
photoprismFlagSet := flag.NewFlagSet("photoprism", flag.ContinueOnError)
for _, f := range app.Flags {
f.Apply(photoprismFlagSet)
}
LogErr(photoprismFlagSet.Parse(appArgs[1:]))
// Parse command test arguments.
flagSet := flag.NewFlagSet("test", flag.ContinueOnError)
LogErr(flagSet.Parse(cmdArgs))
// Create and return new test context.
return cli.NewContext(app, flagSet, cli.NewContext(app, photoprismFlagSet, nil))
}
func RunWithProvidedTestContext(ctx *cli.Context, cmd *cli.Command, args []string) (output string, err error) {
// Redirect the output from cli to buffer for transfer to output for testing
var catureOutput bytes.Buffer
oldWriter := ctx.App.Writer
ctx.App.Writer = &catureOutput
// Run command with test context.
output = capture.Output(func() {
err = cmd.Run(ctx, args...)
})
ctx.App.Writer = oldWriter
output += catureOutput.String()
return output, err
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,987 @@
package commands
import (
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
"time"
"github.com/leandro-lugaresi/hub"
"github.com/stretchr/testify/assert"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/event"
)
func TestMigrationCommand(t *testing.T) {
t.Run("NoMigrateSettings", func(t *testing.T) {
// Run command with test context.
output, err := RunWithTestContext(MigrationsCommands, []string{"migrations", "transfer"})
// Check command output for plausibility.
// t.Logf(output)
assert.Error(t, err)
if err != nil {
assert.Contains(t, err.Error(), "config: transfer config must be provided")
}
assert.Equal(t, "", output)
})
t.Run("InvalidCommand", func(t *testing.T) {
// Run command with test context.
output, err := RunWithTestContext(MigrationsCommands, []string{"migrations", "--magles"})
// Check command output for plausibility.
// t.Logf(output)
assert.Error(t, err)
if err != nil {
assert.Contains(t, err.Error(), "flag provided but not defined: -magles")
}
assert.Contains(t, output, "flag provided but not defined: -magles")
})
t.Run("TargetPopulated", func(t *testing.T) {
// Setup target database
os.Remove("/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db")
if err := copyFile("/go/src/github.com/photoprism/photoprism/internal/commands/testdata/transfer_sqlite3", "/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db"); err != nil {
t.Fatal(err.Error())
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "mysql",
"--database-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s",
"--transfer-driver", "sqlite",
"--transfer-dsn", "/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db?_busy_timeout=5000&_foreign_keys=on"}
cmdArgs := []string{"migrations", "transfer"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
// t.Logf(output)
assert.Error(t, err)
assert.Contains(t, err.Error(), "migrate: transfer target database is not empty")
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
assert.Contains(t, l, "migrate: transfer batch size set to 100")
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db")
}
})
t.Run("TargetPopulatedBatch500", func(t *testing.T) {
// Setup target database
os.Remove("/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db")
if err := copyFile("/go/src/github.com/photoprism/photoprism/internal/commands/testdata/transfer_sqlite3", "/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db"); err != nil {
t.Fatal(err.Error())
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "mysql",
"--database-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s",
"--transfer-driver", "sqlite",
"--transfer-dsn", "/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db?_busy_timeout=5000&_foreign_keys=on"}
cmdArgs := []string{"migrations", "transfer", "-batch", "500"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
// t.Logf(output)
assert.Error(t, err)
assert.Contains(t, err.Error(), "migrate: transfer target database is not empty")
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
assert.Contains(t, l, "migrate: transfer batch size set to 500")
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/targetpopulated.test.db")
}
})
t.Run("MySQLtoPostgreSQL", func(t *testing.T) {
// Load migrate database as source
if dumpName, err := filepath.Abs("./testdata/transfer_mysql"); err != nil {
t.Fatal(err)
} else if err = exec.Command("mariadb", "-u", "migrate", "-pmigrate", "migrate",
"-e", "source "+dumpName).Run(); err != nil {
t.Fatal(err)
}
// Clear PostgreSQL target (migrate)
if dumpName, err := filepath.Abs("./testdata/reset-migrate.postgresql.sql"); err != nil {
t.Fatal(err)
} else {
if err = exec.Command("psql", "postgresql://photoprism:photoprism@postgres:5432/postgres", "--file="+dumpName).Run(); err != nil {
t.Fatal(err)
}
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "mysql",
"--database-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s",
"--transfer-driver", "postgres",
"--transfer-dsn", "postgresql://migrate:migrate@postgres:5432/migrate?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"}
cmdArgs := []string{"migrations", "transfer", "-batch", "10"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: transfer batch size set to 10")
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(postgres.Open("postgresql://migrate:migrate@postgres:5432/migrate?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
})
t.Run("MySQLtoSQLite", func(t *testing.T) {
// Remove target database file
os.Remove("/go/src/github.com/photoprism/photoprism/storage/mysqltosqlite.test.db")
// Load migrate database as source
if dumpName, err := filepath.Abs("./testdata/transfer_mysql"); err != nil {
t.Fatal(err)
} else if err = exec.Command("mariadb", "-u", "migrate", "-pmigrate", "migrate",
"-e", "source "+dumpName).Run(); err != nil {
t.Fatal(err)
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "mysql",
"--database-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s",
"--transfer-driver", "sqlite",
"--transfer-dsn", "/go/src/github.com/photoprism/photoprism/storage/mysqltosqlite.test.db?_busy_timeout=5000&_foreign_keys=on"}
cmdArgs := []string{"migrations", "transfer", "-batch", "1000"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: transfer batch size set to 1000")
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(sqlite.Open("/go/src/github.com/photoprism/photoprism/storage/mysqltosqlite.test.db?_busy_timeout=5000&_foreign_keys=on"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
// Remove target database file
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/mysqltosqlite.test.db")
}
})
t.Run("MySQLtoSQLitePopulated", func(t *testing.T) {
// Remove target database file
os.Remove("/go/src/github.com/photoprism/photoprism/storage/mysqltosqlitepopulated.test.db")
if err := copyFile("/go/src/github.com/photoprism/photoprism/internal/commands/testdata/transfer_sqlite3", "/go/src/github.com/photoprism/photoprism/storage/mysqltosqlitepopulated.test.db"); err != nil {
t.Fatal(err.Error())
}
// Load migrate database as source
if dumpName, err := filepath.Abs("./testdata/transfer_mysql"); err != nil {
t.Fatal(err)
} else if err = exec.Command("mariadb", "-u", "migrate", "-pmigrate", "migrate",
"-e", "source "+dumpName).Run(); err != nil {
t.Fatal(err)
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "mysql",
"--database-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s",
"--transfer-driver", "sqlite",
"--transfer-dsn", "/go/src/github.com/photoprism/photoprism/storage/mysqltosqlitepopulated.test.db?_busy_timeout=5000&_foreign_keys=on"}
cmdArgs := []string{"migrations", "transfer", "-force"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(sqlite.Open("/go/src/github.com/photoprism/photoprism/storage/mysqltosqlitepopulated.test.db?_busy_timeout=5000&_foreign_keys=on"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
// Remove target database file
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/mysqltosqlitepopulated.test.db")
}
})
t.Run("PostgreSQLtoMySQL", func(t *testing.T) {
// Load migrate database as source
if dumpName, err := filepath.Abs("./testdata/transfer_postgresql"); err != nil {
t.Fatal(err)
} else {
if err = exec.Command("psql", "postgresql://photoprism:photoprism@postgres:5432/postgres", "--file="+dumpName).Run(); err != nil {
t.Fatal(err)
}
}
// Clear MySQL target (migrate)
if dumpName, err := filepath.Abs("./testdata/reset-migrate.mysql.sql"); err != nil {
t.Fatal(err)
} else {
resetFile, err := os.Open(dumpName)
if err != nil {
t.Log("unable to open reset file")
t.Fatal(err)
}
defer resetFile.Close()
cmd := exec.Command("mysql")
cmd.Stdin = resetFile
output, err := cmd.CombinedOutput()
if err != nil {
t.Fatal(err)
}
t.Log(output)
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "postgres",
"--database-dsn", "postgresql://migrate:migrate@postgres:5432/migrate?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable",
"--transfer-driver", "mysql",
"--transfer-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"}
cmdArgs := []string{"migrations", "transfer"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(mysql.Open("migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
})
t.Run("PostgreSQLtoSQLite", func(t *testing.T) {
// Remove target database file
os.Remove("/go/src/github.com/photoprism/photoprism/storage/postgresqltosqlite.test.db")
// Load migrate database as source
if dumpName, err := filepath.Abs("./testdata/transfer_postgresql"); err != nil {
t.Fatal(err)
} else {
if err = exec.Command("psql", "postgresql://photoprism:photoprism@postgres:5432/postgres", "--file="+dumpName).Run(); err != nil {
t.Fatal(err)
}
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "postgres",
"--database-dsn", "postgresql://migrate:migrate@postgres:5432/migrate?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable",
"--transfer-driver", "sqlite",
"--transfer-dsn", "/go/src/github.com/photoprism/photoprism/storage/postgresqltosqlite.test.db?_busy_timeout=5000&_foreign_keys=on"}
cmdArgs := []string{"migrations", "transfer"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(sqlite.Open("/go/src/github.com/photoprism/photoprism/storage/postgresqltosqlite.test.db?_busy_timeout=5000&_foreign_keys=on"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
// Remove target database file
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/postgresqltosqlite.test.db")
}
})
t.Run("SQLiteToMySQL", func(t *testing.T) {
// Remove target database file
os.Remove("/go/src/github.com/photoprism/photoprism/storage/sqlitetomysql.test.db")
// Load migrate database as source
if err := copyFile("/go/src/github.com/photoprism/photoprism/internal/commands/testdata/transfer_sqlite3", "/go/src/github.com/photoprism/photoprism/storage/sqlitetomysql.test.db"); err != nil {
t.Fatal(err.Error())
}
// Clear MySQL target (migrate)
if dumpName, err := filepath.Abs("./testdata/reset-migrate.mysql.sql"); err != nil {
t.Fatal(err)
} else {
resetFile, err := os.Open(dumpName)
if err != nil {
t.Log("unable to open reset file")
t.Fatal(err)
}
defer resetFile.Close()
cmd := exec.Command("mysql")
cmd.Stdin = resetFile
output, err := cmd.CombinedOutput()
if err != nil {
t.Fatal(err)
}
t.Log(string(output))
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "sqlite",
"--database-dsn", "/go/src/github.com/photoprism/photoprism/storage/sqlitetomysql.test.db?_busy_timeout=5000&_foreign_keys=on",
"--transfer-driver", "mysql",
"--transfer-dsn", "migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"}
cmdArgs := []string{"migrations", "transfer"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(mysql.Open("migrate:migrate@tcp(mariadb:4001)/migrate?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
// Remove target database file
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/sqlitetomysql.test.db")
}
})
t.Run("SQLiteToPostgreSQL", func(t *testing.T) {
// Remove target database file
os.Remove("/go/src/github.com/photoprism/photoprism/storage/sqlitetopostgresql.test.db")
// Load migrate database as source
if err := copyFile("/go/src/github.com/photoprism/photoprism/internal/commands/testdata/transfer_sqlite3", "/go/src/github.com/photoprism/photoprism/storage/sqlitetopostgresql.test.db"); err != nil {
t.Fatal(err.Error())
}
// Clear PostgreSQL target (migrate)
if dumpName, err := filepath.Abs("./testdata/reset-migrate.postgresql.sql"); err != nil {
t.Fatal(err)
} else {
if err = exec.Command("psql", "postgresql://photoprism:photoprism@postgres:5432/postgres", "--file="+dumpName).Run(); err != nil {
t.Fatal(err)
}
}
// Run command with test context.
log = event.Log
appArgs := []string{"photoprism",
"--database-driver", "sqlite",
"--database-dsn", "/go/src/github.com/photoprism/photoprism/storage/sqlitetopostgresql.test.db?_busy_timeout=5000&_foreign_keys=on",
"--transfer-driver", "postgres",
"--transfer-dsn", "postgresql://migrate:migrate@postgres:5432/migrate?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"}
cmdArgs := []string{"migrations", "transfer"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
s := event.Subscribe("log.info")
defer event.Unsubscribe(s)
var l string
assert.IsType(t, hub.Subscription{}, s)
go func() {
for msg := range s.Receiver {
l += msg.Fields["message"].(string) + "\n"
}
}()
output, err := RunWithProvidedTestContext(ctx, MigrationsCommands, cmdArgs)
// Check command output for plausibility.
//t.Logf(output)
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
assert.NotContains(t, output, "Usage")
time.Sleep(time.Second)
// Check command output.
if l == "" {
t.Fatal("log output missing")
}
// t.Logf(l)
assert.Contains(t, l, "migrate: number of albums transfered 31")
assert.Contains(t, l, "migrate: number of albumusers transfered 0")
assert.Contains(t, l, "migrate: number of cameras transfered 6")
assert.Contains(t, l, "migrate: number of categories transfered 1")
assert.Contains(t, l, "migrate: number of cells transfered 9")
assert.Contains(t, l, "migrate: number of clients transfered 7")
assert.Contains(t, l, "migrate: number of countries transfered 1")
assert.Contains(t, l, "migrate: number of duplicates transfered 0")
assert.Contains(t, l, "migrate: number of errors transfered 0")
assert.Contains(t, l, "migrate: number of faces transfered 7")
assert.Contains(t, l, "migrate: number of files transfered 71")
assert.Contains(t, l, "migrate: number of fileshares transfered 2")
assert.Contains(t, l, "migrate: number of filesyncs transfered 3")
assert.Contains(t, l, "migrate: number of folders transfered 3")
assert.Contains(t, l, "migrate: number of keywords transfered 26")
assert.Contains(t, l, "migrate: number of labels transfered 32")
assert.Contains(t, l, "migrate: number of lenses transfered 2")
assert.Contains(t, l, "migrate: number of links transfered 5")
assert.Contains(t, l, "migrate: number of markers transfered 18")
assert.Contains(t, l, "migrate: number of passcodes transfered 3")
assert.Contains(t, l, "migrate: number of passwords transfered 11")
assert.Contains(t, l, "migrate: number of photos transfered 58")
assert.Contains(t, l, "migrate: number of photousers transfered 0")
assert.Contains(t, l, "migrate: number of places transfered 10")
assert.Contains(t, l, "migrate: number of reactions transfered 3")
assert.Contains(t, l, "migrate: number of sessions transfered 21")
assert.Contains(t, l, "migrate: number of services transfered 2")
assert.Contains(t, l, "migrate: number of subjects transfered 6")
assert.Contains(t, l, "migrate: number of users transfered 11")
assert.Contains(t, l, "migrate: number of userdetails transfered 9")
assert.Contains(t, l, "migrate: number of usersettings transfered 13")
assert.Contains(t, l, "migrate: number of usershares transfered 1")
// Make sure that a sequence update has worked.
testdb, err := gorm.Open(postgres.Open("postgresql://migrate:migrate@postgres:5432/migrate?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"), &gorm.Config{})
if err != nil {
assert.NoError(t, err)
t.FailNow()
}
lens := entity.Lens{LensSlug: "PhotoPrismTest Data Slug For Lens", LensName: "PhotoPrism Biocular", LensMake: "PhotoPrism", LensModel: "Short", LensType: "Mono", LensDescription: "Special Test Lens"}
if result := testdb.Create(&lens); result.Error != nil {
assert.NoError(t, result.Error)
t.FailNow()
}
// Remove target database file
if !t.Failed() {
os.Remove("/go/src/github.com/photoprism/photoprism/storage/sqlitetomysql.test.db")
}
})
}
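Note: in every sub-test above, the final Create of a Lens row is what actually exercises the sequence update. The fixtures carry explicit primary keys, so unless the transfer also advances the target's auto-increment counter or sequence past the highest migrated ID, the insert fails with a duplicate-key error. On PostgreSQL such an adjustment would typically look like the sketch below; the real transfer implementation is not shown in this excerpt, so the statement is illustrative only.

// Illustrative only: advance the lenses ID sequence past the migrated rows.
db.Exec("SELECT setval(pg_get_serial_sequence('lenses', 'id'), (SELECT COALESCE(MAX(id), 1) FROM lenses))")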
// copyFile copies the source file to the target path, replacing the target if it already exists.
func copyFile(source, target string) (err error) {
if _, err := os.Stat(source); err != nil {
return fmt.Errorf("copyFile: source file %s is required", source)
}
// Remove an existing target file so the copy starts from a clean state.
if _, err := os.Stat(target); err == nil {
if err = os.Remove(target); err != nil {
return fmt.Errorf("copyFile: target file %s can not be removed with error %s", target, err.Error())
}
}
sourceFile, err := os.Open(source)
if err != nil {
return fmt.Errorf("copyFile: source file %s can not be opened with error %s", source, err.Error())
}
defer sourceFile.Close()
targetFile, err := os.Create(target)
if err != nil {
return fmt.Errorf("copyFile: target file %s can not be opened with error %s", target, err.Error())
}
defer func() {
closeErr := targetFile.Close()
if err == nil {
err = closeErr
}
}()
if _, err = io.Copy(targetFile, sourceFile); err != nil {
return fmt.Errorf("copyFile: copy failed with error %s", err.Error())
}
if err = targetFile.Sync(); err != nil {
return fmt.Errorf("copyFile: target sync failed with error %s", err.Error())
}
return nil
}
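Note: the deferred Close in copyFile only propagates its error because the function uses a named return value; with an unnamed return, the assignment inside the deferred closure would be silently dropped. A minimal sketch of the pattern, separate from the helper above:

// Sketch: a named return value lets a deferred Close report its error to the caller.
func writeAll(name string, data []byte) (err error) {
	f, err := os.Create(name)
	if err != nil {
		return err
	}
	defer func() {
		// Runs after err has been set by the return statement below.
		if closeErr := f.Close(); err == nil {
			err = closeErr
		}
	}()
	_, err = f.Write(data)
	return err
}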


@@ -0,0 +1,6 @@
DROP DATABASE IF EXISTS migrate;
CREATE DATABASE IF NOT EXISTS migrate;
CREATE USER IF NOT EXISTS migrate@'%' IDENTIFIED BY 'migrate';
GRANT ALL PRIVILEGES ON migrate.* TO migrate@'%';
FLUSH PRIVILEGES;


@@ -0,0 +1,29 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 17.4
-- Dumped by pg_dump version 17.4 (Ubuntu 17.4-1)
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
DROP DATABASE IF EXISTS migrate WITH (FORCE);
--
-- TOC entry 3924 (class 1262 OID 25875)
-- Name: migrate; Type: DATABASE; Schema: -; Owner: migrate
--
CREATE DATABASE migrate WITH TEMPLATE = template0 ENCODING = 'UTF8' LOCALE_PROVIDER = libc LOCALE = 'en_US.utf8';
ALTER DATABASE migrate OWNER TO migrate;

internal/commands/testdata/transfer_mysql (vendored normal file, 2192 lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -1,7 +1,9 @@
package commands
import (
"strings"
"testing"
"unicode"
"github.com/stretchr/testify/assert"
)
@@ -14,6 +16,14 @@ func TestUsersLegacyCommand(t *testing.T) {
// Check command output for plausibility.
//t.Logf(output)
assert.NoError(t, err)
assert.Contains(t, output, "│ ID │ UID │ Name │ User │ Email │ Admin │ Created At │")
// Remove all whitespace, since the headings are sized dynamically and the assertion would otherwise fail if the table contains records.
var result strings.Builder
result.Grow(len(output))
for _, char := range output {
if !unicode.IsSpace(char) {
result.WriteRune(char)
}
}
assert.Contains(t, result.String(), "│ID│UID│Name│User│Email│Admin│CreatedAt│")
})
}


@@ -75,7 +75,7 @@ func usersListAction(ctx *cli.Context) error {
}
if ctx.Bool("deleted") {
rows[i] = append(rows[i], report.DateTime(user.DeletedAt))
rows[i] = append(rows[i], report.DateTime(&user.DeletedAt.Time))
}
}


@@ -94,7 +94,7 @@ func TestUsersListCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.Empty(t, output)
assert.Contains(t, output, "Incorrect Usage: flag provided but not defined: -xyz")
assert.Error(t, err)
})
}


@@ -5,6 +5,7 @@ import (
"github.com/manifoldco/promptui"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
@@ -60,7 +61,7 @@ func usersModAction(ctx *cli.Context) error {
return fmt.Errorf("user already exists")
}
m.DeletedAt = nil
m.DeletedAt = gorm.DeletedAt{}
log.Infof("user %s will be restored", m.String())
}


@@ -56,37 +56,37 @@ func usersResetAction(ctx *cli.Context) error {
db := conf.Db()
// Drop existing user management tables.
if err := db.DropTableIfExists(entity.User{}, entity.UserDetails{}, entity.UserSettings{}, entity.UserShare{}, entity.Passcode{}, entity.Session{}).Error; err != nil {
if err := db.Migrator().DropTable(entity.User{}, entity.UserDetails{}, entity.UserSettings{}, entity.UserShare{}, entity.Passcode{}, entity.Session{}); err != nil {
return err
}
// Re-create auth_users.
if err := db.CreateTable(entity.User{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.User{}); err != nil {
return err
}
// Re-create auth_users_details.
if err := db.CreateTable(entity.UserDetails{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.UserDetails{}); err != nil {
return err
}
// Re-create auth_users_settings.
if err := db.CreateTable(entity.UserSettings{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.UserSettings{}); err != nil {
return err
}
// Re-create auth_users_shares.
if err := db.CreateTable(entity.UserShare{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.UserShare{}); err != nil {
return err
}
// Re-create passcodes.
if err := db.CreateTable(entity.Passcode{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Passcode{}); err != nil {
return err
}
// Re-create auth_sessions.
if err := db.CreateTable(entity.Session{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Session{}); err != nil {
return err
}


@@ -25,6 +25,7 @@ func TestUsersCommand(t *testing.T) {
assert.Contains(t, output2, "John")
assert.Contains(t, output2, "admin")
assert.Contains(t, output2, "john@test.de")
assert.NotContains(t, output2, "DeletedAt")
// Modify John
// Run command with test context.
@@ -43,7 +44,7 @@ func TestUsersCommand(t *testing.T) {
assert.Contains(t, output4, "Johnny")
assert.Contains(t, output4, "admin")
assert.Contains(t, output4, "johnnny@test.de")
assert.Contains(t, output4, "DeletedAt │ <nil>")
assert.NotContains(t, output4, "DeletedAt")
// Remove John
// Run command with test context.
@@ -62,7 +63,7 @@ func TestUsersCommand(t *testing.T) {
assert.Contains(t, output6, "Johnny")
assert.Contains(t, output6, "admin")
assert.Contains(t, output6, "johnnny@test.de")
assert.Contains(t, output6, "│ DeletedAt │ time.Date")
assert.NotContains(t, output6, "│ DeletedAt │ <nil>")
assert.Contains(t, output6, "│ DeletedAt │ gorm.DeletedAt{Time:time.Date")
assert.NotContains(t, output6, "│ DeletedAt │ gorm.DeletedAt{Time:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), Valid:false}")
})
}


@@ -567,33 +567,33 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
if hidePrivate {
c.Db().
Table("photos").
Select("SUM(photo_type = 'animated' AND photo_quality > -1 AND photo_private = 0) AS animated, " +
"SUM(photo_type = 'video' AND photo_quality > -1 AND photo_private = 0) AS videos, " +
"SUM(photo_type = 'live' AND photo_quality > -1 AND photo_private = 0) AS live, " +
"SUM(photo_type = 'audio' AND photo_quality > -1 AND photo_private = 0) AS audio, " +
"SUM(photo_type = 'document' AND photo_quality > -1 AND photo_private = 0) AS documents, " +
"SUM(photo_quality = -1) AS hidden, " +
"SUM(photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1 AND photo_private = 0) AS photos, " +
"SUM(photo_quality BETWEEN 0 AND 2) AS review, " +
"SUM(photo_favorite = 1 AND photo_private = 0 AND photo_quality > -1) AS favorites, " +
"SUM(photo_private = 1 AND photo_quality > -1) AS private").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = 1 AND (file_missing = 1 OR file_error <> ''))").
Select("COUNT(CASE WHEN photo_type = 'animated' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS animated, " +
"COUNT(CASE WHEN photo_type = 'video' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS videos, " +
"COUNT(CASE WHEN photo_type = 'live' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS live, " +
"COUNT(CASE WHEN photo_type = 'audio' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS audio, " +
"COUNT(CASE WHEN photo_type = 'document' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS documents, " +
"COUNT(CASE WHEN photo_quality = -1 THEN 1 END) AS hidden, " +
"COUNT(CASE WHEN photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS photos, " +
"COUNT(CASE WHEN photo_quality BETWEEN 0 AND 2 THEN 1 END) AS review, " +
"COUNT(CASE WHEN photo_favorite = TRUE AND photo_private = FALSE AND photo_quality > -1 THEN 1 END) AS favorites, " +
"COUNT(CASE WHEN photo_private = TRUE AND photo_quality > -1 THEN 1 END) AS private").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = TRUE AND (file_missing = TRUE OR file_error <> ''))").
Where("deleted_at IS NULL").
Take(&cfg.Count)
} else {
c.Db().
Table("photos").
Select("SUM(photo_type = 'animated' AND photo_quality > -1) AS animated, " +
"SUM(photo_type = 'video' AND photo_quality > -1) AS videos, " +
"SUM(photo_type = 'live' AND photo_quality > -1) AS live, " +
"SUM(photo_type = 'audio' AND photo_quality > -1) AS audio, " +
"SUM(photo_type = 'document' AND photo_quality > -1) AS documents, " +
"SUM(photo_quality = -1) AS hidden, " +
"SUM(photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1) AS photos, " +
"SUM(photo_quality BETWEEN 0 AND 2) AS review, " +
"SUM(photo_favorite = 1 AND photo_quality > -1) AS favorites, " +
Select("COUNT(CASE WHEN photo_type = 'animated' AND photo_quality > -1 THEN 1 END) AS animated, " +
"COUNT(CASE WHEN photo_type = 'video' AND photo_quality > -1 THEN 1 END) AS videos, " +
"COUNT(CASE WHEN photo_type = 'live' AND photo_quality > -1 THEN 1 END) AS live, " +
"COUNT(CASE WHEN photo_type = 'audio' AND photo_quality > -1 THEN 1 END) AS audio, " +
"COUNT(CASE WHEN photo_type = 'document' AND photo_quality > -1 THEN 1 END) AS documents, " +
"COUNT(CASE WHEN photo_quality = -1 THEN 1 END) AS hidden, " +
"COUNT(CASE WHEN photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1 THEN 1 END) AS photos, " +
"COUNT(CASE WHEN photo_quality BETWEEN 0 AND 2 THEN 1 END) AS review, " +
"COUNT(CASE WHEN photo_favorite = TRUE AND photo_quality > -1 THEN 1 END) AS favorites, " +
"0 AS private").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = 1 AND (file_missing = 1 OR file_error <> ''))").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = TRUE AND (file_missing = TRUE OR file_error <> ''))").
Where("deleted_at IS NULL").
Take(&cfg.Count)
}
@@ -602,7 +602,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
if c.Settings().Features.Archive {
c.Db().
Table("photos").
Select("SUM(photo_quality > -1) AS archived").
Select("COUNT(CASE WHEN photo_quality > -1 THEN 1 END) AS archived").
Where("deleted_at IS NOT NULL").
Take(&cfg.Count)
}
@@ -621,34 +621,34 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
Select("MAX(photo_count) AS label_max_photos, COUNT(*) AS labels").
Where("photo_count > 0").
Where("deleted_at IS NULL").
Where("(label_priority >= 0 OR label_favorite = 1)").
Where("(label_priority >= 0 OR label_favorite = TRUE)").
Take(&cfg.Count)
if hidePrivate {
c.Db().
Table("albums").
Select("SUM(album_type = ?) AS albums, "+
"SUM(album_type = ?) AS moments, "+
"SUM(album_type = ?) AS months, "+
"SUM(album_type = ?) AS states, "+
"SUM(album_type = ?) AS folders, "+
"SUM(album_type = ? AND album_private = 1) AS private_albums, "+
"SUM(album_type = ? AND album_private = 1) AS private_moments, "+
"SUM(album_type = ? AND album_private = 1) AS private_months, "+
"SUM(album_type = ? AND album_private = 1) AS private_states, "+
"SUM(album_type = ? AND album_private = 1) AS private_folders",
Select("COUNT(CASE WHEN album_type = ? THEN 1 END) AS albums, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS moments, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS months, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS states, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS folders, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_albums, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_moments, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_months, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_states, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_folders",
entity.AlbumManual, entity.AlbumMoment, entity.AlbumMonth, entity.AlbumState, entity.AlbumFolder,
entity.AlbumManual, entity.AlbumMoment, entity.AlbumMonth, entity.AlbumState, entity.AlbumFolder).
Where("deleted_at IS NULL AND (albums.album_type <> 'folder' OR albums.album_path IN (SELECT photos.photo_path FROM photos WHERE photos.photo_private = 0 AND photos.deleted_at IS NULL))").
Where("deleted_at IS NULL AND (albums.album_type <> 'folder' OR albums.album_path IN (SELECT photos.photo_path FROM photos WHERE photos.photo_private = FALSE AND photos.deleted_at IS NULL))").
Take(&cfg.Count)
} else {
c.Db().
Table("albums").
Select("SUM(album_type = ?) AS albums, "+
"SUM(album_type = ?) AS moments, "+
"SUM(album_type = ?) AS months, "+
"SUM(album_type = ?) AS states, "+
"SUM(album_type = ?) AS folders",
Select("COUNT(CASE WHEN album_type = ? THEN 1 END) AS albums, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS moments, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS months, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS states, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS folders",
entity.AlbumManual, entity.AlbumMoment, entity.AlbumMonth, entity.AlbumState, entity.AlbumFolder).
Where("deleted_at IS NULL AND (albums.album_type <> 'folder' OR albums.album_path IN (SELECT photos.photo_path FROM photos WHERE photos.deleted_at IS NULL))").
Take(&cfg.Count)
@@ -657,7 +657,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
c.Db().
Table("files").
Select("COUNT(*) AS files").
Where("file_missing = 0 AND file_root = ? AND deleted_at IS NULL", entity.RootOriginals).
Where("file_missing = FALSE AND file_root = ? AND deleted_at IS NULL", entity.RootOriginals).
Take(&cfg.Count)
c.Db().
@@ -667,7 +667,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
c.Db().
Table("places").
Select("SUM(photo_count > 0) AS places").
Select("COUNT(CASE WHEN photo_count > 0 THEN 1 END) AS places").
Where("id <> 'zz'").
Take(&cfg.Count)
@@ -691,7 +691,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
Find(&cfg.Lenses)
c.Db().
Where("deleted_at IS NULL AND album_favorite = 1").
Where("deleted_at IS NULL AND album_favorite = TRUE").
Limit(20).Order("album_title").
Find(&cfg.Albums)


@@ -37,13 +37,12 @@ import (
"time"
"github.com/dustin/go-humanize"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/mysql"
_ "github.com/jinzhu/gorm/dialects/sqlite"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/klauspost/cpuid/v2"
"github.com/pbnjay/memory"
"github.com/sirupsen/logrus"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/ai/face"
"github.com/photoprism/photoprism/internal/ai/vision"
@@ -73,6 +72,7 @@ type Config struct {
options *Options
settings *customize.Settings
db *gorm.DB
pool *pgxpool.Pool
dbVersion string
hub *hub.Config
token string
@@ -823,3 +823,43 @@ func (c *Config) Hub() *hub.Config {
return c.hub
}
// SwapDBAndTransfer swaps the database and transfer connection settings in the config.
func (c *Config) SwapDBAndTransfer() error {
if c.db != nil {
return fmt.Errorf("config: database must not be initialised")
}
if c.options.DBTransferDriver == "" &&
c.options.DBTransferDsn == "" &&
c.options.DBTransferName == "" &&
c.options.DBTransferServer == "" {
return fmt.Errorf("config: transfer config must be provided")
}
tempString := c.options.DBTransferDriver
c.options.DBTransferDriver = c.options.DatabaseDriver
c.options.DatabaseDriver = tempString
tempString = c.options.DBTransferDsn
c.options.DBTransferDsn = c.options.DatabaseDsn
c.options.DatabaseDsn = tempString
tempString = c.options.DBTransferName
c.options.DBTransferName = c.options.DatabaseName
c.options.DatabaseName = tempString
tempString = c.options.DBTransferPassword
c.options.DBTransferPassword = c.options.DatabasePassword
c.options.DatabasePassword = tempString
tempString = c.options.DBTransferServer
c.options.DBTransferServer = c.options.DatabaseServer
c.options.DatabaseServer = tempString
tempString = c.options.DBTransferUser
c.options.DBTransferUser = c.options.DatabaseUser
c.options.DatabaseUser = tempString
return nil
}
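Note: a more compact equivalent of the field-by-field swap above, using Go's parallel assignment. This is a sketch only, assuming all of the option fields involved are plain strings, as the code above suggests:

o := c.options
o.DatabaseDriver, o.DBTransferDriver = o.DBTransferDriver, o.DatabaseDriver
o.DatabaseDsn, o.DBTransferDsn = o.DBTransferDsn, o.DatabaseDsn
o.DatabaseName, o.DBTransferName = o.DBTransferName, o.DatabaseName
o.DatabasePassword, o.DBTransferPassword = o.DBTransferPassword, o.DatabasePassword
o.DatabaseServer, o.DBTransferServer = o.DBTransferServer, o.DatabaseServer
o.DatabaseUser, o.DBTransferUser = o.DBTransferUser, o.DatabaseUser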


@@ -4,16 +4,20 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
)
func TestConfig_BackupPath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.BackupPath(""), "/storage/testdata/backup")
expected := "/storage/testdata/" + functions.PhotoPrismTestToFolderName() + "/backup"
assert.Contains(t, c.BackupPath(""), expected)
}
func TestConfig_BackupBasePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.BackupBasePath(), "/storage/testdata/backup")
expected := "/storage/testdata/" + functions.PhotoPrismTestToFolderName() + "/backup"
assert.Contains(t, c.BackupBasePath(), expected)
path := c.options.BackupPath
c.options.BackupPath = "./"
assert.Contains(t, c.BackupBasePath(), "/photoprism/internal/config")
@@ -56,7 +60,8 @@ func TestConfig_BackupDatabase(t *testing.T) {
func TestConfig_BackupDatabasePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.BackupDatabasePath(), "/storage/testdata/backup/sqlite")
expected := "/storage/testdata/" + functions.PhotoPrismTestToFolderName() + "/backup/sqlite"
assert.Contains(t, c.BackupDatabasePath(), expected)
}
func TestConfig_BackupAlbums(t *testing.T) {


@@ -10,10 +10,12 @@ import (
"strings"
"time"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/mysql"
_ "github.com/jinzhu/gorm/dialects/sqlite"
"golang.org/x/mod/semver"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/entity/migrate"
@@ -22,12 +24,11 @@ import (
)
// SQL Databases.
// TODO: PostgreSQL support requires upgrading GORM, so generic column data types can be used.
const (
MySQL = "mysql"
MariaDB = "mariadb"
Postgres = "postgres"
SQLite3 = "sqlite3"
SQLite3 = "sqlite"
)
// SQLite default DSNs.
@@ -36,13 +37,21 @@ const (
SQLiteMemoryDSN = ":memory:"
)
var drivers = map[string]func(string) gorm.Dialector{
MySQL: mysql.Open,
SQLite3: sqlite.Open,
Postgres: postgres.Open,
}
// DatabaseDriver returns the database driver name.
func (c *Config) DatabaseDriver() string {
switch strings.ToLower(c.options.DatabaseDriver) {
case MySQL, MariaDB:
c.options.DatabaseDriver = MySQL
case SQLite3, "sqlite", "test", "file", "":
case SQLite3, "sqlite3", "test", "file", "":
c.options.DatabaseDriver = SQLite3
case Postgres:
c.options.DatabaseDriver = Postgres
case "tidb":
log.Warnf("config: database driver 'tidb' is deprecated, using sqlite")
c.options.DatabaseDriver = SQLite3
@@ -61,8 +70,10 @@ func (c *Config) DatabaseDriverName() string {
switch c.DatabaseDriver() {
case MySQL, MariaDB:
return "MariaDB"
case SQLite3, "sqlite", "test", "file", "":
case SQLite3, "sqlite3", "test", "file", "":
return "SQLite"
case Postgres:
return "PostgreSQL"
case "tidb":
return "TiDB"
default:
@@ -124,16 +135,16 @@ func (c *Config) DatabaseDsn() string {
)
case Postgres:
return fmt.Sprintf(
"user=%s password=%s dbname=%s host=%s port=%d connect_timeout=%d sslmode=disable TimeZone=UTC",
"postgresql://%s:%s@%s:%d/%s?TimeZone=UTC&connect_timeout=%d&lock_timeout=50000&sslmode=disable",
c.DatabaseUser(),
c.DatabasePassword(),
c.DatabaseName(),
c.DatabaseHost(),
c.DatabasePort(),
c.DatabaseName(),
c.DatabaseTimeout(),
)
case SQLite3:
return filepath.Join(c.StoragePath(), "index.db?_busy_timeout=5000")
return filepath.Join(c.StoragePath(), "index.db?_busy_timeout=5000&_foreign_keys=on")
default:
log.Errorf("config: empty database dsn")
return ""
@@ -189,9 +200,17 @@ func (c *Config) DatabaseHost() string {
return c.options.DatabaseServer
}
// _DefaultDatabasePort returns the default port for the configured database driver (PostgreSQL vs MySQL/MariaDB).
func (c *Config) _DefaultDatabasePort() int {
if c.DatabaseDriver() == Postgres {
return 5432
}
return 3306
}
// DatabasePort returns the database server port.
func (c *Config) DatabasePort() int {
const defaultPort = 3306
defaultPort := c._DefaultDatabasePort()
if server := c.DatabaseServer(); server == "" {
return 0
@@ -322,11 +341,16 @@ func (c *Config) Db() *gorm.DB {
// CloseDb closes the db connection (if any).
func (c *Config) CloseDb() error {
if c.db != nil {
if err := c.db.Close(); err == nil {
sqldb, dberr := c.db.DB()
if dberr != nil {
sqldb.Close()
c.db = nil
entity.SetDbProvider(nil)
} else {
return err
return dberr
}
if c.pool != nil {
c.pool.Close()
}
}
@@ -424,6 +448,20 @@ func (c *Config) checkDb(db *gorm.DB) error {
} else if !c.IsDatabaseVersion("v10.5.12") {
return fmt.Errorf("config: MariaDB %s is not supported, see https://docs.photoprism.app/getting-started/#databases", c.dbVersion)
}
case Postgres:
var versions []string
err := db.Raw("SELECT VERSION() AS Value").Pluck("value", &versions).Error
// Version query not supported.
if err != nil {
log.Tracef("config: failed to detect database version (%s)", err)
return nil
}
c.dbVersion = clean.Version(versions[0])
if c.dbVersion == "" {
log.Warnf("config: unknown database server version")
}
case SQLite3:
type Res struct {
Value string `gorm:"column:Value;"`
@@ -449,6 +487,26 @@ func (c *Config) checkDb(db *gorm.DB) error {
return nil
}
// Configure database logging.
func gormConfig() *gorm.Config {
return &gorm.Config{
Logger: logger.New(
log, // This should be dummy.NewLogger(), to match GORM1. Set to log before release...
logger.Config{
SlowThreshold: time.Second, // Slow SQL threshold
LogLevel: logger.Error, // Log level <-- This should be Silent to match GORM1, set to Error before release...
IgnoreRecordNotFoundError: true, // Ignore ErrRecordNotFound error for logger
ParameterizedQueries: true, // Don't include params in the SQL log
Colorful: false, // Disable color
},
),
// Set UTC as the default for created and updated timestamps.
NowFunc: func() time.Time {
return time.Now().UTC()
},
}
}
// connectDb establishes a database connection.
func (c *Config) connectDb() error {
// Make sure this is not running twice.
@@ -468,12 +526,28 @@ func (c *Config) connectDb() error {
}
// Open database connection.
db, err := gorm.Open(dbDriver, dbDsn)
var db *gorm.DB
var err error
if dbDriver == Postgres {
postgresDB, pgxPool := entity.OpenPostgreSQL(dbDsn)
c.pool = pgxPool
db, err = gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
} else {
c.pool = nil
db, err = gorm.Open(drivers[dbDriver](dbDsn), gormConfig())
}
if err != nil || db == nil {
log.Infof("config: waiting for the database to become available")
for i := 1; i <= 12; i++ {
db, err = gorm.Open(dbDriver, dbDsn)
if dbDriver == Postgres {
postgresDB, pgxPool := entity.OpenPostgreSQL(dbDsn)
c.pool = pgxPool
db, err = gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
} else {
c.pool = nil
db, err = gorm.Open(drivers[dbDriver](dbDsn), gormConfig())
}
if db != nil && err == nil {
break
@@ -488,13 +562,19 @@ func (c *Config) connectDb() error {
}
// Configure database logging.
db.LogMode(false)
db.SetLogger(log)
//db.LogMode(false)
//db.SetLogger(log)
// Set database connection parameters.
db.DB().SetMaxOpenConns(c.DatabaseConns())
db.DB().SetMaxIdleConns(c.DatabaseConnsIdle())
db.DB().SetConnMaxLifetime(time.Hour)
if dbDriver != Postgres {
sqlDB, err := db.DB()
if err != nil {
return err
}
sqlDB.SetMaxOpenConns(c.DatabaseConns())
sqlDB.SetMaxIdleConns(c.DatabaseConnsIdle())
sqlDB.SetConnMaxLifetime(time.Hour)
}
// Check database server version.
if err = c.checkDb(db); err != nil {
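Note: the database/sql pool limits above are only applied when the driver is not Postgres, because the PostgreSQL connection is built on a pgx pool (c.pool), which manages its own connection limits. A sketch of how those limits could be applied where the pool is created; entity.OpenPostgreSQL is not shown in this diff, so the construction, and the context and pgxpool imports, are assumptions:

// Hypothetical pool construction; entity.OpenPostgreSQL is not part of this diff.
poolCfg, err := pgxpool.ParseConfig(dbDsn)
if err != nil {
	return err
}
poolCfg.MaxConns = int32(c.DatabaseConns()) // cap open connections, analogous to SetMaxOpenConns
pool, err := pgxpool.NewWithConfig(context.Background(), poolCfg)
if err != nil {
	return err
}
c.pool = pool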


@@ -5,6 +5,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
)
func TestConfig_DatabaseDriver(t *testing.T) {
@@ -30,8 +32,18 @@ func TestConfig_DatabaseVersion(t *testing.T) {
func TestConfig_DatabaseSsl(t *testing.T) {
c := TestConfig()
assert.False(t, c.DatabaseSsl())
driver := c.DatabaseDriverName()
switch driver {
case "SQLite":
assert.False(t, c.DatabaseSsl())
case "MariaDB":
assert.True(t, c.DatabaseSsl())
case "PostgreSQL":
assert.False(t, c.DatabaseSsl())
default:
assert.Empty(t, driver)
assert.Fail(t, "driver not recognised")
}
}
func TestConfig_ParseDatabaseDsn(t *testing.T) {
@@ -94,8 +106,8 @@ func TestConfig_DatabasePortString(t *testing.T) {
func TestConfig_DatabaseName(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseName())
expected := "/go/src/github.com/photoprism/photoprism/storage/testdata/" + functions.PhotoPrismTestToFolderName() + "/index.db?_busy_timeout=5000&_foreign_keys=on"
assert.Equal(t, expected, c.DatabaseName())
}
func TestConfig_DatabaseUser(t *testing.T) {
@@ -129,13 +141,13 @@ func TestConfig_DatabaseDsn(t *testing.T) {
c.options.DatabaseDriver = "MariaDB"
assert.Equal(t, "photoprism:@tcp(localhost)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s", c.DatabaseDsn())
c.options.DatabaseDriver = "tidb"
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDsn())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDsn())
c.options.DatabaseDriver = "Postgres"
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDsn())
assert.Equal(t, "postgresql://photoprism:@localhost:5432/photoprism?TimeZone=UTC&connect_timeout=15&lock_timeout=50000&sslmode=disable", c.DatabaseDsn())
c.options.DatabaseDriver = "SQLite"
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDsn())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDsn())
c.options.DatabaseDriver = ""
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDsn())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDsn())
}
func TestConfig_DatabaseFile(t *testing.T) {
@@ -144,8 +156,8 @@ func TestConfig_DatabaseFile(t *testing.T) {
driver := c.DatabaseDriver()
assert.Equal(t, SQLite3, driver)
c.options.DatabaseDsn = ""
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db", c.DatabaseFile())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDsn())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/index.db", c.DatabaseFile())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDsn())
}
func TestConfig_DatabaseTimeout(t *testing.T) {


@@ -673,6 +673,16 @@ func (c *Config) MariadbDumpBin() string {
return FindBin("", "mariadb-dump", "mysqldump")
}
// PostgreSQLRestoreBin returns the PostgreSQL restore executable file name.
func (c *Config) PostgreSQLRestoreBin() string {
return FindBin("", "pg_restore")
}
// PostgreSQLDumpBin returns the PostgreSQL backup executable file name.
func (c *Config) PostgreSQLDumpBin() string {
return FindBin("", "pg_dump")
}
// SqliteBin returns the sqlite executable file name.
func (c *Config) SqliteBin() string {
return FindBin("", "sqlite3")
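Note: the new PostgreSQLDumpBin and PostgreSQLRestoreBin accessors mirror the existing MariaDB helpers; how the backup and restore commands will call them is not part of this diff. A hypothetical invocation, shown only to illustrate the intent (the flags and the backup path are assumptions):

// Hypothetical usage; backupFile is an assumed path and this wiring is not part of the diff.
backupFile := "/photoprism/storage/backup/postgres/index.dump"
dump := exec.Command(c.PostgreSQLDumpBin(), "--format=custom", "--file="+backupFile, c.DatabaseName())
restore := exec.Command(c.PostgreSQLRestoreBin(), "--clean", "--if-exists", "--dbname="+c.DatabaseName(), backupFile)
_ = dump.Run()
_ = restore.Run()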


@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/rnd"
)
@@ -24,11 +25,11 @@ func TestConfig_FindBin(t *testing.T) {
func TestConfig_SidecarPath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.SidecarPath(), "testdata/sidecar")
assert.Contains(t, c.SidecarPath(), "testdata/"+functions.PhotoPrismTestToFolderName()+"/sidecar")
c.options.SidecarPath = ".photoprism"
assert.Equal(t, ".photoprism", c.SidecarPath())
c.options.SidecarPath = ""
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/sidecar", c.SidecarPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/sidecar", c.SidecarPath())
}
func TestConfig_SidecarYaml(t *testing.T) {
@@ -133,7 +134,7 @@ func TestConfig_TempPath(t *testing.T) {
t.Logf("c.options.TempPath: '%s'", c.options.TempPath)
t.Logf("c.tempPath(): '%s'", d0)
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/temp", c.tempPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/temp", c.tempPath())
c.options.TempPath = ""
@@ -190,22 +191,22 @@ func TestConfig_CmdLibPath(t *testing.T) {
func TestConfig_CachePath2(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/cache", c.CachePath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/cache", c.CachePath())
c.options.CachePath = ""
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/cache", c.CachePath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/cache", c.CachePath())
}
func TestConfig_StoragePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata", c.StoragePath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName(), c.StoragePath())
c.options.StoragePath = ""
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/originals/.photoprism/storage", c.StoragePath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/originals/.photoprism/storage", c.StoragePath())
}
func TestConfig_TestdataPath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/testdata", c.TestdataPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/testdata", c.TestdataPath())
}
func TestConfig_AlbumsPath(t *testing.T) {
@@ -216,13 +217,13 @@ func TestConfig_AlbumsPath(t *testing.T) {
// If this test fails, please manually move “albums” to the “backup” folder
// in the “storage/testdata” directory within your development environment:
// https://github.com/photoprism/photoprism/discussions/4520
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/backup/albums", c.BackupAlbumsPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/backup/albums", c.BackupAlbumsPath())
}
func TestConfig_OriginalsAlbumsPath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/originals/albums", c.OriginalsAlbumsPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/originals/albums", c.OriginalsAlbumsPath())
}
func TestConfig_CreateDirectories(t *testing.T) {
@@ -422,21 +423,21 @@ func TestConfig_CreateDirectories2(t *testing.T) {
func TestConfig_PIDFilename2(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/photoprism.pid", c.PIDFilename())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/photoprism.pid", c.PIDFilename())
c.options.PIDFilename = "/go/src/github.com/photoprism/photoprism/internal/config/testdata/test.pid"
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/internal/config/testdata/test.pid", c.PIDFilename())
}
func TestConfig_LogFilename2(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/photoprism.log", c.LogFilename())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/photoprism.log", c.LogFilename())
c.options.LogFilename = "/go/src/github.com/photoprism/photoprism/internal/config/testdata/test.log"
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/internal/config/testdata/test.log", c.LogFilename())
}
func TestConfig_OriginalsPath2(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/originals", c.OriginalsPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/originals", c.OriginalsPath())
c.options.OriginalsPath = ""
if s := c.OriginalsPath(); s != "" && s != "/photoprism/originals" {
t.Errorf("unexpected originals path: %s", s)
@@ -454,7 +455,7 @@ func TestConfig_OriginalsDeletable(t *testing.T) {
func TestConfig_ImportPath2(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/import", c.ImportPath())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/import", c.ImportPath())
c.options.ImportPath = ""
if s := c.ImportPath(); s != "" && s != "/photoprism/import" {
t.Errorf("unexpected import path: %s", s)


@@ -10,6 +10,8 @@ import (
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/fs"
)
@@ -18,10 +20,22 @@ func TestMain(m *testing.M) {
log = logrus.StandardLogger()
log.SetLevel(logrus.TraceLevel)
caller := "internal/config/config_test.go/TestMain"
dbc, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
c := TestConfig()
defer c.CloseDb()
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(dbc.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
os.Exit(code)
}
@@ -126,14 +140,14 @@ func TestConfig_OptionsYaml(t *testing.T) {
func TestConfig_PIDFilename(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.PIDFilename(), "/storage/testdata/photoprism.pid")
expected := "/storage/testdata/" + functions.PhotoPrismTestToFolderName() + "/photoprism.pid"
assert.Contains(t, c.PIDFilename(), expected)
}
func TestConfig_LogFilename(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.LogFilename(), "/storage/testdata/photoprism.log")
assert.Contains(t, c.LogFilename(), "/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/photoprism.log")
}
func TestConfig_DetachServer(t *testing.T) {
@@ -148,7 +162,7 @@ func TestConfig_OriginalsPath(t *testing.T) {
result := c.OriginalsPath()
assert.True(t, strings.HasPrefix(result, "/"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/originals"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/originals"))
}
func TestConfig_ImportPath(t *testing.T) {
@@ -156,20 +170,20 @@ func TestConfig_ImportPath(t *testing.T) {
result := c.ImportPath()
assert.True(t, strings.HasPrefix(result, "/"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/import"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/import"))
}
func TestConfig_CachePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.True(t, strings.HasSuffix(c.CachePath(), "storage/testdata/cache"))
assert.True(t, strings.HasSuffix(c.CachePath(), "storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/cache"))
}
func TestConfig_MediaCachePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.True(t, strings.HasPrefix(c.MediaCachePath(), "/"))
assert.True(t, strings.HasSuffix(c.MediaCachePath(), "storage/testdata/cache/media"))
assert.True(t, strings.HasSuffix(c.MediaCachePath(), "storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/cache/media"))
}
func TestConfig_MediaFileCachePath(t *testing.T) {
@@ -184,7 +198,7 @@ func TestConfig_ThumbCachePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.True(t, strings.HasPrefix(c.ThumbCachePath(), "/"))
assert.True(t, strings.HasSuffix(c.ThumbCachePath(), "storage/testdata/cache/thumbnails"))
assert.True(t, strings.HasSuffix(c.ThumbCachePath(), "storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/cache/thumbnails"))
}
func TestConfig_AdminUser(t *testing.T) {
@@ -257,14 +271,15 @@ func TestConfig_ThemePath(t *testing.T) {
c := NewConfig(CliTestContext())
path := c.ThemePath()
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/config/theme", path)
expected := "/go/src/github.com/photoprism/photoprism/storage/testdata/" + functions.PhotoPrismTestToFolderName() + "/config/theme"
assert.Equal(t, expected, path)
}
func TestConfig_PortalPath(t *testing.T) {
c := NewConfig(CliTestContext())
path := c.PortalPath()
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/config/portal", path)
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/config/portal", path)
}
func TestConfig_IndexWorkers(t *testing.T) {


@@ -5,11 +5,13 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
)
func TestConfig_VisionYaml(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/config/vision.yml", c.VisionYaml())
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/storage/testdata/"+functions.PhotoPrismTestToFolderName()+"/config/vision.yml", c.VisionYaml())
}
func TestConfig_VisionApi(t *testing.T) {


@@ -5,6 +5,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/functions"
)
func TestNewSettings(t *testing.T) {
@@ -87,13 +89,15 @@ func TestSettings_Save(t *testing.T) {
assert.Equal(t, "onyx", s.UI.Theme)
assert.Equal(t, "de", s.UI.Language)
if err := s.Save("testdata/settings_tmp.yml"); err != nil {
_ = os.Mkdir("testdata/"+functions.PhotoPrismTestToFolderName(), os.ModePerm)
if err := s.Save("testdata/" + functions.PhotoPrismTestToFolderName() + "/settings_tmp.yml"); err != nil {
t.Fatal(err)
}
if err := os.Remove("testdata/settings_tmp.yml"); err != nil {
if err := os.Remove("testdata/" + functions.PhotoPrismTestToFolderName() + "/settings_tmp.yml"); err != nil {
t.Fatal(err)
}
_ = os.Remove("testdata/" + functions.PhotoPrismTestToFolderName())
})
}


@@ -1,6 +1,11 @@
package config
import "regexp"
import (
"net/url"
"regexp"
"strings"
"unicode"
)
// dsnPattern is a regular expression matching a database DSN string.
var dsnPattern = regexp.MustCompile(
@@ -36,27 +41,81 @@ func (d *DSN) Parse(dsn string) {
matches := dsnPattern.FindStringSubmatch(dsn)
names := dsnPattern.SubexpNames()
for i, match := range matches {
switch names[i] {
case "driver":
d.Driver = match
case "user":
d.User = match
case "password":
d.Password = match
case "net":
d.Net = match
case "server":
d.Server = match
case "name":
d.Name = match
case "params":
d.Params = match
if len(matches) > 0 {
for i, match := range matches {
switch names[i] {
case "driver":
d.Driver = match
case "user":
d.User = match
case "password":
d.Password = match
case "net":
d.Net = match
case "server":
d.Server = match
case "name":
d.Name = match
case "params":
d.Params = match
}
}
if d.Net != "" && d.Server == "" {
d.Server = d.Net
d.Net = ""
}
} else {
// Assume this is a PostgreSQL key/value pair connection string.
lastQuote := rune(0)
smartSplit := func(char rune) bool {
switch {
case char == lastQuote:
lastQuote = rune(0)
return false
case lastQuote != rune(0):
return false
case unicode.In(char, unicode.Quotation_Mark):
lastQuote = char
return false
default:
return unicode.IsSpace(char)
}
}
pairs := strings.FieldsFunc(dsn, smartSplit)
params := url.Values{}
host := ""
port := ""
for _, pair := range pairs {
splitPair := strings.Split(pair, "=")
switch strings.ToLower(splitPair[0]) {
case "host":
host = splitPair[1]
case "port":
port = splitPair[1]
case "user":
d.User = splitPair[1]
case "password":
d.Password = splitPair[1]
case "dbname":
d.Name = splitPair[1]
default:
params.Add(splitPair[0], splitPair[1])
}
}
d.Params = params.Encode()
if len(host) > 0 && len(port) > 0 {
d.Server = host + ":" + port
} else if len(host) > 0 {
d.Server = host
} else {
d.Server = ""
}
if len(pairs) > 1 {
d.Driver = "postgresql"
}
}
if d.Net != "" && d.Server == "" {
d.Server = d.Net
d.Net = ""
}
}
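For reference, the quote-aware splitting used above can be exercised on its own; a minimal standalone sketch (not part of the changeset) using only the standard library:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

func main() {
	dsn := "host=postgres port=5432 dbname=my_db application_name='Photo Prism'"
	lastQuote := rune(0)
	// Split on whitespace, but keep quoted values such as 'Photo Prism' together.
	// Note: strings.FieldsFunc documents that the predicate should be stateless;
	// this stateful closure relies on the current left-to-right implementation.
	fields := strings.FieldsFunc(dsn, func(char rune) bool {
		switch {
		case char == lastQuote:
			lastQuote = rune(0)
			return false
		case lastQuote != rune(0):
			return false
		case unicode.In(char, unicode.Quotation_Mark):
			lastQuote = char
			return false
		default:
			return unicode.IsSpace(char)
		}
	})
	fmt.Println(fields)
	// Output: [host=postgres port=5432 dbname=my_db application_name='Photo Prism']
}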

View File

@@ -58,4 +58,43 @@ func TestNewDSN(t *testing.T) {
assert.Equal(t, "my_db", dsn.Name)
assert.Equal(t, "", dsn.Params)
})
t.Run("PostgreSQL URI 1", func(t *testing.T) {
dsn := NewDSN("postgresql://john:pass@postgres:5432/my_db?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable")
assert.Equal(t, "postgresql", dsn.Driver)
assert.Equal(t, "john", dsn.User)
assert.Equal(t, "pass", dsn.Password)
assert.Equal(t, "", dsn.Net)
assert.Equal(t, "postgres:5432", dsn.Server)
assert.Equal(t, "my_db", dsn.Name)
assert.Equal(t, "TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable", dsn.Params)
})
t.Run("PostgreSQL URI 2", func(t *testing.T) {
dsn := NewDSN("postgres://john:pass@postgres:5432/my_db?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable")
assert.Equal(t, "postgres", dsn.Driver)
assert.Equal(t, "john", dsn.User)
assert.Equal(t, "pass", dsn.Password)
assert.Equal(t, "", dsn.Net)
assert.Equal(t, "postgres:5432", dsn.Server)
assert.Equal(t, "my_db", dsn.Name)
assert.Equal(t, "TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable", dsn.Params)
})
t.Run("PostgreSQL Keywords", func(t *testing.T) {
dsn := NewDSN("host=postgres port=5432 dbname=my_db user=john password=pass connect_timeout=15 sslmode=disable TimeZone=UTC application_name='Photo Prism'")
assert.Equal(t, "postgresql", dsn.Driver)
assert.Equal(t, "john", dsn.User)
assert.Equal(t, "pass", dsn.Password)
assert.Equal(t, "", dsn.Net)
assert.Equal(t, "postgres:5432", dsn.Server)
assert.Equal(t, "my_db", dsn.Name)
assert.Contains(t, dsn.Params, "connect_timeout=15")
assert.Contains(t, dsn.Params, "sslmode=disable")
assert.Contains(t, dsn.Params, "TimeZone=UTC")
assert.Contains(t, dsn.Params, "application_name=%27Photo+Prism%27")
})
}
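As the last test expects, the splitter keeps quoted values intact, quotes included, and url.Values.Encode then percent-encodes them; a quick standalone illustration (not part of the changeset):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	params := url.Values{}
	params.Add("application_name", "'Photo Prism'")
	// The single quotes become %27 and the space becomes +.
	fmt.Println(params.Encode()) // application_name=%27Photo+Prism%27
}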

View File

@@ -763,14 +763,14 @@ var Flags = CliFlags{
Flag: &cli.StringFlag{
Name: "database-driver",
Aliases: []string{"db"},
Usage: "database `DRIVER` (sqlite, mysql)",
Usage: "database `DRIVER` (sqlite, mysql, postgres)",
Value: "sqlite",
EnvVars: EnvVars("DATABASE_DRIVER"),
}}, {
Flag: &cli.StringFlag{
Name: "database-dsn",
Aliases: []string{"dsn"},
Usage: "database connection `DSN` (sqlite file, optional for mysql)",
Usage: "database connection `DSN` (sqlite file, optional for mysql and postgres)",
EnvVars: EnvVars("DATABASE_DSN"),
}}, {
Flag: &cli.StringFlag{
@@ -1107,6 +1107,45 @@ var Flags = CliFlags{
EnvVars: EnvVars("LOG_FILENAME"),
TakesFile: true,
}}, {
Flag: &cli.StringFlag{
Name: "transfer-driver",
Aliases: []string{"tfr-db"},
Usage: "database `DRIVER` (sqlite, mysql)",
Value: "sqlite",
EnvVars: EnvVars("TRANSFER_DRIVER"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-dsn",
Aliases: []string{"tfr-dsn"},
Usage: "database connection `DSN` (sqlite file, optional for mysql)",
EnvVars: EnvVars("TRANSFER_DSN"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-name",
Aliases: []string{"tfr-db-name"},
Value: "photoprism",
Usage: "database schema `NAME`",
EnvVars: EnvVars("TRANSFER_NAME"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-server",
Aliases: []string{"tfr-db-server"},
Usage: "database `HOST` incl. port e.g. \"mariadb:3306\" (or socket path)",
EnvVars: EnvVars("TRANSFER_SERVER"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-user",
Aliases: []string{"tfr-db-user"},
Value: "photoprism",
Usage: "database user `NAME`",
EnvVars: EnvVars("TRANSFER_USER"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-password",
Aliases: []string{"tfr-db-pass"},
Usage: "database user `PASSWORD`",
EnvVars: EnvVars("TRANSFER_PASSWORD"),
}}, {
Flag: &cli.StringFlag{
Name: "portal-url",
Usage: "PhotoPrism® Portal server `URL`",

View File

@@ -224,6 +224,12 @@ type Options struct {
PortalSecret string `yaml:"PortalSecret" json:"-" flag:"portal-secret"`
InstanceRoles string `yaml:"InstanceRoles" json:"-" flag:"instance-roles"`
InstanceSecret string `yaml:"InstanceSecret" json:"-" flag:"instance-secret"`
DBTransferDriver string `yaml:"DBTransferDriver" json:"-" flag:"transfer-driver"`
DBTransferDsn string `yaml:"DBTransferDsn" json:"-" flag:"transfer-dsn"`
DBTransferName string `yaml:"DBTransferName" json:"-" flag:"transfer-name"`
DBTransferServer string `yaml:"DBTransferServer" json:"-" flag:"transfer-server"`
DBTransferUser string `yaml:"DBTransferUser" json:"-" flag:"transfer-user"`
DBTransferPassword string `yaml:"DBTransferPassword" json:"-" flag:"transfer-password"`
}
// NewOptions creates a new configuration entity by using two methods:

View File

@@ -12,10 +12,8 @@ import (
"github.com/urfave/cli/v2"
_ "github.com/jinzhu/gorm/dialects/mysql"
_ "github.com/jinzhu/gorm/dialects/sqlite"
"github.com/photoprism/photoprism/internal/config/customize"
"github.com/photoprism/photoprism/internal/functions"
"github.com/photoprism/photoprism/internal/thumb"
"github.com/photoprism/photoprism/pkg/authn"
"github.com/photoprism/photoprism/pkg/capture"
@@ -56,11 +54,11 @@ func NewTestOptions(pkg string) *Options {
storagePath = fs.Abs("../../storage")
}
dataPath := filepath.Join(storagePath, "testdata")
pkg = PkgNameRegexp.ReplaceAllString(pkg, "")
driver := os.Getenv("PHOTOPRISM_TEST_DRIVER")
dsn := os.Getenv("PHOTOPRISM_TEST_DSN")
driver, dsn := functions.PhotoPrismTestToDriverDsn()
// Enforce separate testdata folders so that parallel tests against different DBMSes do not clash.
dataPath := filepath.Join(storagePath, "testdata", functions.PhotoPrismTestToFolderName())
// Config example for MySQL / MariaDB:
// driver = MySQL,
@@ -74,17 +72,19 @@ func NewTestOptions(pkg string) *Options {
// Set default database DSN.
if driver == SQLite3 {
if dsn == "" && pkg != "" {
if dsn = fmt.Sprintf(".%s.db", clean.TypeLower(pkg)); !fs.FileExists(dsn) {
log.Debugf("sqlite: test database %s does not already exist", clean.Log(dsn))
} else if err := os.Remove(dsn); err != nil {
log.Errorf("sqlite: failed to remove existing test database %s (%s)", clean.Log(dsn), err)
dsnFile := fmt.Sprintf(".%s.db", clean.TypeLower(pkg))
dsn = fmt.Sprintf("%s?_foreign_keys=on", dsnFile)
if !fs.FileExists(dsnFile) {
log.Debugf("sqlite: test database %s does not already exist", clean.Log(dsnFile))
} else if err := os.Remove(dsnFile); err != nil {
log.Errorf("sqlite: failed to remove existing test database %s (%s)", clean.Log(dsnFile), err)
}
} else if dsn == "" || dsn == SQLiteTestDB {
dsn = SQLiteTestDB
if !fs.FileExists(dsn) {
log.Debugf("sqlite: test database %s does not already exist", clean.Log(dsn))
} else if err := os.Remove(dsn); err != nil {
log.Errorf("sqlite: failed to remove existing test database %s (%s)", clean.Log(dsn), err)
dsn = fmt.Sprintf("%s?_foreign_keys=on", SQLiteTestDB)
if !fs.FileExists(SQLiteTestDB) {
log.Debugf("sqlite: test database %s does not already exist", clean.Log(SQLiteTestDB))
} else if err := os.Remove(SQLiteTestDB); err != nil {
log.Errorf("sqlite: failed to remove existing test database %s (%s)", clean.Log(SQLiteTestDB), err)
}
}
}
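The appended _foreign_keys=on parameter is a go-sqlite3 connection-string option that enables foreign-key enforcement (equivalent to PRAGMA foreign_keys = ON) for the test database. A minimal sketch of opening such a DSN, assuming the gorm.io/driver/sqlite driver used elsewhere in this changeset:

package main

import (
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

func main() {
	// Foreign-key enforcement is switched on via the DSN rather than a separate PRAGMA call.
	db, err := gorm.Open(sqlite.Open(".test.db?_foreign_keys=on"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	_ = db
}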

View File

@@ -3,9 +3,9 @@ package config
import (
"testing"
"github.com/jinzhu/gorm"
"github.com/stretchr/testify/assert"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/pkg/fs"
)

View File

@@ -0,0 +1,845 @@
//go:build ignore
// +build ignore
package main
import (
"bufio"
"bytes"
"flag"
"fmt"
"math"
"math/rand/v2"
"os"
"os/exec"
"strings"
"sync"
"time"
"github.com/sirupsen/logrus"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/photoprism/photoprism/internal/ai/classify"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/entity/migrate"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/media"
"github.com/photoprism/photoprism/pkg/rnd"
"github.com/photoprism/photoprism/pkg/txt"
)
var drivers = map[string]func(string) gorm.Dialector{
MySQL: mysql.Open,
SQLite3: sqlite.Open,
}
var log = event.Log
// Log logs the error if any and keeps quiet otherwise.
func Log(model, action string, err error) {
if err != nil {
log.Errorf("%s: %s (%s)", model, err, action)
}
}
// UTC returns the current Coordinated Universal Time (UTC).
func UTC() time.Time {
return time.Now().UTC()
}
// Now returns the current time in UTC, truncated to seconds.
func Now() time.Time {
return UTC().Truncate(time.Second)
}
// Db returns the default *gorm.DB connection.
func Db() *gorm.DB {
if dbConn == nil {
return nil
}
return dbConn.Db()
}
// UnscopedDb returns an unscoped *gorm.DB connection
// that returns all records including deleted records.
func UnscopedDb() *gorm.DB {
return Db().Unscoped()
}
// Supported test databases.
const (
MySQL = "mysql"
SQLite3 = "sqlite"
SQLiteTestDB = ".test.db"
SQLiteMemoryDSN = ":memory:?cache=shared"
)
// dbConn is the global gorm.DB connection provider.
var dbConn Gorm
// Gorm is a gorm.DB connection provider interface.
type Gorm interface {
Db() *gorm.DB
}
// DbConn is a gorm.DB connection provider.
type DbConn struct {
Driver string
Dsn string
once sync.Once
db *gorm.DB
pool *pgxpool.Pool
}
// Db returns the gorm db connection.
func (g *DbConn) Db() *gorm.DB {
g.once.Do(g.Open)
if g.db == nil {
log.Fatal("migrate: database not connected")
}
return g.db
}
// Open creates a new gorm db connection.
func (g *DbConn) Open() {
log.Infof("Opening DB connection with driver %s", g.Driver)
var db *gorm.DB
var err error
if g.Driver == entity.Postgres {
postgresDB, pgxPool := entity.OpenPostgreSQL(g.Dsn)
g.pool = pgxPool
db, err = gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
} else {
db, err = gorm.Open(drivers[g.Driver](g.Dsn), gormConfig())
}
if err != nil || db == nil {
for i := 1; i <= 12; i++ {
fmt.Printf("gorm.Open(%s, %s) %d\n", g.Driver, g.Dsn, i)
if g.Driver == entity.Postgres {
postgresDB, pgxPool := entity.OpenPostgreSQL(g.Dsn)
g.pool = pgxPool
db, err = gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
} else {
db, err = gorm.Open(drivers[g.Driver](g.Dsn), gormConfig())
}
if db != nil && err == nil {
break
} else {
time.Sleep(5 * time.Second)
}
}
if err != nil || db == nil {
fmt.Println(err)
log.Fatal(err)
}
}
log.Info("DB connection established successfully")
if g.Driver != entity.Postgres {
sqlDB, _ := db.DB()
sqlDB.SetMaxIdleConns(4)   // config_db uses c.DatabaseConnsIdle(), but the config is not available here.
sqlDB.SetMaxOpenConns(256) // config_db uses c.DatabaseConns(), but the config is not available here.
}
g.db = db
}
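The loop above retries gorm.Open up to 12 times at 5-second intervals before giving up; the same idea expressed as a small generic helper (hypothetical sketch, not part of the changeset):

package main

import (
	"errors"
	"fmt"
	"time"
)

// retry calls fn up to attempts times, sleeping delay between failed attempts.
func retry(attempts int, delay time.Duration, fn func() error) error {
	var err error
	for i := 0; i < attempts; i++ {
		if err = fn(); err == nil {
			return nil
		}
		time.Sleep(delay)
	}
	return err
}

func main() {
	// The connection loop above corresponds to retry(12, 5*time.Second, ...) around gorm.Open.
	err := retry(3, 100*time.Millisecond, func() error {
		return errors.New("database not ready")
	})
	fmt.Println(err)
}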
// Close closes the gorm db connection.
func (g *DbConn) Close() {
if g.db != nil {
sqlDB, _ := g.db.DB()
if err := sqlDB.Close(); err != nil {
log.Fatal(err)
}
g.db = nil
}
}
func gormConfig() *gorm.Config {
return &gorm.Config{
Logger: logger.New(
log,
logger.Config{
SlowThreshold: time.Second, // Slow SQL threshold
LogLevel: logger.Error, // Log level
IgnoreRecordNotFoundError: true, // Ignore ErrRecordNotFound error for logger
ParameterizedQueries: true, // Don't include params in the SQL log
Colorful: false, // Disable color
},
),
// Set UTC as the default for created and updated timestamps.
NowFunc: func() time.Time {
return UTC()
},
}
}
// IsDialect returns true if the given sql dialect is used.
func IsDialect(name string) bool {
return name == Db().Dialector.Name()
}
// DbDialect returns the sql dialect name.
func DbDialect() string {
return Db().Dialector.Name()
}
// SetDbProvider sets the Gorm database connection provider.
func SetDbProvider(conn Gorm) {
dbConn = conn
}
// HasDbProvider returns true if a db provider exists.
func HasDbProvider() bool {
return dbConn != nil
}
var characterRunes = []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
// randomSHA1 returns a pseudo-random 32-character ID (not an actual SHA-1 hash).
func randomSHA1() string {
result := make([]rune, 32)
for i := range result {
result[i] = characterRunes[rand.IntN(len(characterRunes))]
}
return string(result)
}
func main() {
var (
numberOfPhotos int
driver string
dsn string
dropdb bool
sqlitescript bool
)
log = logrus.StandardLogger()
log.SetLevel(logrus.TraceLevel)
event.AuditLog = log
flag.IntVar(&numberOfPhotos, "numberOfPhotos", 0, "Number of photos to generate")
flag.StringVar(&driver, "driver", "sqlite", "GORM driver to use. Choose from sqlite, mysql and postgres")
flag.StringVar(&dsn, "dsn", "testdb.db", "DSN to access the database")
flag.BoolVar(&dropdb, "dropdb", false, "Drop/Delete the database")
flag.BoolVar(&sqlitescript, "sqlitescript", true, "Create an SQLite database from script")
flag.Parse()
if numberOfPhotos < 1 {
flag.PrintDefaults()
log.Errorf("Number of photos is not enough %d", numberOfPhotos)
os.Exit(1)
}
if _, ok := drivers[driver]; ok == false {
flag.PrintDefaults()
log.Errorf("driver %v is not valid", driver)
os.Exit(1)
}
if len(dsn) < 3 {
flag.PrintDefaults()
log.Errorf("dsn %v is to short", dsn)
os.Exit(1)
}
// Set default test database driver.
if driver == "test" || driver == "sqlite" || driver == "" || dsn == "" {
driver = SQLite3
}
// Set default database DSN.
if driver == SQLite3 {
if dsn == "" {
dsn = SQLiteMemoryDSN
}
}
allowDelete := dropdb
if driver == MySQL && allowDelete {
basedsn := dsn[0 : strings.Index(dsn, "/")+1]
basedbname := dsn[strings.Index(dsn, "/")+1 : strings.Index(dsn, "?")]
log.Infof("Connecting to %v", basedsn)
database, err := gorm.Open(mysql.Open(basedsn), &gorm.Config{})
if err != nil {
log.Errorf("Unable to connect to MariaDB %v", err)
}
log.Infof("Dropping database %v if it exists", basedbname)
if res := database.Exec("DROP DATABASE IF EXISTS " + basedbname + ";"); res.Error != nil {
log.Errorf("Unable to drop database %v", res.Error)
os.Exit(1)
}
log.Infof("Creating database %v if it doesnt exist", basedbname)
if res := database.Exec("CREATE DATABASE IF NOT EXISTS " + basedbname + ";"); res.Error != nil {
log.Errorf("Unable to create database %v", res.Error)
os.Exit(1)
}
}
if driver == SQLite3 && dsn != SQLiteMemoryDSN && allowDelete {
filename := dsn
if strings.Index(dsn, "?") > 0 {
if strings.Index(dsn, ":") > 0 {
filename = dsn[strings.Index(dsn, ":")+1 : strings.Index(dsn, "?")]
} else {
filename = dsn[0:strings.Index(dsn, "?")]
}
}
log.Infof("Removing file %v", filename)
os.Remove(filename)
}
log.Infof("Connecting to driver %v with dsn %v", driver, dsn)
// Create gorm.DB connection provider.
db := &DbConn{
Driver: driver,
Dsn: dsn,
}
defer db.Close()
SetDbProvider(db)
// Disable the SQLite journal to speed up bulk inserts.
if driver == SQLite3 {
Db().Exec("PRAGMA journal_mode=OFF")
}
start := time.Now()
log.Info("Create PhotoPrism tables if they don't exist")
// Run migration if the photos table doesn't exist.
// Otherwise, assume the database structure is already valid.
photoCounter := int64(0)
if err := Db().Model(&entity.Photo{}).Count(&photoCounter).Error; err != nil {
// Handle SQLite differently: its initial migration recreates tables, so that behavior has to be simulated here.
if driver == SQLite3 && sqlitescript {
filename := dsn
if strings.Index(dsn, "?") > 0 {
if strings.Index(dsn, ":") > 0 {
filename = dsn[strings.Index(dsn, ":")+1 : strings.Index(dsn, "?")]
} else {
filename = dsn[0:strings.Index(dsn, "?")]
}
}
var cmd *exec.Cmd
bashCmd := fmt.Sprintf("cat ./sqlite3.sql | sqlite3 %s", filename)
cmd = exec.Command("bash", "-c", bashCmd)
// Write restore output to stdout.
var f *os.File
log.Infof("restore: creating database tables from script")
f = os.Stdout
var stderr bytes.Buffer
cmd.Stderr = &stderr
cmd.Stdout = f
// Log exact command for debugging in trace mode.
log.Debug(cmd.String())
// Run restore command.
if cmdErr := cmd.Run(); cmdErr != nil {
if errStr := strings.TrimSpace(stderr.String()); errStr != "" {
log.Error(errStr)
os.Exit(1)
}
}
} else {
entity.Entities.Migrate(Db(), migrate.Opt(true, false, nil))
if err := entity.Entities.WaitForMigration(Db()); err != nil {
log.Errorf("migrate: %s [%s]", err, time.Since(start))
}
}
} else {
log.Errorf("The photos table already exists in driver %v dsn %v.\nAborting...", driver, dsn)
os.Exit(1)
}
entity.SetDbProvider(dbConn)
log.Info("Create default fixtures")
entity.CreateDefaultFixtures()
// Load the database with data.
// Create all the labels and keywords that have specific handling in internal/ai/classify/rules.go
log.Info("Create labels and keywords")
keywords := make(map[string]uint)
labels := make(map[string]uint)
keywordRandoms := make(map[int]uint)
labelRandoms := make(map[int]uint)
keywordPos, labelPos := 0, 0
for label, rule := range classify.Rules {
keyword := entity.Keyword{
Keyword: label,
Skip: false,
}
Db().Create(&keyword)
keywords[label] = keyword.ID
keywordRandoms[keywordPos] = keyword.ID
keywordPos++
if rule.Label != "" {
if _, found := keywords[rule.Label]; found == false {
keyword = entity.Keyword{
Keyword: rule.Label,
Skip: false,
}
Db().Create(&keyword)
keywords[rule.Label] = keyword.ID
keywordRandoms[keywordPos] = keyword.ID
keywordPos++
}
for _, category := range rule.Categories {
if _, found := labels[category]; found == false {
labelDb := entity.Label{
LabelSlug: strings.ToLower(category),
CustomSlug: strings.ToLower(category),
LabelName: strings.ToLower(category),
LabelPriority: 0,
LabelFavorite: false,
LabelDescription: "",
LabelNotes: "",
PhotoCount: 0,
LabelCategories: []*entity.Label{},
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
New: false,
}
Db().Create(&labelDb)
labels[category] = labelDb.ID
labelRandoms[labelPos] = labelDb.ID
labelPos++
}
}
if _, found := labels[rule.Label]; found == false {
labelDb := entity.Label{
LabelSlug: strings.ToLower(rule.Label),
CustomSlug: strings.ToLower(rule.Label),
LabelName: strings.ToLower(rule.Label),
LabelPriority: 0,
LabelFavorite: false,
LabelDescription: "",
LabelNotes: "",
PhotoCount: 0,
LabelCategories: []*entity.Label{},
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
New: false,
}
Db().Create(&labelDb)
labels[rule.Label] = labelDb.ID
labelRandoms[labelPos] = labelDb.ID
labelPos++
for _, category := range rule.Categories {
categoryDb := entity.Category{
LabelID: labelDb.ID,
CategoryID: labels[category],
}
Db().Create(&categoryDb)
}
}
}
}
// Create every possible camera and some lenses. The data is garbage, but it's only test data anyway.
log.Info("Create cameras and lenses")
lensList := [6]string{"Wide Angle", "Fisheye", "Ultra Wide Angle", "Macro", "Super Zoom", "F80"}
cameras := make(map[string]uint)
lenses := make(map[string]uint)
cameraRandoms := make(map[int]uint)
lensRandoms := make(map[int]uint)
cameraPos, lensPos := 0, 0
for _, make := range entity.CameraMakes {
for _, model := range entity.CameraModels {
camera := entity.NewCamera(make, model)
if _, found := cameras[camera.CameraSlug]; found == false {
Db().Create(camera)
cameras[camera.CameraSlug] = camera.ID
cameraRandoms[cameraPos] = camera.ID
cameraPos++
}
}
for _, model := range lensList {
lens := entity.NewLens(make, model)
if _, found := lenses[lens.LensSlug]; found == false {
Db().Create(lens)
lenses[lens.LensSlug] = lens.ID
lensRandoms[lensPos] = lens.ID
lensPos++
}
}
}
// Load up Countries and Places.
log.Info("Create countries and places")
countries := make(map[int]string)
countryPos := 0
places := make(map[int]string)
placePos := 0
PlaceUID := byte('P')
file, _ := os.Open("../../pkg/txt/resources/countries.txt")
defer file.Close()
scanner := bufio.NewScanner(file)
scanner.Split(bufio.ScanLines)
for scanner.Scan() {
parts := strings.Split(scanner.Text(), ":")
if len(parts) < 2 {
continue
}
country := entity.NewCountry(strings.ToLower(parts[0]), strings.ToLower(parts[1]))
counter := int64(0)
Db().Model(&entity.Country{}).Where("id = ?", country.ID).Count(&counter)
if counter == 0 {
Db().Create(country)
countries[countryPos] = strings.ToLower(parts[0])
countryPos++
}
}
for word := range txt.StopWords {
placeUID := rnd.GenerateUID(PlaceUID)
country := countries[rand.IntN(len(countries))]
place := entity.Place{
ID: placeUID,
PlaceLabel: word,
PlaceDistrict: word,
PlaceCity: word,
PlaceState: word,
PlaceCountry: country,
PlaceKeywords: "",
PlaceFavorite: false,
PhotoCount: 0,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(&place)
places[placePos] = placeUID
placePos++
}
// Create some Subjects
log.Info("Create subjects")
subjects := make(map[int]entity.Subject)
subjectPos := 0
for i := 1; i <= 100; i++ {
subject := entity.Subject{
SubjUID: rnd.GenerateUID('j'),
SubjType: entity.SubjPerson,
SubjSrc: entity.SrcImage,
SubjSlug: fmt.Sprintf("person-%03d", i),
SubjName: fmt.Sprintf("Person %03d", i),
SubjFavorite: false,
SubjPrivate: false,
SubjExcluded: false,
FileCount: 0,
PhotoCount: 0,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
}
Db().Create(&subject)
subjects[subjectPos] = subject
subjectPos++
}
log.Info("Start creating photos")
for i := 1; i <= numberOfPhotos; i++ {
if _, frac := math.Modf(float64(i) / 100.0); frac == 0 {
log.Infof("Generating photo number %v", i)
}
month := rand.IntN(11) + 1
day := rand.IntN(28) + 1
year := rand.IntN(45) + 1980
takenAt := time.Date(year, time.Month(month), day, rand.IntN(24), rand.IntN(60), rand.IntN(60), rand.IntN(1000), time.UTC)
labelCount := rand.IntN(5)
// Create the cell for the Photo's location
placeId := places[rand.IntN(len(places))]
lat := (rand.Float64() * 180.0) - 90.0
lng := (rand.Float64() * 360.0) - 180.0
cell := entity.NewCell(lat, lng)
cell.PlaceID = placeId
Db().FirstOrCreate(cell)
folder := entity.Folder{}
if res := Db().Model(&entity.Folder{}).Where("path = ?", fmt.Sprintf("%04d", year)).First(&folder); res.RowsAffected == 0 {
folder = entity.NewFolder("/", fmt.Sprintf("%04d", year), time.Now().UTC())
folder.Create()
}
folder = entity.Folder{}
if res := Db().Model(&entity.Folder{}).Where("path = ?", fmt.Sprintf("%04d/%02d", year, month)).First(&folder); res.RowsAffected == 0 {
folder = entity.NewFolder("/", fmt.Sprintf("%04d/%02d", year, month), time.Now().UTC())
folder.Create()
}
photo := entity.Photo{
// ID
//
// UUID
TakenAt: takenAt,
TakenAtLocal: takenAt,
TakenSrc: entity.SrcMeta,
PhotoUID: rnd.GenerateUID(entity.PhotoUID),
PhotoType: "image",
TypeSrc: entity.SrcAuto,
PhotoTitle: "Performance Test Load",
TitleSrc: entity.SrcImage,
PhotoDescription: "",
DescriptionSrc: entity.SrcAuto,
PhotoPath: fmt.Sprintf("%04d/%02d", year, month),
PhotoName: fmt.Sprintf("PIC%08d", i),
OriginalName: fmt.Sprintf("PIC%08d", i),
PhotoStack: 0,
PhotoFavorite: false,
PhotoPrivate: false,
PhotoScan: false,
PhotoPanorama: false,
TimeZone: "America/Mexico_City",
PlaceID: placeId,
PlaceSrc: entity.SrcMeta,
CellID: cell.ID,
CellAccuracy: 0,
PhotoAltitude: 5,
PhotoLat: lat,
PhotoLng: lng,
PhotoCountry: countries[rand.IntN(len(countries))],
PhotoYear: year,
PhotoMonth: month,
PhotoDay: day,
PhotoIso: 400,
PhotoExposure: "1/60",
PhotoFNumber: 8,
PhotoFocalLength: 2,
PhotoQuality: 3,
PhotoFaces: 0,
PhotoResolution: 0,
// PhotoDuration : 0,
PhotoColor: 12,
CameraID: cameraRandoms[rand.IntN(len(cameraRandoms))],
CameraSerial: "",
CameraSrc: "",
LensID: lensRandoms[rand.IntN(len(lensRandoms))],
// Details :,
// Camera
// Lens
// Cell
// Place
Keywords: []entity.Keyword{},
Albums: []entity.Album{},
Files: []entity.File{},
Labels: []entity.PhotoLabel{},
// CreatedBy
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
EditedAt: nil,
PublishedAt: nil,
CheckedAt: nil,
EstimatedAt: nil,
DeletedAt: gorm.DeletedAt{},
}
Db().Create(&photo)
// Allocate the labels for this photo
for i := 0; i < labelCount; i++ {
photoLabel := entity.NewPhotoLabel(photo.ID, labelRandoms[rand.IntN(len(labelRandoms))], 0, entity.SrcMeta)
Db().FirstOrCreate(photoLabel)
}
// Allocate the keywords for this photo
keywordCount := rand.IntN(5)
keywordStr := ""
for i := 0; i < keywordCount; i++ {
photoKeyword := entity.PhotoKeyword{PhotoID: photo.ID, KeywordID: keywordRandoms[rand.IntN(len(keywordRandoms))]}
keyword := entity.Keyword{}
Db().Model(&entity.Keyword{}).Where("id = ?", photoKeyword.KeywordID).First(&keyword)
Db().FirstOrCreate(&photoKeyword)
if len(keywordStr) > 0 {
keywordStr = fmt.Sprintf("%s,%s", keywordStr, keyword.Keyword)
} else {
keywordStr = keyword.Keyword
}
}
// Create File
file := entity.File{
// ID
// Photo
PhotoID: photo.ID,
PhotoUID: photo.PhotoUID,
PhotoTakenAt: photo.TakenAt,
// TimeIndex
// MediaID
// MediaUTC
InstanceID: "",
FileUID: rnd.GenerateUID(entity.FileUID),
FileName: fmt.Sprintf("%04d/%02d/PIC%08d.jpg", year, month, i),
FileRoot: entity.RootSidecar,
OriginalName: "",
FileHash: rnd.GenerateUID(entity.FileUID),
FileSize: rand.Int64N(1000000),
FileCodec: "",
FileType: string(fs.ImageJpeg),
MediaType: string(media.Image),
FileMime: "image/jpg",
FilePrimary: true,
FileSidecar: false,
FileMissing: false,
FilePortrait: true,
FileVideo: false,
FileDuration: 0,
// FileFPS
// FileFrames
FileWidth: 1200,
FileHeight: 1600,
FileOrientation: 6,
FileOrientationSrc: entity.SrcMeta,
FileProjection: "",
FileAspectRatio: 0.75,
// FileHDR : false,
// FileWatermark
// FileColorProfile
FileMainColor: "magenta",
FileColors: "226611CC1",
FileLuminance: "ABCDEF123",
FileDiff: 456,
FileChroma: 15,
// FileSoftware
// FileError
ModTime: time.Now().Unix(),
CreatedAt: time.Now().UTC(),
CreatedIn: 935962,
UpdatedAt: time.Now().UTC(),
UpdatedIn: 935962,
// PublishedAt
DeletedAt: gorm.DeletedAt{},
Share: []entity.FileShare{},
Sync: []entity.FileSync{},
//markers
}
Db().Create(&file)
// Add Markers
markersToCreate := rand.IntN(5)
for i := 0; i < markersToCreate; i++ {
subject := subjects[rand.IntN(len(subjects))]
marker := entity.Marker{
MarkerUID: rnd.GenerateUID('m'),
FileUID: file.FileUID,
MarkerType: entity.MarkerFace,
MarkerName: subject.SubjName,
MarkerReview: false,
MarkerInvalid: false,
SubjUID: subject.SubjUID,
SubjSrc: subject.SubjSrc,
X: rand.Float32() * 1024.0,
Y: rand.Float32() * 2048.0,
W: rand.Float32() * 10.0,
H: rand.Float32() * 20.0,
Q: 10,
Size: 100,
Score: 10,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(&marker)
face := entity.Face{
ID: randomSHA1(),
FaceSrc: entity.SrcImage,
FaceKind: 1,
FaceHidden: false,
SubjUID: subject.SubjUID,
Samples: 5,
SampleRadius: 0.35,
Collisions: 5,
CollisionRadius: 0.5,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(&face)
}
// Add to Album
albumSlug := fmt.Sprintf("my-photos-from-%04d", year)
album := entity.Album{}
if res := Db().Model(&entity.Album{}).Where("album_slug = ?", albumSlug).First(&album); res.RowsAffected == 0 {
album = entity.Album{
AlbumUID: rnd.GenerateUID(entity.AlbumUID),
AlbumSlug: albumSlug,
AlbumPath: "",
AlbumType: entity.AlbumManual,
AlbumTitle: fmt.Sprintf("My Photos From %04d", year),
AlbumLocation: "",
AlbumCategory: "",
AlbumCaption: "",
AlbumDescription: "A wonderful year",
AlbumNotes: "",
AlbumFilter: "",
AlbumOrder: "oldest",
AlbumTemplate: "",
AlbumCountry: entity.UnknownID,
AlbumYear: year,
AlbumMonth: 0,
AlbumDay: 0,
AlbumFavorite: false,
AlbumPrivate: false,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
}
Db().Create(&album)
}
photoAlbum := entity.PhotoAlbum{
PhotoUID: photo.PhotoUID,
AlbumUID: album.AlbumUID,
Order: 0,
Hidden: false,
Missing: false,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(photoAlbum)
details := entity.Details{
PhotoID: photo.ID,
Keywords: keywordStr,
KeywordsSrc: entity.SrcMeta,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(details)
}
entity.File{}.RegenerateIndex()
entity.UpdateCounts()
log.Infof("Database Creation completed in %s", time.Since(start))
code := 0
os.Exit(code)
}
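Because of the //go:build ignore tag, the generator is run directly with go run rather than being compiled into the main binary. A typical invocation, using the default SQLite driver and the flags defined above (the file name is hypothetical):

go run ./generate_testdata.go -numberOfPhotos 1000 -driver sqlite -dsn testdb.db -dropdb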

View File

@@ -0,0 +1,909 @@
/*M!999999\- enable the sandbox mode */
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*M!100616 SET @OLD_NOTE_VERBOSITY=@@NOTE_VERBOSITY, NOTE_VERBOSITY=0 */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `albums` (
`id` int(10) unsigned NOT NULL,
`album_uid` varbinary(42) DEFAULT NULL,
`parent_uid` varbinary(42) DEFAULT '',
`album_slug` varbinary(160) DEFAULT NULL,
`album_path` varchar(1024) DEFAULT NULL,
`album_type` varbinary(8) DEFAULT 'album',
`album_title` varchar(160) DEFAULT NULL,
`album_location` varchar(160) DEFAULT NULL,
`album_category` varchar(100) DEFAULT NULL,
`album_caption` varchar(1024) DEFAULT NULL,
`album_description` varchar(2048) DEFAULT NULL,
`album_notes` varchar(1024) DEFAULT NULL,
`album_filter` varbinary(2048) DEFAULT '',
`album_order` varbinary(32) DEFAULT NULL,
`album_template` varbinary(255) DEFAULT NULL,
`album_state` varchar(100) DEFAULT NULL,
`album_country` varbinary(2) DEFAULT 'zz',
`album_year` int(11) DEFAULT NULL,
`album_month` int(11) DEFAULT NULL,
`album_day` int(11) DEFAULT NULL,
`album_favorite` tinyint(1) DEFAULT NULL,
`album_private` tinyint(1) DEFAULT NULL,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`created_by` varbinary(42) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_albums_album_uid` (`album_uid`),
KEY `idx_albums_album_state` (`album_state`),
KEY `idx_albums_ymd` (`album_day`),
KEY `idx_albums_thumb` (`thumb`),
KEY `idx_albums_deleted_at` (`deleted_at`),
KEY `idx_albums_album_slug` (`album_slug`),
KEY `idx_albums_album_title` (`album_title`),
KEY `idx_albums_album_category` (`album_category`),
KEY `idx_albums_country_year_month` (`album_country`,`album_year`,`album_month`),
KEY `idx_albums_created_by` (`created_by`),
KEY `idx_albums_published_at` (`published_at`),
KEY `idx_albums_album_path` (`album_path`(768)),
KEY `idx_albums_album_filter` (`album_filter`(512))
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `albums_users` (
`uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) NOT NULL,
`team_uid` varbinary(42) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`uid`,`user_uid`),
KEY `idx_albums_users_user_uid` (`user_uid`),
KEY `idx_albums_users_team_uid` (`team_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_clients` (
`client_uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) DEFAULT '',
`user_name` varchar(200) DEFAULT NULL,
`client_name` varchar(200) DEFAULT NULL,
`client_role` varchar(64) DEFAULT '',
`client_type` varbinary(16) DEFAULT NULL,
`client_url` varbinary(255) DEFAULT '',
`callback_url` varbinary(255) DEFAULT '',
`auth_provider` varbinary(128) DEFAULT '',
`auth_method` varbinary(128) DEFAULT '',
`auth_scope` varchar(1024) DEFAULT '',
`auth_expires` bigint(20) DEFAULT NULL,
`auth_tokens` bigint(20) DEFAULT NULL,
`auth_enabled` tinyint(1) DEFAULT NULL,
`last_active` bigint(20) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`client_uid`),
KEY `idx_auth_clients_user_uid` (`user_uid`),
KEY `idx_auth_clients_user_name` (`user_name`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_sessions` (
`id` varbinary(2048) NOT NULL,
`user_uid` varbinary(42) DEFAULT '',
`user_name` varchar(200) DEFAULT NULL,
`client_uid` varbinary(42) DEFAULT '',
`client_name` varchar(200) DEFAULT '',
`client_ip` varchar(64) DEFAULT NULL,
`auth_provider` varbinary(128) DEFAULT '',
`auth_method` varbinary(128) DEFAULT '',
`auth_issuer` varbinary(255) DEFAULT '',
`auth_id` varbinary(255) DEFAULT '',
`auth_scope` varchar(1024) DEFAULT '',
`grant_type` varbinary(64) DEFAULT '',
`last_active` bigint(20) DEFAULT NULL,
`sess_expires` bigint(20) DEFAULT NULL,
`sess_timeout` bigint(20) DEFAULT NULL,
`preview_token` varbinary(64) DEFAULT '',
`download_token` varbinary(64) DEFAULT '',
`access_token` varbinary(4096) DEFAULT '',
`refresh_token` varbinary(2048) DEFAULT NULL,
`id_token` varbinary(2048) DEFAULT NULL,
`user_agent` varchar(512) DEFAULT NULL,
`data_json` varbinary(4096) DEFAULT NULL,
`ref_id` varbinary(16) DEFAULT '',
`login_ip` varchar(64) DEFAULT NULL,
`login_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_auth_sessions_user_uid` (`user_uid`),
KEY `idx_auth_sessions_user_name` (`user_name`),
KEY `idx_auth_sessions_client_uid` (`client_uid`),
KEY `idx_auth_sessions_client_ip` (`client_ip`),
KEY `idx_auth_sessions_auth_id` (`auth_id`),
KEY `idx_auth_sessions_sess_expires` (`sess_expires`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users` (
`id` int(11) NOT NULL,
`user_uuid` varbinary(64) DEFAULT NULL,
`user_uid` varbinary(42) DEFAULT NULL,
`auth_provider` varbinary(128) DEFAULT '',
`auth_method` varbinary(128) DEFAULT '',
`auth_issuer` varbinary(255) DEFAULT '',
`auth_id` varbinary(255) DEFAULT '',
`user_name` varchar(200) DEFAULT NULL,
`display_name` varchar(200) DEFAULT NULL,
`user_email` varchar(255) DEFAULT NULL,
`backup_email` varchar(255) DEFAULT NULL,
`user_role` varchar(64) DEFAULT '',
`user_attr` varchar(1024) DEFAULT NULL,
`super_admin` tinyint(1) DEFAULT NULL,
`can_login` tinyint(1) DEFAULT NULL,
`login_at` datetime DEFAULT NULL,
`expires_at` datetime DEFAULT NULL,
`webdav` tinyint(1) DEFAULT NULL,
`base_path` varbinary(1024) DEFAULT NULL,
`upload_path` varbinary(1024) DEFAULT NULL,
`can_invite` tinyint(1) DEFAULT NULL,
`invite_token` varbinary(64) DEFAULT NULL,
`invited_by` varchar(64) DEFAULT NULL,
`verify_token` varbinary(64) DEFAULT NULL,
`verified_at` datetime DEFAULT NULL,
`consent_at` datetime DEFAULT NULL,
`born_at` datetime DEFAULT NULL,
`reset_token` varbinary(64) DEFAULT NULL,
`preview_token` varbinary(64) DEFAULT NULL,
`download_token` varbinary(64) DEFAULT NULL,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`ref_id` varbinary(16) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_auth_users_user_uid` (`user_uid`),
KEY `idx_auth_users_user_email` (`user_email`),
KEY `idx_auth_users_invite_token` (`invite_token`),
KEY `idx_auth_users_born_at` (`born_at`),
KEY `idx_auth_users_thumb` (`thumb`),
KEY `idx_auth_users_user_uuid` (`user_uuid`),
KEY `idx_auth_users_auth_id` (`auth_id`),
KEY `idx_auth_users_user_name` (`user_name`),
KEY `idx_auth_users_expires_at` (`expires_at`),
KEY `idx_auth_users_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users_details` (
`user_uid` varbinary(42) NOT NULL,
`subj_uid` varbinary(42) DEFAULT NULL,
`subj_src` varbinary(8) DEFAULT '',
`place_id` varbinary(42) DEFAULT 'zz',
`place_src` varbinary(8) DEFAULT NULL,
`cell_id` varbinary(42) DEFAULT 'zz',
`birth_year` int(11) DEFAULT -1,
`birth_month` int(11) DEFAULT -1,
`birth_day` int(11) DEFAULT -1,
`name_title` varchar(32) DEFAULT NULL,
`given_name` varchar(64) DEFAULT NULL,
`middle_name` varchar(64) DEFAULT NULL,
`family_name` varchar(64) DEFAULT NULL,
`name_suffix` varchar(32) DEFAULT NULL,
`nick_name` varchar(64) DEFAULT NULL,
`name_src` varbinary(8) DEFAULT NULL,
`user_gender` varchar(16) DEFAULT NULL,
`user_about` varchar(512) DEFAULT NULL,
`user_bio` varchar(2048) DEFAULT NULL,
`user_location` varchar(512) DEFAULT NULL,
`user_country` varbinary(2) DEFAULT 'zz',
`user_phone` varchar(32) DEFAULT NULL,
`site_url` varbinary(512) DEFAULT NULL,
`profile_url` varbinary(512) DEFAULT NULL,
`feed_url` varbinary(512) DEFAULT NULL,
`avatar_url` varbinary(512) DEFAULT NULL,
`org_title` varchar(64) DEFAULT NULL,
`org_name` varchar(128) DEFAULT NULL,
`org_email` varchar(255) DEFAULT NULL,
`org_phone` varchar(32) DEFAULT NULL,
`org_url` varbinary(512) DEFAULT NULL,
`id_url` varbinary(512) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`user_uid`),
KEY `idx_auth_users_details_org_email` (`org_email`),
KEY `idx_auth_users_details_subj_uid` (`subj_uid`),
KEY `idx_auth_users_details_place_id` (`place_id`),
KEY `idx_auth_users_details_cell_id` (`cell_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users_settings` (
`user_uid` varbinary(42) NOT NULL,
`ui_theme` varbinary(32) DEFAULT NULL,
`ui_language` varbinary(32) DEFAULT NULL,
`ui_time_zone` varbinary(64) DEFAULT NULL,
`maps_style` varbinary(32) DEFAULT NULL,
`maps_animate` int(11) DEFAULT 0,
`index_path` varbinary(1024) DEFAULT NULL,
`index_rescan` int(11) DEFAULT 0,
`import_path` varbinary(1024) DEFAULT NULL,
`import_move` int(11) DEFAULT 0,
`download_originals` int(11) DEFAULT 0,
`download_media_raw` int(11) DEFAULT 0,
`download_media_sidecar` int(11) DEFAULT 0,
`upload_path` varbinary(1024) DEFAULT NULL,
`default_page` varbinary(128) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`user_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users_shares` (
`user_uid` varbinary(42) NOT NULL,
`share_uid` varbinary(42) NOT NULL,
`link_uid` varbinary(42) DEFAULT NULL,
`expires_at` datetime DEFAULT NULL,
`comment` varchar(512) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
`ref_id` varbinary(16) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`user_uid`,`share_uid`),
KEY `idx_auth_users_shares_share_uid` (`share_uid`),
KEY `idx_auth_users_shares_expires_at` (`expires_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `cameras` (
`id` int(10) unsigned NOT NULL,
`camera_slug` varbinary(160) DEFAULT NULL,
`camera_name` varchar(160) DEFAULT NULL,
`camera_make` varchar(160) DEFAULT NULL,
`camera_model` varchar(160) DEFAULT NULL,
`camera_type` varchar(100) DEFAULT NULL,
`camera_description` varchar(2048) DEFAULT NULL,
`camera_notes` varchar(1024) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_cameras_camera_slug` (`camera_slug`),
KEY `idx_cameras_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `categories` (
`label_id` int(10) unsigned NOT NULL,
`category_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`label_id`,`category_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `cells` (
`id` varbinary(42) NOT NULL,
`cell_name` varchar(200) DEFAULT NULL,
`cell_street` varchar(100) DEFAULT NULL,
`cell_postcode` varchar(50) DEFAULT NULL,
`cell_category` varchar(50) DEFAULT NULL,
`place_id` varbinary(42) DEFAULT 'zz',
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `countries` (
`id` varbinary(2) NOT NULL,
`country_slug` varbinary(160) DEFAULT NULL,
`country_name` varchar(160) DEFAULT NULL,
`country_description` varchar(2048) DEFAULT NULL,
`country_notes` varchar(1024) DEFAULT NULL,
`country_photo_id` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_countries_country_slug` (`country_slug`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `details` (
`photo_id` int(10) unsigned NOT NULL,
`keywords` varchar(2048) DEFAULT NULL,
`keywords_src` varbinary(8) DEFAULT NULL,
`notes` varchar(2048) DEFAULT NULL,
`notes_src` varbinary(8) DEFAULT NULL,
`subject` varchar(1024) DEFAULT NULL,
`subject_src` varbinary(8) DEFAULT NULL,
`artist` varchar(1024) DEFAULT NULL,
`artist_src` varbinary(8) DEFAULT NULL,
`copyright` varchar(1024) DEFAULT NULL,
`copyright_src` varbinary(8) DEFAULT NULL,
`license` varchar(1024) DEFAULT NULL,
`license_src` varbinary(8) DEFAULT NULL,
`software` varchar(1024) DEFAULT NULL,
`software_src` varbinary(8) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`photo_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `duplicates` (
`file_name` varbinary(755) NOT NULL,
`file_root` varbinary(16) NOT NULL DEFAULT '/',
`file_hash` varbinary(128) DEFAULT '',
`file_size` bigint(20) DEFAULT NULL,
`mod_time` bigint(20) DEFAULT NULL,
PRIMARY KEY (`file_name`,`file_root`),
KEY `idx_duplicates_file_hash` (`file_hash`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `errors` (
`id` int(10) unsigned NOT NULL,
`error_time` datetime DEFAULT NULL,
`error_level` varbinary(32) DEFAULT NULL,
`error_message` varbinary(2048) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_errors_error_time` (`error_time`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `faces` (
`id` varbinary(64) NOT NULL,
`face_src` varbinary(8) DEFAULT NULL,
`face_kind` int(11) DEFAULT NULL,
`face_hidden` tinyint(1) DEFAULT NULL,
`subj_uid` varbinary(42) DEFAULT '',
`samples` int(11) DEFAULT NULL,
`sample_radius` double DEFAULT NULL,
`collisions` int(11) DEFAULT NULL,
`collision_radius` double DEFAULT NULL,
`embedding_json` mediumblob DEFAULT NULL,
`matched_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_faces_subj_uid` (`subj_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files` (
`id` int(10) unsigned NOT NULL,
`photo_id` int(10) unsigned DEFAULT NULL,
`photo_uid` varbinary(42) DEFAULT NULL,
`photo_taken_at` datetime DEFAULT NULL,
`time_index` varbinary(64) DEFAULT NULL,
`media_id` varbinary(32) DEFAULT NULL,
`media_utc` bigint(20) DEFAULT NULL,
`instance_id` varbinary(64) DEFAULT NULL,
`file_uid` varbinary(42) DEFAULT NULL,
`file_name` varbinary(1024) DEFAULT NULL,
`file_root` varbinary(16) DEFAULT '/',
`original_name` varbinary(755) DEFAULT NULL,
`file_hash` varbinary(128) DEFAULT NULL,
`file_size` bigint(20) DEFAULT NULL,
`file_codec` varbinary(32) DEFAULT NULL,
`file_type` varbinary(16) DEFAULT NULL,
`media_type` varbinary(16) DEFAULT NULL,
`file_mime` varbinary(64) DEFAULT NULL,
`file_primary` tinyint(1) DEFAULT NULL,
`file_sidecar` tinyint(1) DEFAULT NULL,
`file_missing` tinyint(1) DEFAULT NULL,
`file_portrait` tinyint(1) DEFAULT NULL,
`file_video` tinyint(1) DEFAULT NULL,
`file_duration` bigint(20) DEFAULT NULL,
`file_fps` double DEFAULT NULL,
`file_frames` int(11) DEFAULT NULL,
`file_width` int(11) DEFAULT NULL,
`file_height` int(11) DEFAULT NULL,
`file_orientation` int(11) DEFAULT NULL,
`file_orientation_src` varbinary(8) DEFAULT '',
`file_projection` varbinary(64) DEFAULT NULL,
`file_aspect_ratio` float DEFAULT NULL,
`file_hdr` tinyint(1) DEFAULT NULL,
`file_watermark` tinyint(1) DEFAULT NULL,
`file_color_profile` varbinary(64) DEFAULT NULL,
`file_main_color` varbinary(16) DEFAULT NULL,
`file_colors` varbinary(18) DEFAULT NULL,
`File_luminance` varbinary(18) DEFAULT NULL,
`file_diff` int(11) DEFAULT -1,
`file_chroma` smallint(6) DEFAULT -1,
`file_software` varchar(64) DEFAULT NULL,
`file_error` varbinary(512) DEFAULT NULL,
`mod_time` bigint(20) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`created_in` bigint(20) DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`updated_in` bigint(20) DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_files_file_uid` (`file_uid`),
UNIQUE KEY `idx_files_name_root` (`file_name`,`file_root`),
UNIQUE KEY `idx_files_search_media` (`media_id`),
UNIQUE KEY `idx_files_search_timeline` (`time_index`),
KEY `idx_files_photo_id` (`photo_id`,`file_primary`),
KEY `idx_files_photo_taken_at` (`photo_taken_at`),
KEY `idx_files_file_error` (`file_error`),
KEY `idx_files_published_at` (`published_at`),
KEY `idx_files_deleted_at` (`deleted_at`),
KEY `idx_files_photo_uid` (`photo_uid`),
KEY `idx_files_media_utc` (`media_utc`),
KEY `idx_files_instance_id` (`instance_id`),
KEY `idx_files_file_hash` (`file_hash`),
KEY `idx_files_missing_root` (`file_missing`,`file_root`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files_share` (
`file_id` int(10) unsigned NOT NULL,
`service_id` int(10) unsigned NOT NULL,
`remote_name` varbinary(255) NOT NULL,
`status` varbinary(16) DEFAULT NULL,
`error` varbinary(512) DEFAULT NULL,
`errors` int(11) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`file_id`,`service_id`,`remote_name`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files_sync` (
`remote_name` varbinary(255) NOT NULL,
`service_id` int(10) unsigned NOT NULL,
`file_id` int(10) unsigned DEFAULT NULL,
`remote_date` datetime DEFAULT NULL,
`remote_size` bigint(20) DEFAULT NULL,
`status` varbinary(16) DEFAULT NULL,
`error` varbinary(512) DEFAULT NULL,
`errors` int(11) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`remote_name`,`service_id`),
KEY `idx_files_sync_file_id` (`file_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `folders` (
`path` varbinary(1024) DEFAULT NULL,
`root` varbinary(16) DEFAULT '',
`folder_uid` varbinary(42) NOT NULL,
`folder_type` varbinary(16) DEFAULT NULL,
`folder_title` varchar(200) DEFAULT NULL,
`folder_category` varchar(100) DEFAULT NULL,
`folder_description` varchar(2048) DEFAULT NULL,
`folder_order` varbinary(32) DEFAULT NULL,
`folder_country` varbinary(2) DEFAULT 'zz',
`folder_year` int(11) DEFAULT NULL,
`folder_month` int(11) DEFAULT NULL,
`folder_day` int(11) DEFAULT NULL,
`folder_favorite` tinyint(1) DEFAULT NULL,
`folder_private` tinyint(1) DEFAULT NULL,
`folder_ignore` tinyint(1) DEFAULT NULL,
`folder_watch` tinyint(1) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`modified_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`folder_uid`),
UNIQUE KEY `idx_folders_path_root` (`path`,`root`),
KEY `idx_folders_folder_category` (`folder_category`),
KEY `idx_folders_country_year_month` (`folder_country`,`folder_year`,`folder_month`),
KEY `idx_folders_published_at` (`published_at`),
KEY `idx_folders_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `keywords` (
`id` int(10) unsigned NOT NULL,
`keyword` varchar(64) DEFAULT NULL,
`skip` tinyint(1) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_keywords_keyword` (`keyword`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `labels` (
`id` int(10) unsigned NOT NULL,
`label_uid` varbinary(42) DEFAULT NULL,
`label_slug` varbinary(160) DEFAULT NULL,
`custom_slug` varbinary(160) DEFAULT NULL,
`label_name` varchar(160) DEFAULT NULL,
`label_priority` int(11) DEFAULT NULL,
`label_favorite` tinyint(1) DEFAULT NULL,
`label_description` varchar(2048) DEFAULT NULL,
`label_notes` varchar(1024) DEFAULT NULL,
`photo_count` int(11) DEFAULT 1,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_labels_label_uid` (`label_uid`),
UNIQUE KEY `uix_labels_label_slug` (`label_slug`),
KEY `idx_labels_thumb` (`thumb`),
KEY `idx_labels_published_at` (`published_at`),
KEY `idx_labels_deleted_at` (`deleted_at`),
KEY `idx_labels_custom_slug` (`custom_slug`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `lenses` (
`id` int(10) unsigned NOT NULL,
`lens_slug` varbinary(160) DEFAULT NULL,
`lens_name` varchar(160) DEFAULT NULL,
`lens_make` varchar(160) DEFAULT NULL,
`lens_model` varchar(160) DEFAULT NULL,
`lens_type` varchar(100) DEFAULT NULL,
`lens_description` varchar(2048) DEFAULT NULL,
`lens_notes` varchar(1024) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_lenses_lens_slug` (`lens_slug`),
KEY `idx_lenses_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `links` (
`link_uid` varbinary(42) NOT NULL,
`share_uid` varbinary(42) DEFAULT NULL,
`share_slug` varbinary(160) DEFAULT NULL,
`link_token` varbinary(160) DEFAULT NULL,
`link_expires` int(11) DEFAULT NULL,
`link_views` int(10) unsigned DEFAULT NULL,
`max_views` int(10) unsigned DEFAULT NULL,
`has_password` tinyint(1) DEFAULT NULL,
`comment` varchar(512) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
`ref_id` varbinary(16) DEFAULT NULL,
`created_by` varbinary(42) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`modified_at` datetime DEFAULT NULL,
PRIMARY KEY (`link_uid`),
UNIQUE KEY `idx_links_uid_token` (`share_uid`,`link_token`),
KEY `idx_links_share_slug` (`share_slug`),
KEY `idx_links_created_by` (`created_by`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `markers` (
`marker_uid` varbinary(42) NOT NULL,
`file_uid` varbinary(42) DEFAULT '',
`marker_type` varbinary(8) DEFAULT '',
`marker_src` varbinary(8) DEFAULT '',
`marker_name` varchar(160) DEFAULT NULL,
`marker_review` tinyint(1) DEFAULT NULL,
`marker_invalid` tinyint(1) DEFAULT NULL,
`subj_uid` varbinary(42) DEFAULT NULL,
`subj_src` varbinary(8) DEFAULT '',
`face_id` varbinary(64) DEFAULT NULL,
`face_dist` double DEFAULT -1,
`embeddings_json` mediumblob DEFAULT NULL,
`landmarks_json` mediumblob DEFAULT NULL,
`x` float DEFAULT NULL,
`y` float DEFAULT NULL,
`w` float DEFAULT NULL,
`h` float DEFAULT NULL,
`q` int(11) DEFAULT NULL,
`size` int(11) DEFAULT -1,
`score` smallint(6) DEFAULT NULL,
`thumb` varbinary(128) DEFAULT '',
`matched_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`marker_uid`),
KEY `idx_markers_file_uid` (`file_uid`),
KEY `idx_markers_subj_uid_src` (`subj_uid`,`subj_src`),
KEY `idx_markers_face_id` (`face_id`),
KEY `idx_markers_thumb` (`thumb`),
KEY `idx_markers_matched_at` (`matched_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `migrations` (
`id` varchar(16) NOT NULL,
`dialect` varchar(16) DEFAULT NULL,
`stage` varchar(16) DEFAULT NULL,
`error` varchar(255) DEFAULT NULL,
`source` varchar(16) DEFAULT NULL,
`started_at` datetime DEFAULT NULL,
`finished_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `passcodes` (
`uid` varbinary(255) NOT NULL,
`key_type` varchar(64) NOT NULL DEFAULT '',
`key_url` varchar(2048) DEFAULT '',
`recovery_code` varchar(255) DEFAULT '',
`verified_at` datetime DEFAULT NULL,
`activated_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`uid`,`key_type`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `passwords` (
`uid` varbinary(255) NOT NULL,
`hash` varbinary(255) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `photos` (
`id` int(10) unsigned NOT NULL,
`uuid` varbinary(64) DEFAULT NULL,
`taken_at` datetime DEFAULT NULL,
`taken_at_local` datetime DEFAULT NULL,
`taken_src` varbinary(8) DEFAULT NULL,
`photo_uid` varbinary(42) DEFAULT NULL,
`photo_type` varbinary(8) DEFAULT 'image',
`type_src` varbinary(8) DEFAULT NULL,
`photo_title` varchar(200) DEFAULT NULL,
`title_src` varbinary(8) DEFAULT NULL,
`photo_caption` varchar(4096) DEFAULT NULL,
`caption_src` varbinary(8) DEFAULT NULL,
`photo_path` varbinary(1024) DEFAULT NULL,
`photo_name` varbinary(255) DEFAULT NULL,
`original_name` varbinary(755) DEFAULT NULL,
`photo_stack` tinyint(4) DEFAULT NULL,
`photo_favorite` tinyint(1) DEFAULT NULL,
`photo_private` tinyint(1) DEFAULT NULL,
`photo_scan` tinyint(1) DEFAULT NULL,
`photo_panorama` tinyint(1) DEFAULT NULL,
`time_zone` varbinary(64) DEFAULT NULL,
`place_id` varbinary(42) DEFAULT 'zz',
`place_src` varbinary(8) DEFAULT NULL,
`cell_id` varbinary(42) DEFAULT 'zz',
`cell_accuracy` int(11) DEFAULT NULL,
`photo_altitude` int(11) DEFAULT NULL,
`photo_lat` double DEFAULT NULL,
`photo_lng` double DEFAULT NULL,
`photo_country` varbinary(2) DEFAULT 'zz',
`photo_year` int(11) DEFAULT NULL,
`photo_month` int(11) DEFAULT NULL,
`photo_day` int(11) DEFAULT NULL,
`photo_iso` int(11) DEFAULT NULL,
`photo_exposure` varbinary(64) DEFAULT NULL,
`photo_f_number` float DEFAULT NULL,
`photo_focal_length` int(11) DEFAULT NULL,
`photo_quality` smallint(6) DEFAULT NULL,
`photo_faces` int(11) DEFAULT NULL,
`photo_resolution` smallint(6) DEFAULT NULL,
`photo_duration` bigint(20) DEFAULT NULL,
`photo_color` smallint(6) DEFAULT -1,
`camera_id` int(10) unsigned DEFAULT 1,
`camera_serial` varbinary(160) DEFAULT NULL,
`camera_src` varbinary(8) DEFAULT NULL,
`lens_id` int(10) unsigned DEFAULT 1,
`created_by` varbinary(42) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`edited_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`checked_at` datetime DEFAULT NULL,
`estimated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_photos_photo_uid` (`photo_uid`),
KEY `idx_photos_created_by` (`created_by`),
KEY `idx_photos_cell_id` (`cell_id`),
KEY `idx_photos_camera_lens` (`camera_id`,`lens_id`),
KEY `idx_photos_checked_at` (`checked_at`),
KEY `idx_photos_photo_lng` (`photo_lng`),
KEY `idx_photos_published_at` (`published_at`),
KEY `idx_photos_deleted_at` (`deleted_at`),
KEY `idx_photos_uuid` (`uuid`),
KEY `idx_photos_photo_lat` (`photo_lat`),
KEY `idx_photos_place_id` (`place_id`),
KEY `idx_photos_country_year_month` (`photo_country`,`photo_year`,`photo_month`),
KEY `idx_photos_ymd` (`photo_day`),
KEY `idx_photos_taken_uid` (`taken_at`,`photo_uid`),
KEY `idx_photos_path_name` (`photo_path`,`photo_name`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `photos_albums` (
`photo_uid` varbinary(42) NOT NULL,
`album_uid` varbinary(42) NOT NULL,
`order` int(11) DEFAULT NULL,
`hidden` tinyint(1) DEFAULT NULL,
`missing` tinyint(1) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`photo_uid`,`album_uid`),
KEY `idx_photos_albums_album_uid` (`album_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `photos_keywords` (
`photo_id` int(10) unsigned NOT NULL,
`keyword_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`photo_id`,`keyword_id`),
KEY `idx_photos_keywords_keyword_id` (`keyword_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `photos_labels` (
`photo_id` int(10) unsigned NOT NULL,
`label_id` int(10) unsigned NOT NULL,
`label_src` varbinary(8) DEFAULT NULL,
`uncertainty` smallint(6) DEFAULT NULL,
PRIMARY KEY (`photo_id`,`label_id`),
KEY `idx_photos_labels_label_id` (`label_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `photos_users` (
`uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) NOT NULL,
`team_uid` varbinary(42) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`uid`,`user_uid`),
KEY `idx_photos_users_user_uid` (`user_uid`),
KEY `idx_photos_users_team_uid` (`team_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `places` (
`id` varbinary(42) NOT NULL,
`place_label` varchar(400) DEFAULT NULL,
`place_district` varchar(100) DEFAULT NULL,
`place_city` varchar(100) DEFAULT NULL,
`place_state` varchar(100) DEFAULT NULL,
`place_country` varbinary(2) DEFAULT NULL,
`place_keywords` varchar(300) DEFAULT NULL,
`place_favorite` tinyint(1) DEFAULT NULL,
`photo_count` int(11) DEFAULT 1,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_places_place_district` (`place_district`),
KEY `idx_places_place_city` (`place_city`),
KEY `idx_places_place_state` (`place_state`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `reactions` (
`uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) NOT NULL,
`reaction` varbinary(64) NOT NULL,
`reacted` int(11) DEFAULT NULL,
`reacted_at` datetime DEFAULT NULL,
PRIMARY KEY (`uid`,`user_uid`,`reaction`),
KEY `idx_reactions_reacted_at` (`reacted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `services` (
`id` int(10) unsigned NOT NULL,
`acc_name` varchar(160) DEFAULT NULL,
`acc_owner` varchar(160) DEFAULT NULL,
`acc_url` varchar(255) DEFAULT NULL,
`acc_type` varbinary(255) DEFAULT NULL,
`acc_key` varbinary(255) DEFAULT NULL,
`acc_user` varbinary(255) DEFAULT NULL,
`acc_pass` varbinary(255) DEFAULT NULL,
`acc_timeout` varbinary(16) DEFAULT NULL,
`acc_error` varbinary(512) DEFAULT NULL,
`acc_errors` int(11) DEFAULT NULL,
`acc_share` tinyint(1) DEFAULT NULL,
`acc_sync` tinyint(1) DEFAULT NULL,
`retry_limit` int(11) DEFAULT NULL,
`share_path` varbinary(1024) DEFAULT NULL,
`share_size` varbinary(16) DEFAULT NULL,
`share_expires` int(11) DEFAULT NULL,
`sync_path` varbinary(1024) DEFAULT NULL,
`sync_status` varbinary(16) DEFAULT NULL,
`sync_interval` int(11) DEFAULT NULL,
`sync_date` datetime DEFAULT NULL,
`sync_upload` tinyint(1) DEFAULT NULL,
`sync_download` tinyint(1) DEFAULT NULL,
`sync_filenames` tinyint(1) DEFAULT NULL,
`sync_raw` tinyint(1) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_services_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `subjects` (
`subj_uid` varbinary(42) NOT NULL,
`subj_type` varbinary(8) DEFAULT '',
`subj_src` varbinary(8) DEFAULT '',
`subj_slug` varbinary(160) DEFAULT '',
`subj_name` varchar(160) DEFAULT '',
`subj_alias` varchar(160) DEFAULT '',
`subj_about` varchar(512) DEFAULT NULL,
`subj_bio` varchar(2048) DEFAULT NULL,
`subj_notes` varchar(1024) DEFAULT NULL,
`subj_favorite` tinyint(1) DEFAULT 0,
`subj_hidden` tinyint(1) DEFAULT 0,
`subj_private` tinyint(1) DEFAULT 0,
`subj_excluded` tinyint(1) DEFAULT 0,
`file_count` int(11) DEFAULT 0,
`photo_count` int(11) DEFAULT 0,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`subj_uid`),
UNIQUE KEY `uix_subjects_subj_name` (`subj_name`),
KEY `idx_subjects_subj_slug` (`subj_slug`),
KEY `idx_subjects_thumb` (`thumb`),
KEY `idx_subjects_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `versions` (
`id` int(10) unsigned NOT NULL,
`version` varchar(255) DEFAULT NULL,
`edition` varchar(255) DEFAULT NULL,
`error` varchar(255) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`migrated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `idx_version_edition` (`version`,`edition`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*M!100616 SET NOTE_VERBOSITY=@OLD_NOTE_VERBOSITY */;

View File

@@ -0,0 +1,158 @@
CREATE TABLE IF NOT EXISTS "files_share" ("file_id" integer,"service_id" integer,"remote_name" VARBINARY(255),"status" VARBINARY(16),"error" VARBINARY(512),"errors" integer,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("file_id","service_id","remote_name"));
CREATE TABLE IF NOT EXISTS "photos_labels" ("photo_id" integer,"label_id" integer,"label_src" VARBINARY(8),"uncertainty" SMALLINT , PRIMARY KEY ("photo_id","label_id"));
CREATE INDEX idx_photos_labels_label_id ON "photos_labels"(label_id) ;
CREATE TABLE IF NOT EXISTS "services" ("id" integer primary key autoincrement,"acc_name" VARCHAR(255),"acc_owner" VARCHAR(255),"acc_url" VARBINARY(512),"acc_type" VARBINARY(255),"acc_key" VARBINARY(255),"acc_user" VARBINARY(255),"acc_pass" VARBINARY(255),"acc_error" VARBINARY(512),"acc_errors" integer,"acc_share" bool,"acc_sync" bool,"retry_limit" integer,"share_path" VARBINARY(500),"share_size" VARBINARY(16),"share_expires" integer,"sync_path" VARBINARY(500),"sync_status" VARBINARY(16),"sync_interval" integer,"sync_date" datetime,"sync_upload" bool,"sync_download" bool,"sync_filenames" bool,"sync_raw" bool,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "acc_timeout" VARBINARY(16));
CREATE INDEX idx_accounts_deleted_at ON "services"(deleted_at) ;
CREATE TABLE IF NOT EXISTS "photos" ("id" integer primary key autoincrement,"uuid" VARBINARY(42),"taken_at" datetime,"taken_at_local" datetime,"taken_src" VARBINARY(8),"photo_uid" VARBINARY(42),"photo_type" VARBINARY(8) DEFAULT 'image',"type_src" VARBINARY(8),"photo_title" VARCHAR(255),"title_src" VARBINARY(8),"photo_description" TEXT,"description_src" VARBINARY(8),"photo_path" VARBINARY(500),"photo_name" VARBINARY(255),"original_name" VARBINARY(755),"photo_stack" integer,"photo_favorite" bool,"photo_private" bool,"photo_scan" bool,"photo_panorama" bool,"time_zone" VARBINARY(64),"place_id" VARBINARY(42) DEFAULT 'zz',"place_src" VARBINARY(8),"cell_id" VARBINARY(42) DEFAULT 'zz',"cell_accuracy" integer,"photo_altitude" integer,"photo_lat" FLOAT,"photo_lng" FLOAT,"photo_country" VARBINARY(2) DEFAULT 'zz',"photo_year" integer,"photo_month" integer,"photo_day" integer,"photo_iso" integer,"photo_exposure" VARBINARY(64),"photo_f_number" FLOAT,"photo_focal_length" integer,"photo_quality" SMALLINT,"photo_resolution" SMALLINT,"photo_color" integer,"camera_id" integer DEFAULT 1,"camera_serial" VARBINARY(255),"camera_src" VARBINARY(8),"lens_id" integer DEFAULT 1,"created_at" datetime,"updated_at" datetime,"edited_at" datetime,"checked_at" datetime,"deleted_at" datetime , "photo_faces" integer, "estimated_at" datetime, "photo_duration" bigint, "created_by" VARBINARY(42), "published_at" datetime);
CREATE INDEX idx_photos_taken_uid ON "photos"(taken_at, photo_uid) ;
CREATE INDEX idx_photos_cell_id ON "photos"(cell_id) ;
CREATE INDEX idx_photos_photo_lat ON "photos"(photo_lat) ;
CREATE INDEX idx_photos_photo_lng ON "photos"(photo_lng) ;
CREATE INDEX idx_photos_country_year_month ON "photos"(photo_country, photo_year, photo_month) ;
CREATE INDEX idx_photos_checked_at ON "photos"(checked_at) ;
CREATE INDEX idx_photos_deleted_at ON "photos"(deleted_at) ;
CREATE INDEX idx_photos_uuid ON "photos"("uuid") ;
CREATE INDEX idx_photos_path_name ON "photos"(photo_path, photo_name) ;
CREATE INDEX idx_photos_place_id ON "photos"(place_id) ;
CREATE INDEX idx_photos_camera_lens ON "photos"(camera_id, lens_id) ;
CREATE UNIQUE INDEX uix_photos_photo_uid ON "photos"(photo_uid) ;
CREATE TABLE IF NOT EXISTS "details" ("photo_id" integer,"keywords" TEXT,"keywords_src" VARBINARY(8),"notes" TEXT,"notes_src" VARBINARY(8),"subject" VARCHAR(255),"subject_src" VARBINARY(8),"artist" VARCHAR(255),"artist_src" VARBINARY(8),"copyright" VARCHAR(255),"copyright_src" VARBINARY(8),"license" VARCHAR(255),"license_src" VARBINARY(8),"created_at" datetime,"updated_at" datetime , "software" VARCHAR(1024), "software_src" VARBINARY(8), PRIMARY KEY ("photo_id"));
CREATE TABLE IF NOT EXISTS "lenses" ("id" integer primary key autoincrement,"lens_slug" VARBINARY(255),"lens_name" VARCHAR(255),"lens_make" VARCHAR(255),"lens_model" VARCHAR(255),"lens_type" VARCHAR(255),"lens_description" TEXT,"lens_notes" TEXT,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime );
CREATE INDEX idx_lenses_deleted_at ON "lenses"(deleted_at) ;
CREATE UNIQUE INDEX uix_lenses_lens_slug ON "lenses"(lens_slug) ;
CREATE TABLE IF NOT EXISTS "countries" ("id" VARBINARY(2),"country_slug" VARBINARY(255),"country_name" varchar(255),"country_description" TEXT,"country_notes" TEXT,"country_photo_id" integer , PRIMARY KEY ("id"));
CREATE UNIQUE INDEX uix_countries_country_slug ON "countries"(country_slug) ;
CREATE TABLE IF NOT EXISTS "photos_albums" ("photo_uid" VARBINARY(42),"album_uid" VARBINARY(42),"order" integer,"hidden" bool,"missing" bool,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("photo_uid","album_uid"));
CREATE INDEX idx_photos_albums_album_uid ON "photos_albums"(album_uid) ;
CREATE TABLE IF NOT EXISTS "categories" ("label_id" integer,"category_id" integer, PRIMARY KEY ("label_id","category_id"));
CREATE TABLE IF NOT EXISTS "labels" ("id" integer primary key autoincrement,"label_uid" VARBINARY(42),"label_slug" VARBINARY(255),"custom_slug" VARBINARY(255),"label_name" VARCHAR(255),"label_priority" integer,"label_favorite" bool,"label_description" TEXT,"label_notes" TEXT,"photo_count" integer DEFAULT 1,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "thumb" VARBINARY(128) DEFAULT '', "thumb_src" VARBINARY(8) DEFAULT '', "published_at" datetime);
CREATE INDEX idx_labels_custom_slug ON "labels"(custom_slug) ;
CREATE INDEX idx_labels_deleted_at ON "labels"(deleted_at) ;
CREATE UNIQUE INDEX uix_labels_label_uid ON "labels"(label_uid) ;
CREATE UNIQUE INDEX uix_labels_label_slug ON "labels"(label_slug) ;
CREATE TABLE IF NOT EXISTS "photos_keywords" ("photo_id" integer,"keyword_id" integer , PRIMARY KEY ("photo_id","keyword_id"));
CREATE INDEX idx_photos_keywords_keyword_id ON "photos_keywords"(keyword_id) ;
CREATE TABLE IF NOT EXISTS "passwords" ("uid" VARBINARY(255),"hash" VARBINARY(255),"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("uid"));
CREATE TABLE IF NOT EXISTS "duplicates" ("file_name" VARBINARY(755),"file_root" VARBINARY(16) DEFAULT '/',"file_hash" VARBINARY(128) DEFAULT '',"file_size" bigint,"mod_time" bigint , PRIMARY KEY ("file_name","file_root"));
CREATE INDEX idx_duplicates_file_hash ON "duplicates"(file_hash) ;
CREATE TABLE IF NOT EXISTS "places" ("id" VARBINARY(42),"place_label" VARBINARY(755),"place_city" VARCHAR(255),"place_state" VARCHAR(255),"place_country" VARBINARY(2),"place_keywords" VARCHAR(255),"place_favorite" bool,"photo_count" integer DEFAULT 1,"created_at" datetime,"updated_at" datetime , "place_district" VARCHAR(100), PRIMARY KEY ("id"));
CREATE TABLE IF NOT EXISTS "cameras" ("id" integer primary key autoincrement,"camera_slug" VARBINARY(255),"camera_name" VARCHAR(255),"camera_make" VARCHAR(255),"camera_model" VARCHAR(255),"camera_type" VARCHAR(255),"camera_description" TEXT,"camera_notes" TEXT,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime );
CREATE INDEX idx_cameras_deleted_at ON "cameras"(deleted_at) ;
CREATE UNIQUE INDEX uix_cameras_camera_slug ON "cameras"(camera_slug) ;
CREATE TABLE IF NOT EXISTS "keywords" ("id" integer primary key autoincrement,"keyword" VARCHAR(64),"skip" bool );
CREATE INDEX idx_keywords_keyword ON "keywords"("keyword") ;
CREATE TABLE IF NOT EXISTS "folders" ("path" VARBINARY(500),"root" VARBINARY(16) DEFAULT '',"folder_uid" VARBINARY(42),"folder_type" VARBINARY(16),"folder_title" VARCHAR(255),"folder_category" VARCHAR(255),"folder_description" TEXT,"folder_order" VARBINARY(32),"folder_country" VARBINARY(2) DEFAULT 'zz',"folder_year" integer,"folder_month" integer,"folder_day" integer,"folder_favorite" bool,"folder_private" bool,"folder_ignore" bool,"folder_watch" bool,"created_at" datetime,"updated_at" datetime,"modified_at" datetime,"deleted_at" datetime , "published_at" datetime, PRIMARY KEY ("folder_uid"));
CREATE INDEX idx_folders_folder_category ON "folders"(folder_category) ;
CREATE INDEX idx_folders_country_year_month ON "folders"(folder_country, folder_year, folder_month) ;
CREATE INDEX idx_folders_deleted_at ON "folders"(deleted_at) ;
CREATE UNIQUE INDEX idx_folders_path_root ON "folders"("path", "root") ;
CREATE TABLE IF NOT EXISTS "users" ("id" integer primary key autoincrement,"address_id" integer DEFAULT 1,"user_uid" VARBINARY(42),"mother_uid" VARBINARY(42),"father_uid" VARBINARY(42),"global_uid" VARBINARY(42),"full_name" varchar(128),"nick_name" varchar(64),"maiden_name" varchar(64),"artist_name" varchar(64),"user_name" varchar(64),"user_status" varchar(32),"user_disabled" bool,"user_settings" LONGTEXT,"primary_email" varchar(255),"email_confirmed" bool,"backup_email" varchar(255),"person_url" VARBINARY(255),"person_phone" varchar(32),"person_status" varchar(32),"person_avatar" VARBINARY(255),"person_location" varchar(128),"person_bio" TEXT,"person_accounts" LONGTEXT,"business_url" VARBINARY(255),"business_phone" varchar(32),"business_email" varchar(255),"company_name" varchar(128),"department_name" varchar(128),"job_title" varchar(64),"birth_year" integer,"birth_month" integer,"birth_day" integer,"terms_accepted" bool,"is_artist" bool,"is_subject" bool,"role_admin" bool,"role_guest" bool,"role_child" bool,"role_family" bool,"role_friend" bool,"webdav" bool,"storage_path" VARBINARY(500),"can_invite" bool,"invite_token" VARBINARY(32),"invited_by" VARBINARY(32),"confirm_token" VARBINARY(64),"reset_token" VARBINARY(64),"api_token" VARBINARY(128),"api_secret" VARBINARY(128),"login_attempts" integer,"login_at" datetime,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "external_id" varchar(255));
CREATE INDEX idx_users_deleted_at ON "users"(deleted_at) ;
CREATE INDEX idx_users_global_uid ON "users"(global_uid) ;
CREATE INDEX idx_users_primary_email ON "users"(primary_email) ;
CREATE UNIQUE INDEX uix_users_user_uid ON "users"(user_uid) ;
CREATE TABLE IF NOT EXISTS "files" ("id" integer primary key autoincrement,"photo_id" integer,"photo_uid" VARBINARY(42),"instance_id" VARBINARY(42),"file_uid" VARBINARY(42),"file_name" VARBINARY(755),"file_root" VARBINARY(16) DEFAULT '/',"original_name" VARBINARY(755),"file_hash" VARBINARY(128),"file_size" bigint,"file_codec" VARBINARY(32),"file_type" VARBINARY(32),"file_mime" VARBINARY(64),"file_primary" bool,"file_sidecar" bool,"file_missing" bool,"file_portrait" bool,"file_video" bool,"file_duration" bigint,"file_width" integer,"file_height" integer,"file_orientation" integer,"file_projection" VARBINARY(16),"file_aspect_ratio" FLOAT,"file_main_color" VARBINARY(16),"file_colors" VARBINARY(9),"file_luminance" VARBINARY(9),"file_diff" integer,"file_chroma" integer,"file_error" VARBINARY(512),"mod_time" bigint,"created_at" datetime,"created_in" bigint,"updated_at" datetime,"updated_in" bigint,"deleted_at" datetime , "photo_taken_at" DATETIME, "time_index" VARBINARY(48), "media_id" VARBINARY(32), "media_utc" bigint, "media_type" VARBINARY(16), "file_fps" real, "file_frames" integer, "file_hdr" bool, "file_watermark" bool, "file_color_profile" VARBINARY(64), "file_software" VARCHAR(64), "published_at" datetime, "file_orientation_src" VARBINARY(8) DEFAULT '');
CREATE INDEX idx_files_instance_id ON "files"(instance_id) ;
CREATE INDEX idx_files_file_hash ON "files"(file_hash) ;
CREATE INDEX idx_files_file_main_color ON "files"(file_main_color) ;
CREATE INDEX idx_files_deleted_at ON "files"(deleted_at) ;
CREATE INDEX idx_files_photo_id ON "files"(photo_id) ;
CREATE INDEX idx_files_photo_uid ON "files"(photo_uid) ;
CREATE UNIQUE INDEX uix_files_file_uid ON "files"(file_uid) ;
CREATE UNIQUE INDEX idx_files_name_root ON "files"(file_name, file_root) ;
CREATE TABLE IF NOT EXISTS "files_sync" ("remote_name" VARBINARY(255),"service_id" integer,"file_id" integer,"remote_date" datetime,"remote_size" bigint,"status" VARBINARY(16),"error" VARBINARY(512),"errors" integer,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("remote_name","service_id"));
CREATE INDEX idx_files_sync_file_id ON "files_sync"(file_id) ;
CREATE TABLE IF NOT EXISTS "cells" ("id" VARBINARY(42),"cell_name" VARCHAR(255),"cell_category" VARCHAR(64),"place_id" VARBINARY(42) DEFAULT 'zz',"created_at" datetime,"updated_at" datetime , "cell_street" VARCHAR(100), "cell_postcode" VARCHAR(50), PRIMARY KEY ("id"));
CREATE TABLE IF NOT EXISTS "albums" ("id" integer primary key autoincrement,"album_uid" VARBINARY(42),"cover_uid" VARBINARY(42),"folder_uid" VARBINARY(42),"album_slug" VARBINARY(255),"album_path" VARBINARY(500),"album_type" VARBINARY(8) DEFAULT 'album',"album_title" VARCHAR(255),"album_location" VARCHAR(255),"album_category" VARCHAR(255),"album_caption" TEXT,"album_description" TEXT,"album_notes" TEXT,"album_filter" VARBINARY(1024),"album_order" VARBINARY(32),"album_template" VARBINARY(255),"album_country" VARBINARY(2) DEFAULT 'zz',"album_year" integer,"album_month" integer,"album_day" integer,"album_favorite" bool,"album_private" bool,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "parent_uid" VARBINARY(42) DEFAULT '', "thumb" VARBINARY(128) DEFAULT '', "thumb_src" VARBINARY(8) DEFAULT '', "album_state" VARCHAR(100), "created_by" VARBINARY(42), "published_at" datetime);
CREATE INDEX idx_albums_album_category ON "albums"(album_category) ;
CREATE INDEX idx_albums_country_year_month ON "albums"(album_country, album_year, album_month) ;
CREATE INDEX idx_albums_deleted_at ON "albums"(deleted_at) ;
CREATE INDEX idx_albums_folder_uid ON "albums"(folder_uid) ;
CREATE INDEX idx_albums_album_slug ON "albums"(album_slug) ;
CREATE INDEX idx_albums_album_path ON "albums"(album_path) ;
CREATE UNIQUE INDEX uix_albums_album_uid ON "albums"(album_uid) ;
CREATE TABLE IF NOT EXISTS "links" ("link_uid" VARBINARY(42),"share_uid" VARBINARY(42),"share_slug" VARBINARY(255),"link_token" VARBINARY(255),"link_expires" integer,"link_views" integer,"max_views" integer,"has_password" bool,"can_comment" bool,"can_edit" bool,"created_at" datetime,"modified_at" datetime , "comment" varchar(512), "perm" integer, "ref_id" VARBINARY(16), "created_by" VARBINARY(42), PRIMARY KEY ("link_uid"));
CREATE INDEX idx_links_share_slug ON "links"(share_slug) ;
CREATE UNIQUE INDEX idx_links_uid_token ON "links"(share_uid, link_token) ;
CREATE TABLE IF NOT EXISTS "errors" ("id" integer primary key autoincrement,"error_time" datetime,"error_level" VARBINARY(32),"error_message" VARBINARY(2048) );
CREATE INDEX idx_errors_error_time ON "errors"(error_time) ;
CREATE INDEX idx_labels_thumb ON "labels"("thumb") ;
CREATE TABLE IF NOT EXISTS "markers" ("marker_uid" VARBINARY(42),"file_uid" VARBINARY(42) DEFAULT '',"marker_type" VARBINARY(8) DEFAULT '',"marker_src" VARBINARY(8) DEFAULT '',"marker_name" VARCHAR(255),"marker_review" bool,"marker_invalid" bool,"subj_uid" VARBINARY(42),"subj_src" VARBINARY(8) DEFAULT '',"face_id" VARBINARY(42),"face_dist" real DEFAULT -1,"embeddings_json" MEDIUMBLOB,"landmarks_json" MEDIUMBLOB,"x" FLOAT,"y" FLOAT,"w" FLOAT,"h" FLOAT,"q" integer,"size" integer DEFAULT -1,"score" SMALLINT,"thumb" VARBINARY(128) DEFAULT '',"matched_at" datetime,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("marker_uid"));
CREATE INDEX idx_markers_subj_uid_src ON "markers"(subj_uid, subj_src) ;
CREATE INDEX idx_markers_face_id ON "markers"(face_id) ;
CREATE INDEX idx_markers_thumb ON "markers"("thumb") ;
CREATE INDEX idx_markers_matched_at ON "markers"(matched_at) ;
CREATE INDEX idx_markers_file_uid ON "markers"(file_uid) ;
CREATE INDEX idx_photos_ymd ON "photos"(photo_day) ;
CREATE INDEX idx_albums_thumb ON "albums"("thumb") ;
CREATE INDEX idx_albums_album_title ON "albums"(album_title) ;
CREATE INDEX idx_albums_ymd ON "albums"(album_day) ;
CREATE TABLE IF NOT EXISTS "subjects" ("subj_uid" VARBINARY(42),"subj_type" VARBINARY(8) DEFAULT '',"subj_src" VARBINARY(8) DEFAULT '',"subj_slug" VARBINARY(255) DEFAULT '',"subj_name" VARCHAR(255) DEFAULT '',"subj_alias" VARCHAR(255) DEFAULT '',"subj_bio" TEXT,"subj_notes" TEXT,"subj_favorite" bool DEFAULT false,"subj_private" bool DEFAULT false,"subj_excluded" bool DEFAULT false,"file_count" integer DEFAULT 0,"thumb" VARBINARY(128) DEFAULT '',"thumb_src" VARBINARY(8) DEFAULT '',"metadata_json" MEDIUMBLOB,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "subj_hidden" bool DEFAULT false, "photo_count" integer DEFAULT 0, "subj_about" varchar(512), PRIMARY KEY ("subj_uid"));
CREATE INDEX idx_subjects_subj_slug ON "subjects"(subj_slug) ;
CREATE INDEX idx_subjects_thumb ON "subjects"("thumb") ;
CREATE INDEX idx_subjects_deleted_at ON "subjects"(deleted_at) ;
CREATE UNIQUE INDEX uix_subjects_subj_name ON "subjects"(subj_name) ;
CREATE TABLE IF NOT EXISTS "faces" ("id" VARBINARY(42),"face_src" VARBINARY(8),"face_hidden" bool,"subj_uid" VARBINARY(42) DEFAULT '',"samples" integer,"sample_radius" real,"collisions" integer,"collision_radius" real,"embedding_json" MEDIUMBLOB,"matched_at" datetime,"created_at" datetime,"updated_at" datetime , "face_kind" integer, PRIMARY KEY ("id"));
CREATE INDEX idx_faces_subj_uid ON "faces"(subj_uid) ;
CREATE TABLE IF NOT EXISTS "migrations" ("id" varchar(16),"dialect" varchar(16),"error" varchar(255),"source" varchar(16),"started_at" datetime,"finished_at" datetime , "stage" varchar(16), PRIMARY KEY ("id"));
CREATE INDEX idx_places_place_district ON "places"(place_district) ;
CREATE INDEX idx_places_place_city ON "places"(place_city) ;
CREATE INDEX idx_places_place_state ON "places"(place_state) ;
CREATE INDEX idx_albums_album_state ON "albums"(album_state) ;
CREATE INDEX idx_files_photo_taken_at ON "files"(photo_taken_at) ;
CREATE INDEX idx_files_media_utc ON "files"(media_utc) ;
CREATE INDEX idx_albums_album_filter ON albums (album_filter);
CREATE UNIQUE INDEX idx_files_search_media ON files (media_id);
CREATE UNIQUE INDEX idx_files_search_timeline ON files (time_index);
CREATE INDEX idx_services_deleted_at ON "services"(deleted_at) ;
CREATE TABLE IF NOT EXISTS "photos_users" ("uid" VARBINARY(42),"user_uid" VARBINARY(42),"team_uid" VARBINARY(42),"perm" integer , PRIMARY KEY ("uid","user_uid"));
CREATE INDEX idx_photos_users_user_uid ON "photos_users"(user_uid) ;
CREATE INDEX idx_photos_users_team_uid ON "photos_users"(team_uid) ;
CREATE TABLE IF NOT EXISTS "auth_users_shares" ("user_uid" VARBINARY(42),"share_uid" VARBINARY(42),"link_uid" VARBINARY(42),"expires_at" datetime,"comment" varchar(512),"perm" integer,"ref_id" VARBINARY(16),"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("user_uid","share_uid"));
CREATE INDEX idx_auth_users_shares_share_uid ON "auth_users_shares"(share_uid) ;
CREATE INDEX idx_auth_users_shares_expires_at ON "auth_users_shares"(expires_at) ;
CREATE TABLE IF NOT EXISTS "auth_users_details" ("user_uid" VARBINARY(42),"subj_uid" VARBINARY(42),"subj_src" VARBINARY(8) DEFAULT '',"place_id" VARBINARY(42) DEFAULT 'zz',"place_src" VARBINARY(8),"cell_id" VARBINARY(42) DEFAULT 'zz',"birth_year" integer,"birth_month" integer,"birth_day" integer,"name_title" varchar(32),"given_name" varchar(64),"middle_name" varchar(64),"family_name" varchar(64),"name_suffix" varchar(32),"nick_name" varchar(64),"name_src" VARBINARY(8),"user_gender" varchar(16),"user_about" varchar(512),"user_bio" varchar(512),"user_location" varchar(512),"user_country" VARBINARY(2),"user_phone" varchar(32),"site_url" VARBINARY(512),"profile_url" VARBINARY(512),"feed_url" VARBINARY(512),"avatar_url" VARBINARY(512),"org_title" varchar(64),"org_name" varchar(128),"org_email" varchar(255),"org_phone" varchar(32),"org_url" VARBINARY(512),"id_url" VARBINARY(512),"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("user_uid"));
CREATE INDEX idx_auth_users_details_subj_uid ON "auth_users_details"(subj_uid) ;
CREATE INDEX idx_auth_users_details_place_id ON "auth_users_details"(place_id) ;
CREATE INDEX idx_auth_users_details_cell_id ON "auth_users_details"(cell_id) ;
CREATE INDEX idx_auth_users_details_org_email ON "auth_users_details"(org_email) ;
CREATE TABLE IF NOT EXISTS "auth_sessions" ("id" VARBINARY(2048),"client_ip" varchar(64),"user_uid" VARBINARY(42) DEFAULT '',"user_name" varchar(64),"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_domain" VARBINARY(255) DEFAULT '',"auth_id" VARBINARY(128) DEFAULT '',"auth_scope" varchar(1024) DEFAULT '',"last_active" bigint,"sess_expires" bigint,"sess_timeout" bigint,"preview_token" VARBINARY(64) DEFAULT '',"download_token" VARBINARY(64) DEFAULT '',"access_token" VARBINARY(4096) DEFAULT '',"refresh_token" VARBINARY(512) DEFAULT '',"id_token" VARBINARY(1024) DEFAULT '',"user_agent" varchar(512),"data_json" VARBINARY(4096),"ref_id" VARBINARY(16) DEFAULT '',"login_ip" varchar(64),"login_at" datetime,"created_at" datetime,"updated_at" datetime , "client_uid" VARBINARY(42) DEFAULT '', "client_name" varchar(200) DEFAULT '', "grant_type" VARBINARY(64) DEFAULT '', PRIMARY KEY ("id"));
CREATE INDEX idx_auth_sessions_client_ip ON "auth_sessions"(client_ip) ;
CREATE INDEX idx_auth_sessions_user_uid ON "auth_sessions"(user_uid) ;
CREATE INDEX idx_auth_sessions_user_name ON "auth_sessions"(user_name) ;
CREATE INDEX idx_auth_sessions_auth_id ON "auth_sessions"(auth_id) ;
CREATE INDEX idx_auth_sessions_sess_expires ON "auth_sessions"(sess_expires) ;
CREATE TABLE IF NOT EXISTS "auth_users_settings" ("user_uid" VARBINARY(42),"ui_theme" VARBINARY(32),"ui_language" VARBINARY(32),"ui_time_zone" VARBINARY(64),"maps_style" VARBINARY(32),"maps_animate" integer,"index_path" VARBINARY(1024),"index_rescan" integer,"import_path" VARBINARY(1024),"import_move" integer,"upload_path" VARBINARY(1024),"default_page" VARBINARY(128),"created_at" datetime,"updated_at" datetime , "download_originals" integer DEFAULT 0, "download_media_raw" integer DEFAULT 0, "download_media_sidecar" integer DEFAULT 0, PRIMARY KEY ("user_uid"));
CREATE INDEX idx_photos_published_at ON "photos"(published_at) ;
CREATE INDEX idx_photos_created_by ON "photos"(created_by) ;
CREATE TABLE IF NOT EXISTS "auth_users" ("id" integer primary key autoincrement,"user_uuid" VARBINARY(64),"user_uid" VARBINARY(42),"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_issuer" VARBINARY(255) DEFAULT '',"auth_id" VARBINARY(128) DEFAULT '',"user_name" varchar(64),"display_name" varchar(200),"user_email" varchar(255),"backup_email" varchar(255),"user_role" varchar(64) DEFAULT '',"user_attr" varchar(1024),"super_admin" bool,"can_login" bool,"login_at" datetime,"expires_at" datetime,"webdav" bool,"base_path" VARBINARY(1024),"upload_path" VARBINARY(1024),"can_invite" bool,"invite_token" VARBINARY(64),"invited_by" varchar(64),"verify_token" VARBINARY(64),"verified_at" datetime,"consent_at" datetime,"born_at" datetime,"reset_token" VARBINARY(64),"preview_token" VARBINARY(64),"download_token" VARBINARY(64),"thumb" VARBINARY(128) DEFAULT '',"thumb_src" VARBINARY(8) DEFAULT '',"ref_id" VARBINARY(16),"created_at" datetime,"updated_at" datetime,"deleted_at" datetime);
CREATE INDEX idx_auth_users_auth_id ON "auth_users"(auth_id) ;
CREATE INDEX idx_auth_users_user_email ON "auth_users"(user_email) ;
CREATE INDEX idx_auth_users_invite_token ON "auth_users"(invite_token) ;
CREATE INDEX idx_auth_users_born_at ON "auth_users"(born_at) ;
CREATE INDEX idx_auth_users_thumb ON "auth_users"("thumb") ;
CREATE INDEX idx_auth_users_deleted_at ON "auth_users"(deleted_at) ;
CREATE INDEX idx_auth_users_user_uuid ON "auth_users"(user_uuid) ;
CREATE INDEX idx_auth_users_expires_at ON "auth_users"(expires_at) ;
CREATE INDEX idx_auth_users_user_name ON "auth_users"(user_name) ;
CREATE UNIQUE INDEX uix_auth_users_user_uid ON "auth_users"(user_uid) ;
CREATE INDEX idx_files_published_at ON "files"(published_at) ;
CREATE TABLE IF NOT EXISTS "reactions" ("uid" VARBINARY(42),"user_uid" VARBINARY(42),"reaction" VARBINARY(64),"reacted" integer,"reacted_at" datetime , PRIMARY KEY ("uid","user_uid","reaction"));
CREATE INDEX idx_reactions_reacted_at ON "reactions"(reacted_at) ;
CREATE INDEX idx_folders_published_at ON "folders"(published_at) ;
CREATE INDEX idx_labels_published_at ON "labels"(published_at) ;
CREATE INDEX idx_albums_created_by ON "albums"(created_by) ;
CREATE INDEX idx_albums_published_at ON "albums"(published_at) ;
CREATE TABLE IF NOT EXISTS "albums_users" ("uid" VARBINARY(42),"user_uid" VARBINARY(42),"team_uid" VARBINARY(42),"perm" integer , PRIMARY KEY ("uid","user_uid"));
CREATE INDEX idx_albums_users_user_uid ON "albums_users"(user_uid) ;
CREATE INDEX idx_albums_users_team_uid ON "albums_users"(team_uid) ;
CREATE INDEX idx_links_created_by ON "links"(created_by) ;
CREATE INDEX idx_files_missing_root ON files (file_missing, file_root);
CREATE TABLE IF NOT EXISTS "versions" ("id" integer primary key autoincrement,"version" varchar(255),"edition" varchar(255),"error" varchar(255),"created_at" datetime,"updated_at" datetime,"migrated_at" datetime );
CREATE UNIQUE INDEX idx_version_edition ON "versions"("version", "edition") ;
CREATE INDEX idx_files_file_error ON "files"(file_error) ;
CREATE TABLE IF NOT EXISTS "auth_clients" ("client_uid" VARBINARY(42),"user_uid" VARBINARY(42) DEFAULT '',"user_name" varchar(200),"client_name" varchar(200),"client_role" varchar(64) DEFAULT '',"client_type" VARBINARY(16),"client_url" VARBINARY(255) DEFAULT '',"callback_url" VARBINARY(255) DEFAULT '',"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_scope" varchar(1024) DEFAULT '',"auth_expires" bigint,"auth_tokens" bigint,"auth_enabled" bool,"last_active" bigint,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("client_uid"));
CREATE INDEX idx_auth_clients_user_name ON "auth_clients"(user_name) ;
CREATE INDEX idx_auth_clients_user_uid ON "auth_clients"(user_uid) ;
CREATE TABLE IF NOT EXISTS "passcodes" ("uid" VARBINARY(255),"key_type" varchar(64) DEFAULT '',"key_url" varchar(2048) DEFAULT '',"recovery_code" varchar(255) DEFAULT '',"verified_at" datetime,"activated_at" datetime,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("uid","key_type"));
CREATE INDEX idx_auth_sessions_client_uid ON "auth_sessions"(client_uid) ;

View File

@@ -8,8 +8,8 @@ import (
"sync"
"time"
"github.com/jinzhu/gorm"
"github.com/ulule/deepcopier"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/entity/sortby"
"github.com/photoprism/photoprism/internal/event"
@@ -43,36 +43,36 @@ type Albums []Album
// Album represents a photo album
type Album struct {
ID uint `gorm:"primary_key" json:"ID" yaml:"-"`
AlbumUID string `gorm:"type:VARBINARY(42);unique_index;" json:"UID" yaml:"UID"`
ParentUID string `gorm:"type:VARBINARY(42);default:'';" json:"ParentUID,omitempty" yaml:"ParentUID,omitempty"`
AlbumSlug string `gorm:"type:VARBINARY(160);index;" json:"Slug" yaml:"Slug"`
AlbumPath string `gorm:"type:VARCHAR(1024);index;" json:"Path,omitempty" yaml:"Path,omitempty"`
AlbumType string `gorm:"type:VARBINARY(8);default:'album';" json:"Type" yaml:"Type,omitempty"`
AlbumTitle string `gorm:"type:VARCHAR(160);index;" json:"Title" yaml:"Title"`
AlbumLocation string `gorm:"type:VARCHAR(160);" json:"Location" yaml:"Location,omitempty"`
AlbumCategory string `gorm:"type:VARCHAR(100);index;" json:"Category" yaml:"Category,omitempty"`
AlbumCaption string `gorm:"type:VARCHAR(1024);" json:"Caption" yaml:"Caption,omitempty"`
AlbumDescription string `gorm:"type:VARCHAR(2048);" json:"Description" yaml:"Description,omitempty"`
AlbumNotes string `gorm:"type:VARCHAR(1024);" json:"Notes" yaml:"Notes,omitempty"`
AlbumFilter string `gorm:"type:VARBINARY(2048);" json:"Filter" yaml:"Filter,omitempty"`
AlbumOrder string `gorm:"type:VARBINARY(32);" json:"Order" yaml:"Order,omitempty"`
AlbumTemplate string `gorm:"type:VARBINARY(255);" json:"Template" yaml:"Template,omitempty"`
AlbumState string `gorm:"type:VARCHAR(100);index;" json:"State" yaml:"State,omitempty"`
AlbumCountry string `gorm:"type:VARBINARY(2);index:idx_albums_country_year_month;default:'zz';" json:"Country" yaml:"Country,omitempty"`
AlbumYear int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Year" yaml:"Year,omitempty"`
AlbumMonth int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Month" yaml:"Month,omitempty"`
AlbumDay int `gorm:"index:idx_albums_ymd;" json:"Day" yaml:"Day,omitempty"`
AlbumFavorite bool `json:"Favorite" yaml:"Favorite,omitempty"`
AlbumPrivate bool `json:"Private" yaml:"Private,omitempty"`
Thumb string `gorm:"type:VARBINARY(128);index;default:'';" json:"Thumb" yaml:"Thumb,omitempty"`
ThumbSrc string `gorm:"type:VARBINARY(8);default:'';" json:"ThumbSrc,omitempty" yaml:"ThumbSrc,omitempty"`
CreatedBy string `gorm:"type:VARBINARY(42);index" json:"CreatedBy,omitempty" yaml:"CreatedBy,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"CreatedAt,omitempty"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"UpdatedAt,omitempty"`
PublishedAt *time.Time `sql:"index" json:"PublishedAt,omitempty" yaml:"PublishedAt,omitempty"`
DeletedAt *time.Time `sql:"index" json:"DeletedAt" yaml:"DeletedAt,omitempty"`
Photos PhotoAlbums `gorm:"foreignkey:AlbumUID;association_foreignkey:AlbumUID;" json:"-" yaml:"Photos,omitempty"`
ID uint `gorm:"primaryKey;" json:"ID" yaml:"-"`
AlbumUID string `gorm:"type:bytes;size:42;uniqueIndex;" json:"UID" yaml:"UID"`
ParentUID string `gorm:"type:bytes;size:42;default:'';" json:"ParentUID,omitempty" yaml:"ParentUID,omitempty"`
AlbumSlug string `gorm:"type:bytes;size:160;index;" json:"Slug" yaml:"Slug"`
AlbumPath string `gorm:"type:bytes;size:1024;index;" json:"Path,omitempty" yaml:"Path,omitempty"`
AlbumType string `gorm:"type:bytes;size:8;default:'album';" json:"Type" yaml:"Type,omitempty"`
AlbumTitle string `gorm:"size:160;index;" json:"Title" yaml:"Title"`
AlbumLocation string `gorm:"size:160;" json:"Location" yaml:"Location,omitempty"`
AlbumCategory string `gorm:"size:100;index;" json:"Category" yaml:"Category,omitempty"`
AlbumCaption string `gorm:"size:1024;" json:"Caption" yaml:"Caption,omitempty"`
AlbumDescription string `gorm:"size:2048;" json:"Description" yaml:"Description,omitempty"`
AlbumNotes string `gorm:"size:1024;" json:"Notes" yaml:"Notes,omitempty"`
AlbumFilter string `gorm:"type:bytes;size:2048;" json:"Filter" yaml:"Filter,omitempty"`
AlbumOrder string `gorm:"type:bytes;size:32;" json:"Order" yaml:"Order,omitempty"`
AlbumTemplate string `gorm:"type:bytes;size:255;" json:"Template" yaml:"Template,omitempty"`
AlbumState string `gorm:"size:100;index;" json:"State" yaml:"State,omitempty"`
AlbumCountry string `gorm:"type:bytes;size:2;index:idx_albums_country_year_month;default:'zz';" json:"Country" yaml:"Country,omitempty"`
AlbumYear int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Year" yaml:"Year,omitempty"`
AlbumMonth int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Month" yaml:"Month,omitempty"`
AlbumDay int `gorm:"index:idx_albums_ymd;" json:"Day" yaml:"Day,omitempty"`
AlbumFavorite bool `json:"Favorite" yaml:"Favorite,omitempty"`
AlbumPrivate bool `json:"Private" yaml:"Private,omitempty"`
Thumb string `gorm:"type:bytes;size:128;index;default:'';" json:"Thumb" yaml:"Thumb,omitempty"`
ThumbSrc string `gorm:"type:bytes;size:8;default:'';" json:"ThumbSrc,omitempty" yaml:"ThumbSrc,omitempty"`
CreatedBy string `gorm:"type:bytes;size:42;index" json:"CreatedBy,omitempty" yaml:"CreatedBy,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"CreatedAt,omitempty"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"UpdatedAt,omitempty"`
PublishedAt *time.Time `sql:"index" json:"PublishedAt,omitempty" yaml:"PublishedAt,omitempty"`
DeletedAt gorm.DeletedAt `sql:"index" json:"DeletedAt" yaml:"DeletedAt,omitempty"`
Photos []PhotoAlbum `gorm:"foreignkey:AlbumUID;references:AlbumUID" json:"-" yaml:"Photos,omitempty"`
}
// AfterUpdate flushes the album cache when an album is updated.
@@ -96,7 +96,7 @@ func (Album) TableName() string {
func UpdateAlbum(albumUID string, values interface{}) (err error) {
if rnd.InvalidUID(albumUID, AlbumUID) {
return fmt.Errorf("album: invalid uid %s", clean.Log(albumUID))
} else if err = Db().Model(Album{}).Where("album_uid = ?", albumUID).UpdateColumns(values).Error; err != nil {
} else if err = Db().Model(&Album{}).Where("album_uid = ?", albumUID).UpdateColumns(values).Error; err != nil {
return err
}
@@ -152,7 +152,7 @@ func AddPhotoToUserAlbums(photoUid string, albums []string, sortOrder, userUid s
}
// Refresh updated timestamp.
err = UpdateAlbum(albumUid, Map{"updated_at": TimeStamp()})
err = UpdateAlbum(albumUid, map[string]interface{}{"updated_at": TimeStamp()})
}
}
@@ -452,14 +452,14 @@ func (m *Album) Find() *Album {
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Album) BeforeCreate(scope *gorm.Scope) error {
func (m *Album) BeforeCreate(scope *gorm.DB) error {
if rnd.IsUID(m.AlbumUID, AlbumUID) {
return nil
}
m.AlbumUID = rnd.GenerateUID(AlbumUID)
return scope.SetColumn("AlbumUID", m.AlbumUID)
scope.Statement.SetColumn("AlbumUID", m.AlbumUID)
return scope.Error
}
// String returns the id or name as string.
@@ -567,7 +567,7 @@ func (m *Album) UpdateTitleAndLocation(title, location, state, country, slug str
// Skip location?
if location == "" && state == "" && (country == "" || country == "zz") {
return m.Updates(Map{
return m.Updates(map[string]interface{}{
"album_title": m.AlbumTitle,
"album_slug": m.AlbumSlug,
})
@@ -575,7 +575,7 @@ func (m *Album) UpdateTitleAndLocation(title, location, state, country, slug str
m.SetLocation(location, state, country)
return m.Updates(Map{
return m.Updates(map[string]interface{}{
"album_title": m.AlbumTitle,
"album_location": m.AlbumLocation,
"album_state": m.AlbumState,
@@ -626,7 +626,7 @@ func (m *Album) UpdateTitleAndState(title, slug, stateName, countryCode string)
m.SetTitle(title)
return m.Updates(Map{"album_title": m.AlbumTitle, "album_slug": m.AlbumSlug, "album_location": m.AlbumLocation, "album_country": m.AlbumCountry, "album_state": m.AlbumState})
return m.Updates(map[string]interface{}{"album_title": m.AlbumTitle, "album_slug": m.AlbumSlug, "album_location": m.AlbumLocation, "album_country": m.AlbumCountry, "album_state": m.AlbumState})
}
// SaveForm updates the entity using form data and stores it in the database.
@@ -747,11 +747,11 @@ func (m *Album) Delete() error {
now := Now()
if err := UnscopedDb().Model(m).UpdateColumns(Map{"updated_at": now, "deleted_at": now}).Error; err != nil {
if err := UnscopedDb().Model(m).UpdateColumns(map[string]interface{}{"updated_at": now, "deleted_at": now}).Error; err != nil {
return err
} else {
m.UpdatedAt = now
m.DeletedAt = &now
m.DeletedAt = gorm.DeletedAt{Time: now, Valid: true}
FlushAlbumCache()
}
@@ -783,11 +783,7 @@ func (m *Album) DeletePermanently() error {
// Deleted tests if the entity is deleted.
func (m *Album) Deleted() bool {
if m.DeletedAt == nil {
return false
}
return !m.DeletedAt.IsZero()
return m.DeletedAt.Valid
}
// Restore restores the entity in the database.
@@ -804,7 +800,7 @@ func (m *Album) Restore() error {
return err
}
m.DeletedAt = nil
m.DeletedAt = gorm.DeletedAt{}
m.PublishCountChange(1)
event.PublishUserEntities("albums", event.EntityCreated, []*Album{m}, m.CreatedBy)
@@ -852,7 +848,7 @@ func (m *Album) AddPhotos(photos PhotosInterface) (added PhotoAlbums) {
}
// Refresh updated timestamp.
if err := UpdateAlbum(m.AlbumUID, Map{"updated_at": TimeStamp()}); err != nil {
if err := UpdateAlbum(m.AlbumUID, map[string]interface{}{"updated_at": TimeStamp()}); err != nil {
log.Errorf("album: %s (update %s)", err.Error(), m)
}
@@ -880,7 +876,7 @@ func (m *Album) RemovePhotos(UIDs []string) (removed PhotoAlbums) {
}
// Refresh updated timestamp.
if err := UpdateAlbum(m.AlbumUID, Map{"updated_at": TimeStamp()}); err != nil {
if err := UpdateAlbum(m.AlbumUID, map[string]interface{}{"updated_at": TimeStamp()}); err != nil {
log.Errorf("album: %s (update %s)", err.Error(), m)
}
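The hunks above move the Album entity from GORM v1 soft deletes (a *time.Time checked with IsZero) to GORM v2's gorm.DeletedAt, which carries a Valid flag and gives automatic soft-delete scoping. A minimal sketch of how that type behaves, using a hypothetical Note model and an in-memory SQLite database purely for illustration (not PhotoPrism code):

package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Note is a throwaway model used only to demonstrate gorm.DeletedAt.
type Note struct {
	ID        uint           `gorm:"primaryKey"`
	Body      string         `gorm:"size:255"`
	DeletedAt gorm.DeletedAt `gorm:"index"`
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	if err := db.AutoMigrate(&Note{}); err != nil {
		panic(err)
	}

	n := Note{Body: "example"}
	db.Create(&n)

	// Delete performs a soft delete: deleted_at is set, the row stays in the table.
	db.Delete(&n)

	// Default queries exclude soft-deleted rows; Unscoped includes them.
	var visible, all int64
	db.Model(&Note{}).Count(&visible)
	db.Unscoped().Model(&Note{}).Count(&all)
	fmt.Println(visible, all) // 0 1

	// Deleted state is read from the Valid flag, as in Album.Deleted() above.
	var deleted Note
	db.Unscoped().First(&deleted, n.ID)
	fmt.Println(deleted.DeletedAt.Valid) // true

	// Clearing deleted_at restores the row, mirroring Album.Restore().
	db.Unscoped().Model(&deleted).Update("deleted_at", nil)
}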

View File

@@ -1,10 +1,12 @@
package entity
import (
"errors"
"fmt"
"time"
gc "github.com/patrickmn/go-cache"
"gorm.io/gorm"
"github.com/photoprism/photoprism/pkg/clean"
"github.com/photoprism/photoprism/pkg/rnd"
@@ -33,7 +35,8 @@ func CachedAlbumByUID(uid string) (m Album, err error) {
// Find in database.
m = Album{}
if r := Db().First(&m, "album_uid = ?", uid); r.RecordNotFound() {
r := Db().First(&m, "album_uid = ?", uid)
if errors.Is(r.Error, gorm.ErrRecordNotFound) {
return m, fmt.Errorf("album not found")
} else if r.Error != nil {
return m, r.Error

View File

@@ -2,6 +2,8 @@ package entity
import (
"time"
"gorm.io/gorm"
)
type AlbumMap map[string]Album
@@ -46,7 +48,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"holiday-2030": {
ID: 1000001,
@@ -71,7 +73,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"berlin-2019": {
ID: 1000002,
@@ -96,7 +98,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"april-1990": {
ID: 1000003,
@@ -121,7 +123,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"import": {
ID: 1000004,
@@ -146,7 +148,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"emptyMoment": {
ID: 1000005,
@@ -171,7 +173,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"2016-04": {
ID: 1000006,
@@ -195,7 +197,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"september-2021": {
ID: 1000007,
@@ -219,7 +221,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"california-usa": {
ID: 1000008,
@@ -243,7 +245,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"california-duplicate-1": {
ID: 1000009,
@@ -267,7 +269,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"california-duplicate-2": {
ID: 1000010,
@@ -291,7 +293,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"&ilikefood": {
ID: 1000011,
@@ -316,7 +318,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"i-love-%-dog": {
ID: 1000012,
@@ -341,7 +343,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"%gold": {
ID: 1000013,
@@ -366,7 +368,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"sale%": {
ID: 1000014,
@@ -391,7 +393,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"pets&dogs": {
ID: 1000015,
@@ -416,7 +418,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"light&": {
ID: 1000016,
@@ -441,7 +443,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"'family": {
ID: 1000017,
@@ -466,7 +468,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"father's-day": {
ID: 1000018,
@@ -491,7 +493,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"ice-cream'": {
ID: 1000019,
@@ -516,7 +518,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"*forrest": {
ID: 1000020,
@@ -541,7 +543,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"my*kids": {
ID: 1000021,
@@ -566,7 +568,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"yoga***": {
ID: 1000022,
@@ -591,7 +593,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"|banana": {
ID: 1000023,
@@ -616,7 +618,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"blue|": {
ID: 1000024,
@@ -641,7 +643,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"345-shirt": {
ID: 1000025,
@@ -666,7 +668,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"color-555-blue": {
ID: 1000026,
@@ -691,7 +693,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"route-66": {
ID: 1000027,
@@ -716,7 +718,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2024, 3, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"red|green": {
ID: 1000028,
@@ -741,7 +743,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2016, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2025, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"germany": {
ID: 1000029,
@@ -766,7 +768,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"mexico": {
ID: 1000030,
@@ -791,7 +793,32 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"cows": {
ID: 1000035,
AlbumUID: "as6sg6bipotaajfa",
AlbumSlug: "cows",
AlbumPath: "",
AlbumType: AlbumMoment,
AlbumTitle: "Cows",
AlbumFilter: "public:true label:cow",
AlbumLocation: "",
AlbumCategory: "",
AlbumCaption: "",
AlbumDescription: "",
AlbumNotes: "",
AlbumOrder: "name",
AlbumTemplate: "",
AlbumCountry: "zz",
AlbumYear: 0,
AlbumMonth: 0,
AlbumDay: 0,
AlbumFavorite: false,
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: gorm.DeletedAt{},
},
"november-2015": {
ID: 1000031,
@@ -816,7 +843,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"holiday": {
ID: 1000032,
@@ -841,7 +868,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"france-2020": {
ID: 1000033,
@@ -866,7 +893,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2018, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"greece-2024": {
ID: 1000034,
@@ -891,13 +918,18 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2024, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2025, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
}
// CreateAlbumFixtures inserts known entities into the database for testing.
func CreateAlbumFixtures() {
for _, entity := range AlbumFixtures {
Db().Create(&entity)
firstEntity := &Album{}
if err := Db().Model(&Album{}).Where("id = ?", entity.ID).First(&firstEntity).Error; err != nil {
Db().Create(&entity)
} else {
Db().Save(&entity)
}
}
}
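CreateAlbumFixtures above now looks up each fixture by its fixed ID and then either Creates or Saves it, so reloading fixtures into an existing database no longer fails on duplicate keys. For comparison only, GORM v2 can express the same insert-or-update as a single statement with an ON CONFLICT clause; a minimal sketch assuming it lives in package entity alongside AlbumMap and Album, not the committed implementation:

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

// upsertAlbums is a hypothetical helper shown only for comparison with
// CreateAlbumFixtures: it inserts each fixture, or updates all columns of an
// existing row with the same primary key, in one statement.
func upsertAlbums(db *gorm.DB, fixtures AlbumMap) error {
	for _, entity := range fixtures {
		entity := entity // copy the loop value so &entity is unique per iteration
		if err := db.Clauses(clause.OnConflict{UpdateAll: true}).Create(&entity).Error; err != nil {
			return err
		}
	}
	return nil
}

GORM translates the clause into dialect-specific upsert SQL; the explicit First/Create/Save flow in the commit keeps the intent of the fixtures obvious, so the sketch is an alternative, not a suggested change.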

View File

@@ -14,7 +14,7 @@ import (
func TestUpdateAlbum(t *testing.T) {
t.Run("InvalidUID", func(t *testing.T) {
err := UpdateAlbum("xxx", Map{"album_title": "New Title", "album_slug": "new-slug"})
err := UpdateAlbum("xxx", map[string]interface{}{"album_title": "New Title", "album_slug": "new-slug"})
assert.Error(t, err)
})
@@ -802,7 +802,7 @@ func TestAlbum_Updates(t *testing.T) {
assert.Equal(t, "test-title", album.AlbumSlug)
if err := album.Updates(Map{"album_title": "New Title", "album_slug": "new-slug"}); err != nil {
if err := album.Updates(map[string]interface{}{"album_title": "New Title", "album_slug": "new-slug"}); err != nil {
t.Fatal(err)
}
@@ -815,7 +815,7 @@ func TestAlbum_Updates(t *testing.T) {
t.Run("NoUID", func(t *testing.T) {
album := Album{}
err := album.Updates(Map{"album_title": "New Title", "album_slug": "new-slug"})
err := album.Updates(map[string]interface{}{"album_title": "New Title", "album_slug": "new-slug"})
assert.Error(t, err)
})

View File

@@ -4,9 +4,9 @@ import "github.com/photoprism/photoprism/internal/event"
// AlbumUser represents the user and group ownership of an Album and the corresponding permissions.
type AlbumUser struct {
UID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false" json:"UID" yaml:"UID"`
UserUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;index" json:"UserUID,omitempty" yaml:"UserUID,omitempty"`
TeamUID string `gorm:"type:VARBINARY(42);index" json:"TeamUID,omitempty" yaml:"TeamUID,omitempty"`
UID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false" json:"UID" yaml:"UID"`
UserUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;index" json:"UserUID,omitempty" yaml:"UserUID,omitempty"`
TeamUID string `gorm:"type:bytes;size:42;index" json:"TeamUID,omitempty" yaml:"TeamUID,omitempty"`
Perm uint `json:"Perm,omitempty" yaml:"Perm,omitempty"`
}

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"time"
@@ -20,7 +21,7 @@ func (m *Album) Yaml() (out []byte, err error) {
m.CreatedAt = m.CreatedAt.UTC().Truncate(time.Second)
m.UpdatedAt = m.UpdatedAt.UTC().Truncate(time.Second)
if err = Db().Model(m).Association("Photos").Find(&m.Photos).Error; err != nil {
if err = Db().Model(m).Association("Photos").Find(&m.Photos); err != nil {
log.Errorf("album: %s (yaml)", err)
return out, err
}
@@ -131,7 +132,20 @@ func (m *Album) LoadFromYaml(fileName string) error {
}
if err = yaml.Unmarshal(data, m); err != nil {
return err
if strings.Contains(err.Error(), "gorm.DeletedAt") && strings.Count(err.Error(), "\n") == 1 {
// Try to fix the gorm.DeletedAt structure change (legacy Gorm v1 backup format)
deletedAt := JustDeletedAt{}
if err = yaml.Unmarshal(data, &deletedAt); err != nil {
log.Errorf("album: yaml: unable to reparse DeletedAt with %s", err.Error())
return err
} else {
m.DeletedAt.Time = deletedAt.DeletedAt
m.DeletedAt.Valid = true
}
} else {
return err
}
}
return nil
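Note: the JustDeletedAt type used in the recovery branch above is not shown in this hunk. A plausible shape, assuming the Gorm v1 backups store DeletedAt as a plain RFC 3339 timestamp, would be:

// JustDeletedAt captures only the legacy DeletedAt field so a Gorm v1 style
// YAML backup (DeletedAt: 2025-06-30T10:33:49Z) can be re-parsed after the
// unmarshal into gorm.DeletedAt has failed.
type JustDeletedAt struct {
	DeletedAt time.Time `yaml:"DeletedAt"`
}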

View File

@@ -1,12 +1,17 @@
package entity
import (
"errors"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/functions"
"github.com/photoprism/photoprism/pkg/fs"
)
@@ -57,12 +62,12 @@ func TestAlbum_SaveAsYaml(t *testing.T) {
m = *found
}
backupPath := fs.Abs("testdata/TestAlbum_SaveAsYaml")
backupPath := fs.Abs("testdata/" + functions.PhotoPrismTestToFolderName() + "/TestAlbum_SaveAsYaml")
fileName, relName, err := m.YamlFileName(backupPath)
assert.NoError(t, err)
assert.True(t, strings.HasSuffix(fileName, "internal/entity/testdata/TestAlbum_SaveAsYaml/album/as6sg6bxpogaaba9.yml"))
assert.True(t, strings.HasSuffix(fileName, "internal/entity/testdata/"+functions.PhotoPrismTestToFolderName()+"/TestAlbum_SaveAsYaml/album/as6sg6bxpogaaba9.yml"))
assert.Equal(t, "album/as6sg6bxpogaaba9.yml", relName)
if err = m.SaveAsYaml(fileName); err != nil {
@@ -247,4 +252,175 @@ func TestAlbum_LoadFromYaml(t *testing.T) {
assert.Error(t, err)
})
t.Run("GormV1Format", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V1 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:supercow\nOrder: name\nDeletedAt: 2025-06-30T10:33:49Z\nCountry: zz\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.NoError(t, err)
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:supercow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{Time: time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), Valid: true}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
t.Run("GormV2Format", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V2 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:cow\nOrder: name\nCountry: zz\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\nDeletedAt:\n time: 2025-06-30T10:33:50Z\n valid: true\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.NoError(t, err)
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:cow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{Time: time.Date(2025, 6, 30, 10, 33, 50, 0, time.UTC), Valid: true}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
t.Run("GormV1Format_Bad", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa_bad.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V1 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:supercow\nOrder: name\nDeletedAt: 2025-06-30T10:33:49Z\nCountry: zz\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\nYear: TwentyTen\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.Error(t, err)
assert.Contains(t, err.Error(), "!!timestamp")
assert.Contains(t, err.Error(), "!!str")
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:supercow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
t.Run("GormV2Format_Bad", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa_Bad.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V2 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:cow\nOrder: name\nCountry: zz\nYear: TwentyTen\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\nDeletedAt:\n time: 2025-06-30T10:33:50Z\n valid: true\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.Error(t, err)
assert.Contains(t, err.Error(), "!!str")
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:cow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{Time: time.Date(2025, 6, 30, 10, 33, 50, 0, time.UTC), Valid: true}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
}

View File

@@ -7,7 +7,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/event"
@@ -29,17 +29,17 @@ type Clients []Client
// Client represents a client application.
type Client struct {
ClientUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;" json:"-" yaml:"ClientUID"`
UserUID string `gorm:"type:VARBINARY(42);index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
ClientUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;" json:"-" yaml:"ClientUID"`
UserUID string `gorm:"type:bytes;size:42;index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
UserName string `gorm:"size:200;index;" json:"UserName" yaml:"UserName,omitempty"`
user *User `gorm:"-" yaml:"-"`
user *User `gorm:"foreignKey:UserUID;references:UserUID" yaml:"-"`
ClientName string `gorm:"size:200;" json:"ClientName" yaml:"ClientName,omitempty"`
ClientRole string `gorm:"size:64;default:'';" json:"ClientRole" yaml:"ClientRole,omitempty"`
ClientType string `gorm:"type:VARBINARY(16)" json:"ClientType" yaml:"ClientType,omitempty"`
ClientURL string `gorm:"type:VARBINARY(255);default:'';column:client_url;" json:"ClientURL" yaml:"ClientURL,omitempty"`
CallbackURL string `gorm:"type:VARBINARY(255);default:'';column:callback_url;" json:"CallbackURL" yaml:"CallbackURL,omitempty"`
AuthProvider string `gorm:"type:VARBINARY(128);default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:VARBINARY(128);default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
ClientType string `gorm:"type:bytes;size:16" json:"ClientType" yaml:"ClientType,omitempty"`
ClientURL string `gorm:"type:bytes;size:255;default:'';column:client_url;" json:"ClientURL" yaml:"ClientURL,omitempty"`
CallbackURL string `gorm:"type:bytes;size:255;default:'';column:callback_url;" json:"CallbackURL" yaml:"CallbackURL,omitempty"`
AuthProvider string `gorm:"type:bytes;size:128;default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:bytes;size:128;default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthScope string `gorm:"size:1024;default:'';" json:"AuthScope" yaml:"AuthScope,omitempty"`
AuthExpires int64 `json:"AuthExpires" yaml:"AuthExpires,omitempty"`
AuthTokens int64 `json:"AuthTokens" yaml:"AuthTokens,omitempty"`
@@ -74,14 +74,14 @@ func NewClient() *Client {
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Client) BeforeCreate(scope *gorm.Scope) error {
func (m *Client) BeforeCreate(scope *gorm.DB) error {
if rnd.IsUID(m.ClientUID, ClientUID) {
return nil
}
m.ClientUID = rnd.GenerateUID(ClientUID)
return scope.SetColumn("ClientUID", m.ClientUID)
scope.Statement.SetColumn("ClientUID", m.ClientUID)
return scope.Error
}
// FindClientByUID returns the matching client or nil if it was not found.
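Note: the same hook migration (BeforeCreate receiving *gorm.DB instead of *gorm.Scope) recurs for Session and User further down. A generic sketch of the Gorm v2 shape, using hypothetical Example and ExampleUID names purely for illustration:

// In Gorm v2 the hook gets the statement-scoped *gorm.DB; column values are
// recorded via tx.Statement.SetColumn and the accumulated error is returned.
func (m *Example) BeforeCreate(tx *gorm.DB) error {
	if m.UID == "" {
		m.UID = rnd.GenerateUID(ExampleUID)
		tx.Statement.SetColumn("UID", m.UID)
	}
	return tx.Error
}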

View File

@@ -184,6 +184,7 @@ func TestClient_Create(t *testing.T) {
})
t.Run("AlreadyExists", func(t *testing.T) {
var m = ClientFixtures.Get("alice")
log.Info("Expect duplicate key violation Error or SQLSTATE from client.Create")
err := m.Create()
assert.Error(t, err)
})

View File

@@ -9,7 +9,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/event"
@@ -36,33 +36,33 @@ type Sessions []Session
// Session represents a User session.
type Session struct {
ID string `gorm:"type:VARBINARY(2048);primary_key;auto_increment:false;" json:"-" yaml:"ID"`
ID string `gorm:"type:bytes;size:2048;primaryKey;autoIncrement:false;" json:"-" yaml:"ID"`
authToken string `gorm:"-" yaml:"-"`
UserUID string `gorm:"type:VARBINARY(42);index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
UserUID string `gorm:"type:bytes;size:42;index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
UserName string `gorm:"size:200;index;" json:"UserName" yaml:"UserName,omitempty"`
user *User `gorm:"-" yaml:"-"`
ClientUID string `gorm:"type:VARBINARY(42);index;default:'';" json:"ClientUID" yaml:"ClientUID,omitempty"`
ClientUID string `gorm:"type:bytes;size:42;index;default:'';" json:"ClientUID" yaml:"ClientUID,omitempty"`
ClientName string `gorm:"size:200;default:'';" json:"ClientName" yaml:"ClientName,omitempty"`
ClientIP string `gorm:"size:64;column:client_ip;index" json:"ClientIP" yaml:"ClientIP,omitempty"`
client *Client `gorm:"-" yaml:"-"`
AuthProvider string `gorm:"type:VARBINARY(128);default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:VARBINARY(128);default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthIssuer string `gorm:"type:VARBINARY(255);default:'';" json:"AuthIssuer,omitempty" yaml:"AuthIssuer,omitempty"`
AuthID string `gorm:"type:VARBINARY(255);index;default:'';" json:"AuthID" yaml:"AuthID,omitempty"`
AuthProvider string `gorm:"type:bytes;size:128;default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:bytes;size:128;default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthIssuer string `gorm:"type:bytes;size:255;default:'';" json:"AuthIssuer,omitempty" yaml:"AuthIssuer,omitempty"`
AuthID string `gorm:"type:bytes;size:255;index;default:'';" json:"AuthID" yaml:"AuthID,omitempty"`
AuthScope string `gorm:"size:1024;default:'';" json:"AuthScope" yaml:"AuthScope,omitempty"`
GrantType string `gorm:"type:VARBINARY(64);default:'';" json:"GrantType" yaml:"GrantType,omitempty"`
GrantType string `gorm:"type:bytes;size:64;default:'';" json:"GrantType" yaml:"GrantType,omitempty"`
LastActive int64 `json:"LastActive" yaml:"LastActive,omitempty"`
SessExpires int64 `gorm:"index" json:"Expires" yaml:"Expires,omitempty"`
SessTimeout int64 `json:"Timeout" yaml:"Timeout,omitempty"`
PreviewToken string `gorm:"type:VARBINARY(64);column:preview_token;default:'';" json:"-" yaml:"-"`
DownloadToken string `gorm:"type:VARBINARY(64);column:download_token;default:'';" json:"-" yaml:"-"`
AccessToken string `gorm:"type:VARBINARY(4096);column:access_token;default:'';" json:"-" yaml:"-"`
RefreshToken string `gorm:"type:VARBINARY(2048);column:refresh_token;default:'';" json:"-" yaml:"-"`
IdToken string `gorm:"type:VARBINARY(2048);column:id_token;default:'';" json:"IdToken,omitempty" yaml:"IdToken,omitempty"`
PreviewToken string `gorm:"type:bytes;size:64;column:preview_token;default:'';" json:"-" yaml:"-"`
DownloadToken string `gorm:"type:bytes;size:64;column:download_token;default:'';" json:"-" yaml:"-"`
AccessToken string `gorm:"type:bytes;size:4096;column:access_token;default:'';" json:"-" yaml:"-"`
RefreshToken string `gorm:"type:bytes;size:2048;column:refresh_token;default:'';" json:"-" yaml:"-"`
IdToken string `gorm:"type:bytes;size:2048;column:id_token;default:'';" json:"IdToken,omitempty" yaml:"IdToken,omitempty"`
UserAgent string `gorm:"size:512;" json:"UserAgent" yaml:"UserAgent,omitempty"`
DataJSON json.RawMessage `gorm:"type:VARBINARY(4096);" json:"-" yaml:"Data,omitempty"`
DataJSON json.RawMessage `gorm:"type:bytes;size:4096;" json:"-" yaml:"Data,omitempty"`
data *SessionData `gorm:"-" yaml:"-"`
RefID string `gorm:"type:VARBINARY(16);default:'';" json:"ID" yaml:"-"`
RefID string `gorm:"type:bytes;size:16;default:'';" json:"ID" yaml:"-"`
LoginIP string `gorm:"size:64;column:login_ip" json:"LoginIP" yaml:"-"`
LoginAt time.Time `json:"LoginAt" yaml:"-"`
CreatedAt time.Time `json:"CreatedAt" yaml:"CreatedAt"`
@@ -226,10 +226,11 @@ func (m *Session) Updates(values interface{}) error {
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Session) BeforeCreate(scope *gorm.Scope) error {
func (m *Session) BeforeCreate(scope *gorm.DB) error {
if rnd.InvalidRefID(m.RefID) {
m.RefID = rnd.RefID(SessionPrefix)
Log("session", "set ref id", scope.SetColumn("RefID", m.RefID))
scope.Statement.SetColumn("RefID", m.RefID)
Log("session", "set ref id", scope.Error)
}
if rnd.IsSessionID(m.ID) {
@@ -237,8 +238,8 @@ func (m *Session) BeforeCreate(scope *gorm.Scope) error {
}
m.Regenerate()
return scope.SetColumn("ID", m.ID)
scope.Statement.SetColumn("ID", m.ID)
return scope.Error
}
// SetClient updates the client of this session.

View File

@@ -1,10 +1,12 @@
package entity
import (
"errors"
"fmt"
"time"
gc "github.com/patrickmn/go-cache"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/clean"
@@ -37,7 +39,7 @@ func FindSession(id string) (*Session, error) {
} else if err := cached.Delete(); err != nil {
event.AuditErr([]string{cached.IP(), "session %s", "failed to delete after expiration", "%s"}, cached.RefID, err)
}
} else if res := Db().First(&found, "id = ?", id); res.RecordNotFound() {
} else if res := Db().First(&found, "id = ?", id); errors.Is(res.Error, gorm.ErrRecordNotFound) {
return found, fmt.Errorf("invalid session")
} else if res.Error != nil {
return found, res.Error
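Note: res.RecordNotFound() does not exist in Gorm v2, hence the errors.Is check above. A minimal sketch of the same lookup as a helper (findSessionByID is an illustrative name, not part of this branch):

// findSessionByID detects a missing row with errors.Is, since Gorm v2 wraps
// errors instead of exposing a RecordNotFound() helper on the result.
func findSessionByID(db *gorm.DB, id string) (*Session, error) {
	found := &Session{}
	if res := db.First(found, "id = ?", id); errors.Is(res.Error, gorm.ErrRecordNotFound) {
		return nil, fmt.Errorf("invalid session")
	} else if res.Error != nil {
		return nil, res.Error
	}
	return found, nil
}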

View File

@@ -6,6 +6,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/functions"
"github.com/photoprism/photoprism/pkg/authn"
"github.com/photoprism/photoprism/pkg/rnd"
"github.com/photoprism/photoprism/pkg/time/unix"
@@ -90,7 +91,8 @@ func DeleteClientSessions(client *Client, authMethod authn.MethodType, limit int
q = q.Where("auth_method = ?", authMethod.String())
}
q = q.Order("created_at DESC").Limit(1000000000).Offset(limit)
// NOTE: converting the limit to int can lose precision, but an int64 limit is unlikely to be meaningful for this value anyway.
q = q.Order("created_at DESC").Limit(1000000000).Offset(functions.SafeInt64toint(limit))
found := Sessions{}
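Note: functions.SafeInt64toint is referenced but not shown in this diff. A plausible clamping implementation, assuming the helper simply bounds the value to the platform int range (requires the math package), might look like:

// SafeInt64toint converts an int64 to int, clamping at the int limits so the
// Offset call above never receives an overflowed value on 32-bit platforms.
func SafeInt64toint(v int64) int {
	if v > int64(math.MaxInt) {
		return math.MaxInt
	}
	if v < int64(math.MinInt) {
		return math.MinInt
	}
	return int(v)
}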

View File

@@ -1,6 +1,8 @@
package entity
import (
"time"
"github.com/photoprism/photoprism/pkg/authn"
"github.com/photoprism/photoprism/pkg/clean"
"github.com/photoprism/photoprism/pkg/rnd"
@@ -112,9 +114,9 @@ var SessionFixtures = SessionMap{
user: UserFixtures.Pointer("bob"),
UserUID: UserFixtures.Pointer("bob").UserUID,
UserName: UserFixtures.Pointer("bob").UserName,
LoginAt: Now().Add(-24),
CreatedAt: Now().Add(-24),
UpdatedAt: Now().Add(-24),
LoginAt: Now().Add(time.Millisecond * -24),
CreatedAt: Now().Add(time.Millisecond * -24),
UpdatedAt: Now().Add(time.Millisecond * -24),
},
"unauthorized": {
authToken: "69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7ac2",
@@ -167,9 +169,9 @@ var SessionFixtures = SessionMap{
user: UserFixtures.Pointer("friend"),
UserUID: UserFixtures.Pointer("friend").UserUID,
UserName: UserFixtures.Pointer("friend").UserName,
LoginAt: Now().Add(-12),
CreatedAt: Now().Add(-20),
UpdatedAt: Now().Add(-12),
LoginAt: Now().Add(time.Millisecond * -12),
CreatedAt: Now().Add(time.Millisecond * -20),
UpdatedAt: Now().Add(time.Millisecond * -12),
},
"client_metrics": {
authToken: "9d8b8801ffa23eb52e08ca7766283799ddfd8dd368212345",

View File

@@ -248,6 +248,7 @@ func TestSession_Create(t *testing.T) {
s.SetAuthToken(authToken)
log.Info("Expect duplicate key violation Error or SQLSTATE from session.Create")
err := s.Create()
assert.Error(t, err)
})

View File

@@ -8,9 +8,9 @@ import (
"strings"
"time"
"github.com/jinzhu/gorm"
"github.com/ulule/deepcopier"
"github.com/zitadel/oidc/v3/pkg/oidc"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/event"
@@ -45,45 +45,45 @@ type Users []User
// User represents a person that may optionally log in as user.
type User struct {
ID int `gorm:"primary_key" json:"ID" yaml:"-"`
UUID string `gorm:"type:VARBINARY(64);column:user_uuid;index;" json:"UUID,omitempty" yaml:"UUID,omitempty"`
UserUID string `gorm:"type:VARBINARY(42);column:user_uid;unique_index;" json:"UID" yaml:"UID"`
AuthProvider string `gorm:"type:VARBINARY(128);default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:VARBINARY(128);default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthIssuer string `gorm:"type:VARBINARY(255);default:'';" json:"AuthIssuer,omitempty" yaml:"AuthIssuer,omitempty"`
AuthID string `gorm:"type:VARBINARY(255);index;default:'';" json:"AuthID" yaml:"AuthID,omitempty"`
UserName string `gorm:"size:200;index;" json:"Name" yaml:"Name,omitempty"`
DisplayName string `gorm:"size:200;" json:"DisplayName" yaml:"DisplayName,omitempty"`
UserEmail string `gorm:"size:255;index;" json:"Email" yaml:"Email,omitempty"`
BackupEmail string `gorm:"size:255;" json:"BackupEmail,omitempty" yaml:"BackupEmail,omitempty"`
UserRole string `gorm:"size:64;default:'';" json:"Role" yaml:"Role,omitempty"`
UserAttr string `gorm:"size:1024;" json:"Attr" yaml:"Attr,omitempty"`
SuperAdmin bool `json:"SuperAdmin" yaml:"SuperAdmin,omitempty"`
CanLogin bool `json:"CanLogin" yaml:"CanLogin,omitempty"`
LoginAt *time.Time `json:"LoginAt" yaml:"LoginAt,omitempty"`
ExpiresAt *time.Time `sql:"index" json:"ExpiresAt,omitempty" yaml:"ExpiresAt,omitempty"`
WebDAV bool `gorm:"column:webdav;" json:"WebDAV" yaml:"WebDAV,omitempty"`
BasePath string `gorm:"type:VARBINARY(1024);" json:"BasePath" yaml:"BasePath,omitempty"`
UploadPath string `gorm:"type:VARBINARY(1024);" json:"UploadPath" yaml:"UploadPath,omitempty"`
CanInvite bool `json:"CanInvite" yaml:"CanInvite,omitempty"`
InviteToken string `gorm:"type:VARBINARY(64);index;" json:"-" yaml:"-"`
InvitedBy string `gorm:"size:64;" json:"-" yaml:"-"`
VerifyToken string `gorm:"type:VARBINARY(64);" json:"-" yaml:"-"`
VerifiedAt *time.Time `json:"VerifiedAt,omitempty" yaml:"VerifiedAt,omitempty"`
ConsentAt *time.Time `json:"ConsentAt,omitempty" yaml:"ConsentAt,omitempty"`
BornAt *time.Time `sql:"index" json:"BornAt,omitempty" yaml:"BornAt,omitempty"`
UserDetails *UserDetails `gorm:"PRELOAD:true;foreignkey:UserUID;association_foreignkey:UserUID;" json:"Details,omitempty" yaml:"Details,omitempty"`
UserSettings *UserSettings `gorm:"PRELOAD:true;foreignkey:UserUID;association_foreignkey:UserUID;" json:"Settings,omitempty" yaml:"Settings,omitempty"`
UserShares UserShares `gorm:"-" json:"Shares,omitempty" yaml:"Shares,omitempty"`
ResetToken string `gorm:"type:VARBINARY(64);" json:"-" yaml:"-"`
PreviewToken string `gorm:"type:VARBINARY(64);column:preview_token;" json:"-" yaml:"-"`
DownloadToken string `gorm:"type:VARBINARY(64);column:download_token;" json:"-" yaml:"-"`
Thumb string `gorm:"type:VARBINARY(128);index;default:'';" json:"Thumb" yaml:"Thumb,omitempty"`
ThumbSrc string `gorm:"type:VARBINARY(8);default:'';" json:"ThumbSrc" yaml:"ThumbSrc,omitempty"`
RefID string `gorm:"type:VARBINARY(16);" json:"-" yaml:"-"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"-"`
DeletedAt *time.Time `sql:"index" json:"DeletedAt,omitempty" yaml:"-"`
ID int `gorm:"primaryKey;" json:"ID" yaml:"-"`
UUID string `gorm:"type:bytes;size:64;column:user_uuid;index;" json:"UUID,omitempty" yaml:"UUID,omitempty"`
UserUID string `gorm:"type:bytes;size:42;column:user_uid;uniqueIndex;" json:"UID" yaml:"UID"`
AuthProvider string `gorm:"type:bytes;size:128;default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:bytes;size:128;default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthIssuer string `gorm:"type:bytes;size:255;default:'';" json:"AuthIssuer,omitempty" yaml:"AuthIssuer,omitempty"`
AuthID string `gorm:"type:bytes;size:255;index;default:'';" json:"AuthID" yaml:"AuthID,omitempty"`
UserName string `gorm:"size:200;index;" json:"Name" yaml:"Name,omitempty"`
DisplayName string `gorm:"size:200;" json:"DisplayName" yaml:"DisplayName,omitempty"`
UserEmail string `gorm:"size:255;index;" json:"Email" yaml:"Email,omitempty"`
BackupEmail string `gorm:"size:255;" json:"BackupEmail,omitempty" yaml:"BackupEmail,omitempty"`
UserRole string `gorm:"size:64;default:'';" json:"Role" yaml:"Role,omitempty"`
UserAttr string `gorm:"size:1024;" json:"Attr" yaml:"Attr,omitempty"`
SuperAdmin bool `json:"SuperAdmin" yaml:"SuperAdmin,omitempty"`
CanLogin bool `json:"CanLogin" yaml:"CanLogin,omitempty"`
LoginAt *time.Time `json:"LoginAt" yaml:"LoginAt,omitempty"`
ExpiresAt *time.Time `sql:"index" json:"ExpiresAt,omitempty" yaml:"ExpiresAt,omitempty"`
WebDAV bool `gorm:"column:webdav;" json:"WebDAV" yaml:"WebDAV,omitempty"`
BasePath string `gorm:"type:bytes;size:1024;" json:"BasePath" yaml:"BasePath,omitempty"`
UploadPath string `gorm:"type:bytes;size:1024;" json:"UploadPath" yaml:"UploadPath,omitempty"`
CanInvite bool `json:"CanInvite" yaml:"CanInvite,omitempty"`
InviteToken string `gorm:"type:bytes;size:64;index;" json:"-" yaml:"-"`
InvitedBy string `gorm:"size:64;" json:"-" yaml:"-"`
VerifyToken string `gorm:"type:bytes;size:64;" json:"-" yaml:"-"`
VerifiedAt *time.Time `json:"VerifiedAt,omitempty" yaml:"VerifiedAt,omitempty"`
ConsentAt *time.Time `json:"ConsentAt,omitempty" yaml:"ConsentAt,omitempty"`
BornAt *time.Time `sql:"index" json:"BornAt,omitempty" yaml:"BornAt,omitempty"`
UserDetails *UserDetails `gorm:"foreignKey:UserUID;references:UserUID;constraint:OnDelete:CASCADE" json:"Details,omitempty" yaml:"Details,omitempty"`
UserSettings *UserSettings `gorm:"foreignKey:UserUID;references:UserUID;constraint:OnDelete:CASCADE" json:"Settings,omitempty" yaml:"Settings,omitempty"`
UserShares []UserShare `gorm:"foreignKey:UserUID;references:UserUID" json:"Shares,omitempty" yaml:"Shares,omitempty"`
ResetToken string `gorm:"type:bytes;size:64;" json:"-" yaml:"-"`
PreviewToken string `gorm:"type:bytes;size:64;column:preview_token;" json:"-" yaml:"-"`
DownloadToken string `gorm:"type:bytes;size:64;column:download_token;" json:"-" yaml:"-"`
Thumb string `gorm:"type:bytes;size:128;index;default:'';" json:"Thumb" yaml:"Thumb,omitempty"`
ThumbSrc string `gorm:"type:bytes;size:8;default:'';" json:"ThumbSrc" yaml:"ThumbSrc,omitempty"`
RefID string `gorm:"type:bytes;size:16;" json:"-" yaml:"-"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"-"`
DeletedAt gorm.DeletedAt `sql:"index" json:"DeletedAt,omitempty" yaml:"-"`
}
// TableName returns the entity table name.
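Note: the struct tag rewrite above is mechanical: Gorm v1 VARBINARY(n) column types become type:bytes plus an explicit size, and primary_key / auto_increment / unique_index become primaryKey / autoIncrement / uniqueIndex. A small illustration with a hypothetical struct (not part of the committed code):

// tagExample shows the tag convention used throughout this branch.
type tagExample struct {
	// Gorm v1: `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;"`
	UID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;"`
	// Gorm v1: `gorm:"type:VARBINARY(64);unique_index;"`
	Token string `gorm:"type:bytes;size:64;uniqueIndex;"`
}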
@@ -163,7 +163,10 @@ func FindUser(find User) *User {
}
// Find matching record.
if err := stmt.First(m).Error; err != nil {
if err := stmt.
Preload("UserDetails").
Preload("UserSettings").
First(m).Error; err != nil {
return nil
}
@@ -212,6 +215,8 @@ func FindLocalUser(userName string) *User {
// Build query.
if err := UnscopedDb().
Where("user_name = ? AND auth_provider IN (?)", name, providers).
Preload("UserDetails").
Preload("UserSettings").
First(m).Error; err != nil {
return nil
}
@@ -310,7 +315,12 @@ func (m *User) Create() (err error) {
func (m *User) Save() (err error) {
m.GenerateTokens(false)
err = UnscopedDb().Save(m).Error
// Gorm v2 doesn't call BeforeCreate here, so check that UserUID is populated as a validity check.
if len(m.UserUID) == 0 {
err = UnscopedDb().Create(m).Error
} else {
err = UnscopedDb().Save(m).Error
}
if err == nil {
m.SaveRelated()
@@ -340,11 +350,7 @@ func (m *User) Delete() (err error) {
// IsDeleted checks if the user account has been deleted.
func (m *User) IsDeleted() bool {
if m.DeletedAt == nil {
return false
}
return !m.DeletedAt.IsZero()
return m.DeletedAt.Valid
}
// LoadRelated loads related settings and details.
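Note: gorm.DeletedAt is a value struct with Time and Valid fields rather than a nullable *time.Time, so the soft-delete check above reduces to reading Valid. A brief sketch (isSoftDeleted is an illustrative helper, not part of this branch):

// isSoftDeleted mirrors the IsDeleted change: no nil check is needed, only the
// Valid flag matters.
func isSoftDeleted(d gorm.DeletedAt) bool {
	return d.Valid
}

For example, gorm.DeletedAt{} reports false, while gorm.DeletedAt{Time: Now(), Valid: true} reports true, which matches the fixture updates elsewhere in this diff.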
@@ -374,7 +380,7 @@ func (m *User) Updates(values interface{}) error {
}
// BeforeCreate sets a random UID if needed before inserting a new row to the database.
func (m *User) BeforeCreate(scope *gorm.Scope) error {
func (m *User) BeforeCreate(db *gorm.DB) (err error) {
if m.UserSettings != nil {
m.UserSettings.UserUID = m.UserUID
}
@@ -387,7 +393,8 @@ func (m *User) BeforeCreate(scope *gorm.Scope) error {
if rnd.InvalidRefID(m.RefID) {
m.RefID = rnd.RefID(UserPrefix)
Log("user", "set ref id", scope.SetColumn("RefID", m.RefID))
db.Statement.SetColumn("RefID", m.RefID)
Log("user", "set ref id", db.Error)
}
if rnd.IsUnique(m.UserUID, UserUID) {
@@ -395,7 +402,8 @@ func (m *User) BeforeCreate(scope *gorm.Scope) error {
}
m.UserUID = rnd.GenerateUID(UserUID)
return scope.SetColumn("UserUID", m.UserUID)
db.Statement.SetColumn("UserUID", m.UserUID)
return db.Error
}
// IsExpired checks if the user account has expired.
@@ -628,7 +636,7 @@ func (m *User) SetAuthID(id, issuer string) *User {
// Make sure other users do not use the same identifier.
if m.HasUID() && m.AuthProvider != "" {
if err := UnscopedDb().Model(&User{}).
Where("user_uid <> ? AND auth_provider = ? AND auth_id = ? AND super_admin = 0", m.UserUID, m.AuthProvider, m.AuthID).
Where("user_uid <> ? AND auth_provider = ? AND auth_id = ? AND super_admin = FALSE", m.UserUID, m.AuthProvider, m.AuthID).
Updates(map[string]interface{}{"auth_id": "", "auth_provider": authn.ProviderNone}).Error; err != nil {
event.AuditErr([]string{"user %s", "failed to resolve auth id conflicts", "%s"}, m.RefID, err)
}
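Note: replacing super_admin = 0 with super_admin = FALSE keeps the predicate valid on PostgreSQL, which rejects integer comparisons against boolean columns. A portable alternative, sketched here as an assumption rather than what this branch does, is to bind a Go bool so each driver renders its own literal:

// clearConflictingAuthIDs is a hypothetical variant of the update above that
// binds the boolean instead of inlining it, so the same query runs on MySQL,
// SQLite, and PostgreSQL without edits.
func clearConflictingAuthIDs(db *gorm.DB, m *User) error {
	return db.Model(&User{}).
		Where("user_uid <> ? AND auth_provider = ? AND auth_id = ? AND super_admin = ?",
			m.UserUID, m.AuthProvider, m.AuthID, false).
		Updates(map[string]interface{}{"auth_id": "", "auth_provider": authn.ProviderNone}).Error
}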
@@ -644,7 +652,7 @@ func (m *User) UpdateAuthID(id, issuer string) error {
}
// Update auth id and issuer record.
return m.SetAuthID(id, issuer).Updates(Map{
return m.SetAuthID(id, issuer).Updates(map[string]interface{}{
"AuthID": m.AuthID,
"AuthIssuer": m.AuthIssuer,
})
@@ -721,7 +729,7 @@ func (m *User) UpdateUsername(login string) (err error) {
}
// Save to database.
return m.Updates(Map{
return m.Updates(map[string]interface{}{
"UserName": m.UserName,
"DisplayName": m.DisplayName,
})
@@ -1175,7 +1183,7 @@ func (m *User) RegenerateTokens() error {
m.GenerateTokens(true)
return m.Updates(Map{"PreviewToken": m.PreviewToken, "DownloadToken": m.DownloadToken})
return m.Updates(map[string]interface{}{"PreviewToken": m.PreviewToken, "DownloadToken": m.DownloadToken})
}
// RefreshShares updates the list of shares.
@@ -1190,7 +1198,7 @@ func (m *User) NoShares() bool {
return true
}
return m.UserShares.Empty()
return UserShares(m.UserShares).Empty()
}
// HasShares checks if the user has any shares.
@@ -1205,7 +1213,7 @@ func (m *User) HasShare(uid string) bool {
}
// Check if the share list contains the specified UID.
return m.UserShares.Contains(uid)
return UserShares(m.UserShares).Contains(uid)
}
// SharedUIDs returns shared entity UIDs.
@@ -1214,7 +1222,7 @@ func (m *User) SharedUIDs() UIDs {
m.RefreshShares()
}
return m.UserShares.UIDs()
return UserShares(m.UserShares).UIDs()
}
// RedeemToken updates shared entity UIDs using the specified token.
@@ -1326,7 +1334,7 @@ func (m *User) SaveForm(frm form.User, u *User) error {
} else if u.IsAdmin() {
// Restore account.
if frm.DeletedAt == nil {
m.DeletedAt = nil
m.DeletedAt = gorm.DeletedAt{}
}
// Prevent admins from locking themselves out.
@@ -1440,5 +1448,5 @@ func (m *User) SetAvatar(thumb, thumbSrc string) error {
m.Thumb = thumb
m.ThumbSrc = thumbSrc
return m.Updates(Map{"Thumb": m.Thumb, "ThumbSrc": m.ThumbSrc})
return m.Updates(map[string]interface{}{"Thumb": m.Thumb, "ThumbSrc": m.ThumbSrc})
}

View File

@@ -3,7 +3,7 @@ package entity
import (
"fmt"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/pkg/authn"

View File

@@ -2,6 +2,7 @@ package entity
import (
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/pkg/authn"
@@ -104,7 +105,7 @@ func (m *User) SetValuesFromCli(ctx *cli.Context) error {
// RestoreFromCli restores the account from a CLI context.
func (m *User) RestoreFromCli(ctx *cli.Context, newPassword string) (err error) {
m.DeletedAt = nil
m.DeletedAt = gorm.DeletedAt{}
// Set values.
if err = m.SetValuesFromCli(ctx); err != nil {

View File

@@ -2,6 +2,7 @@ package entity
import (
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/entity/legacy"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/authn"
)
@@ -67,22 +68,24 @@ func CreateDefaultUsers() {
if admin := FindUser(Admin); admin != nil {
Admin = *admin
} else {
// Set legacy values.
if leg := FindLegacyUser(Admin); leg != nil {
Admin.UserUID = leg.UserUID
if leg.UserName != "" {
Admin.UserName = leg.UserName
// Set legacy values if the legacy table exists.
if Db().Migrator().HasTable(&legacy.User{}) {
if leg := FindLegacyUser(Admin); leg != nil {
Admin.UserUID = leg.UserUID
if leg.UserName != "" {
Admin.UserName = leg.UserName
}
if leg.PrimaryEmail != "" {
Admin.UserEmail = leg.PrimaryEmail
}
if leg.FullName != "" {
Admin.DisplayName = leg.FullName
}
if leg.LoginAt != nil {
Admin.LoginAt = leg.LoginAt
}
log.Infof("users: migrating %s account", Admin.UserName)
}
if leg.PrimaryEmail != "" {
Admin.UserEmail = leg.PrimaryEmail
}
if leg.FullName != "" {
Admin.DisplayName = leg.FullName
}
if leg.LoginAt != nil {
Admin.LoginAt = leg.LoginAt
}
log.Infof("users: migrating %s account", Admin.UserName)
}
// Set default values.

View File

@@ -17,12 +17,12 @@ const (
// UserDetails represents user profile information.
type UserDetails struct {
UserUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;" json:"-" yaml:"-"`
SubjUID string `gorm:"type:VARBINARY(42);index;" json:"SubjUID,omitempty" yaml:"SubjUID,omitempty"`
SubjSrc string `gorm:"type:VARBINARY(8);default:'';" json:"-" yaml:"SubjSrc,omitempty"`
PlaceID string `gorm:"type:VARBINARY(42);index;default:'zz'" json:"-" yaml:"-"`
PlaceSrc string `gorm:"type:VARBINARY(8);" json:"-" yaml:"PlaceSrc,omitempty"`
CellID string `gorm:"type:VARBINARY(42);index;default:'zz'" json:"-" yaml:"CellID,omitempty"`
UserUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;" json:"-" yaml:"-"`
SubjUID string `gorm:"type:bytes;size:42;index;" json:"SubjUID,omitempty" yaml:"SubjUID,omitempty"`
SubjSrc string `gorm:"type:bytes;size:8;default:'';" json:"-" yaml:"SubjSrc,omitempty"`
PlaceID string `gorm:"type:bytes;size:42;index;default:'zz';" json:"-" yaml:"-"`
PlaceSrc string `gorm:"type:bytes;size:8;" json:"-" yaml:"PlaceSrc,omitempty"`
CellID string `gorm:"type:bytes;size:42;index;default:'zz';" json:"-" yaml:"CellID,omitempty"`
BirthYear int `gorm:"default:-1;" json:"BirthYear" yaml:"BirthYear,omitempty"`
BirthMonth int `gorm:"default:-1;" json:"BirthMonth" yaml:"BirthMonth,omitempty"`
BirthDay int `gorm:"default:-1;" json:"BirthDay" yaml:"BirthDay,omitempty"`
@@ -32,23 +32,23 @@ type UserDetails struct {
FamilyName string `gorm:"size:64;" json:"FamilyName" yaml:"FamilyName,omitempty"`
NameSuffix string `gorm:"size:32;" json:"NameSuffix" yaml:"NameSuffix,omitempty"`
NickName string `gorm:"size:64;" json:"NickName" yaml:"NickName,omitempty"`
NameSrc string `gorm:"type:VARBINARY(8);" json:"NameSrc" yaml:"NameSrc,omitempty"`
NameSrc string `gorm:"type:bytes;size:8;" json:"NameSrc" yaml:"NameSrc,omitempty"`
UserGender string `gorm:"size:16;" json:"Gender" yaml:"Gender,omitempty"`
UserAbout string `gorm:"size:512;" json:"About" yaml:"About,omitempty"`
UserBio string `gorm:"size:2048;" json:"Bio" yaml:"Bio,omitempty"`
UserLocation string `gorm:"size:512;" json:"Location" yaml:"Location,omitempty"`
UserCountry string `gorm:"type:VARBINARY(2);default:'zz'" json:"Country" yaml:"Country,omitempty"`
UserCountry string `gorm:"type:bytes;size:2;default:'zz';" json:"Country" yaml:"Country,omitempty"`
UserPhone string `gorm:"size:32;" json:"Phone" yaml:"Phone,omitempty"`
SiteURL string `gorm:"type:VARBINARY(512);column:site_url" json:"SiteURL" yaml:"SiteURL,omitempty"`
ProfileURL string `gorm:"type:VARBINARY(512);column:profile_url" json:"ProfileURL" yaml:"ProfileURL,omitempty"`
FeedURL string `gorm:"type:VARBINARY(512);column:feed_url" json:"FeedURL,omitempty" yaml:"FeedURL,omitempty"`
AvatarURL string `gorm:"type:VARBINARY(512);column:avatar_url" json:"AvatarURL,omitempty" yaml:"AvatarURL,omitempty"`
SiteURL string `gorm:"type:bytes;size:512;column:site_url" json:"SiteURL" yaml:"SiteURL,omitempty"`
ProfileURL string `gorm:"type:bytes;size:512;column:profile_url" json:"ProfileURL" yaml:"ProfileURL,omitempty"`
FeedURL string `gorm:"type:bytes;size:512;column:feed_url" json:"FeedURL,omitempty" yaml:"FeedURL,omitempty"`
AvatarURL string `gorm:"type:bytes;size:512;column:avatar_url" json:"AvatarURL,omitempty" yaml:"AvatarURL,omitempty"`
OrgTitle string `gorm:"size:64;" json:"OrgTitle" yaml:"OrgTitle,omitempty"`
OrgName string `gorm:"size:128;" json:"OrgName" yaml:"OrgName,omitempty"`
OrgEmail string `gorm:"size:255;index;" json:"OrgEmail" yaml:"OrgEmail,omitempty"`
OrgPhone string `gorm:"size:32;" json:"OrgPhone" yaml:"OrgPhone,omitempty"`
OrgURL string `gorm:"type:VARBINARY(512);column:org_url" json:"OrgURL" yaml:"OrgURL,omitempty"`
IdURL string `gorm:"type:VARBINARY(512);column:id_url;" json:"IdURL,omitempty" yaml:"IdURL,omitempty"`
OrgURL string `gorm:"type:bytes;size:512;column:org_url" json:"OrgURL" yaml:"OrgURL,omitempty"`
IdURL string `gorm:"type:bytes;size:512;column:id_url;" json:"IdURL,omitempty" yaml:"IdURL,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"-"`
}

View File

@@ -14,6 +14,9 @@ func TestCreateUserDetails(t *testing.T) {
})
t.Run("Success", func(t *testing.T) {
m := &User{UserUID: "1234"}
if err := Db().Create(m).Error; err != nil { // Have to create a user BEFORE adding details to it.
t.Fatal(err)
}
err := CreateUserDetails(m)
if err != nil {
@@ -33,6 +36,7 @@ func TestUserDetails_Updates(t *testing.T) {
m := &User{
UserUID: "1234",
UserDetails: &UserDetails{
UserUID: "1234", // m.UserDetails.Updates fails with WHERE conditions required.
BirthYear: 1999,
BirthMonth: 3,
NameTitle: "Dr.",
@@ -41,7 +45,7 @@ func TestUserDetails_Updates(t *testing.T) {
FamilyName: "Doe",
}}
m.UserDetails.Updates(UserDetails{GivenName: "Jane"})
assert.Nil(t, m.UserDetails.Updates(UserDetails{GivenName: "Jane"}))
assert.Equal(t, "Jane", m.UserDetails.GivenName)
}

View File

@@ -1,6 +1,8 @@
package entity
import (
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/pkg/authn"
)
@@ -118,7 +120,7 @@ var UserFixtures = UserMap{
CanLogin: false,
WebDAV: true,
CanInvite: false,
DeletedAt: TimeStamp(),
DeletedAt: gorm.DeletedAt{Time: Now(), Valid: true},
UserSettings: &UserSettings{
UITheme: "",
MapsStyle: "",

View File

@@ -10,51 +10,58 @@ import (
// FindLegacyUser returns the matching legacy user or nil if it was not found.
func FindLegacyUser(find User) *legacy.User {
m := &legacy.User{}
// Make sure that the legacy.User table exists to avoid DBMS errors
if Db().Migrator().HasTable(&legacy.User{}) {
m := &legacy.User{}
// Build query.
stmt := Db()
if find.ID != 0 {
stmt = stmt.Where("id = ?", find.ID)
} else if find.UserUID != "" {
stmt = stmt.Where("user_uid = ?", find.UserUID)
} else if find.UserName != "" {
stmt = stmt.Where("user_name = ?", find.UserName)
} else if find.UserEmail != "" {
stmt = stmt.Where("primary_email = ?", find.UserEmail)
// Build query.
stmt := Db()
if find.ID != 0 {
stmt = stmt.Where("id = ?", find.ID)
} else if find.UserUID != "" {
stmt = stmt.Where("user_uid = ?", find.UserUID)
} else if find.UserName != "" {
stmt = stmt.Where("user_name = ?", find.UserName)
} else if find.UserEmail != "" {
stmt = stmt.Where("primary_email = ?", find.UserEmail)
} else {
return nil
}
// Find matching record.
if err := stmt.First(m).Error; err != nil {
return nil
}
return m
} else {
return nil
}
// Find matching record.
if err := stmt.First(m).Error; err != nil {
return nil
}
return m
}
// FindLegacyUsers finds registered legacy users.
func FindLegacyUsers(search string) legacy.Users {
result := legacy.Users{}
// Make sure that the legacy.User table exists to avoid DBMS errors
if Db().Migrator().HasTable(&legacy.User{}) {
stmt := Db()
stmt := Db()
search = strings.TrimSpace(search)
search = strings.TrimSpace(search)
if search == "all" {
// Don't filter.
} else if id := txt.Int(search); id != 0 {
stmt = stmt.Where("id = ?", id)
} else if rnd.IsUID(search, UserUID) {
stmt = stmt.Where("user_uid = ?", search)
} else if search != "" {
stmt = stmt.Where("user_name LIKE ? OR primary_email LIKE ? OR full_name LIKE ?", search+"%", search+"%", search+"%")
} else {
stmt = stmt.Where("id > 0")
}
if search == "all" {
// Don't filter.
} else if id := txt.Int(search); id != 0 {
stmt = stmt.Where("id = ?", id)
} else if rnd.IsUID(search, UserUID) {
stmt = stmt.Where("user_uid = ?", search)
} else if search != "" {
stmt = stmt.Where("user_name LIKE ? OR primary_email LIKE ? OR full_name LIKE ?", search+"%", search+"%", search+"%")
} else {
stmt = stmt.Where("id > 0")
stmt.Order("id").Find(&result)
}
stmt.Order("id").Find(&result)
return result
}

View File

@@ -10,53 +10,67 @@ import (
)
func TestFindLegacyUser(t *testing.T) {
// Handle the case where a real database is used rather than an in-memory SQLite db
if Db().Migrator().HasTable("users") {
if err := Db().Migrator().DropTable(legacy.User{}); err != nil {
log.Errorf("TestFindLegacyUser: failed dropping legacy.User")
t.Error(err)
}
}
notFound := FindLegacyUser(Admin)
assert.Nil(t, notFound)
// t.Logf("Legacy Admin: %#v", notFound)
if err := Db().AutoMigrate(legacy.User{}).Error; err != nil {
if err := Db().AutoMigrate(legacy.User{}); err != nil {
log.Debugf("TestFindLegacyUser: %s (waiting 1s)", err.Error())
time.Sleep(time.Second)
if err = Db().AutoMigrate(legacy.User{}).Error; err != nil {
if err = Db().AutoMigrate(legacy.User{}); err != nil {
log.Errorf("TestFindLegacyUser: failed migrating legacy.User")
t.Error(err)
}
}
Db().Save(legacy.Admin)
Db().Save(&legacy.Admin)
found := FindLegacyUser(Admin)
assert.NotNil(t, found)
// t.Logf("Legacy Admin: %#v", found)
if err := Db().DropTable(legacy.User{}).Error; err != nil {
if err := Db().Migrator().DropTable(legacy.User{}); err != nil {
log.Errorf("TestFindLegacyUser: failed dropping legacy.User")
t.Error(err)
}
}
func TestFindLegacyUsers(t *testing.T) {
// Handle the case where a real database is used rather than an in-memory SQLite db
if Db().Migrator().HasTable("users") {
if err := Db().Migrator().DropTable(legacy.User{}); err != nil {
log.Errorf("TestFindLegacyUser: failed dropping legacy.User")
t.Error(err)
}
}
notFound := FindLegacyUsers("all")
assert.Len(t, notFound, 0)
// t.Logf("Legacy Users: %#v", notFound)
if err := Db().AutoMigrate(legacy.User{}).Error; err != nil {
if err := Db().AutoMigrate(legacy.User{}); err != nil {
log.Debugf("TestFindLegacyUser: %s (waiting 1s)", err.Error())
time.Sleep(time.Second)
if err = Db().AutoMigrate(legacy.User{}).Error; err != nil {
if err = Db().AutoMigrate(legacy.User{}); err != nil {
log.Errorf("TestFindLegacyUser: failed migrating legacy.User")
t.Error(err)
}
}
Db().Save(legacy.Admin)
Db().Save(&legacy.Admin)
found := FindLegacyUsers("all")
@@ -65,7 +79,7 @@ func TestFindLegacyUsers(t *testing.T) {
// t.Logf("Legacy Users: %#v", found)
if err := Db().DropTable(legacy.User{}).Error; err != nil {
if err := Db().Migrator().DropTable(legacy.User{}); err != nil {
log.Errorf("TestFindLegacyUser: failed dropping legacy.User")
t.Error(err)
}

View File

@@ -11,16 +11,16 @@ import (
// UserSettings represents user preferences.
type UserSettings struct {
UserUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;" json:"-" yaml:"UserUID"`
UITheme string `gorm:"type:VARBINARY(32);column:ui_theme;" json:"UITheme,omitempty" yaml:"UITheme,omitempty"`
UserUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false" json:"-" yaml:"UserUID"`
UITheme string `gorm:"type:bytes;size:32;column:ui_theme;" json:"UITheme,omitempty" yaml:"UITheme,omitempty"`
UIStartPage string `gorm:"size:64;column:ui_start_page;default:'default';" json:"UIStartPage,omitempty" yaml:"UIStartPage,omitempty"`
UILanguage string `gorm:"type:VARBINARY(32);column:ui_language;" json:"UILanguage,omitempty" yaml:"UILanguage,omitempty"`
UITimeZone string `gorm:"type:VARBINARY(64);column:ui_time_zone;" json:"UITimeZone,omitempty" yaml:"UITimeZone,omitempty"`
MapsStyle string `gorm:"type:VARBINARY(32);" json:"MapsStyle,omitempty" yaml:"MapsStyle,omitempty"`
UILanguage string `gorm:"type:bytes;size:32;column:ui_language;" json:"UILanguage,omitempty" yaml:"UILanguage,omitempty"`
UITimeZone string `gorm:"type:bytes;size:64;column:ui_time_zone;" json:"UITimeZone,omitempty" yaml:"UITimeZone,omitempty"`
MapsStyle string `gorm:"type:bytes;size:32;" json:"MapsStyle,omitempty" yaml:"MapsStyle,omitempty"`
MapsAnimate int `gorm:"default:0;" json:"MapsAnimate,omitempty" yaml:"MapsAnimate,omitempty"`
IndexPath string `gorm:"type:VARBINARY(1024);" json:"IndexPath,omitempty" yaml:"IndexPath,omitempty"`
IndexPath string `gorm:"type:bytes;size:1024;" json:"IndexPath,omitempty" yaml:"IndexPath,omitempty"`
IndexRescan int `gorm:"default:0;" json:"IndexRescan,omitempty" yaml:"IndexRescan,omitempty"`
ImportPath string `gorm:"type:VARBINARY(1024);" json:"ImportPath,omitempty" yaml:"ImportPath,omitempty"`
ImportPath string `gorm:"type:bytes;size:1024;" json:"ImportPath,omitempty" yaml:"ImportPath,omitempty"`
ImportMove int `gorm:"default:0;" json:"ImportMove,omitempty" yaml:"ImportMove,omitempty"`
DownloadOriginals int `gorm:"default:0;" json:"DownloadOriginals,omitempty" yaml:"DownloadOriginals,omitempty"`
DownloadMediaRaw int `gorm:"default:0;" json:"DownloadMediaRaw,omitempty" yaml:"DownloadMediaRaw,omitempty"`
@@ -28,7 +28,7 @@ type UserSettings struct {
SearchListView int `gorm:"default:0;" json:"SearchListView,omitempty" yaml:"SearchListView,omitempty"`
SearchShowTitles int `gorm:"default:0;" json:"SearchShowTitles,omitempty" yaml:"SearchShowTitles,omitempty"`
SearchShowCaptions int `gorm:"default:0;" json:"SearchShowCaptions,omitempty" yaml:"SearchShowCaptions,omitempty"`
UploadPath string `gorm:"type:VARBINARY(1024);" json:"UploadPath,omitempty" yaml:"UploadPath,omitempty"`
UploadPath string `gorm:"type:bytes;size:1024;" json:"UploadPath,omitempty" yaml:"UploadPath,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"-"`
}
@@ -53,7 +53,9 @@ func CreateUserSettings(user *User) error {
return fmt.Errorf("empty user uid")
}
user.UserSettings = &UserSettings{}
user.UserSettings = &UserSettings{
UserUID: user.UserUID,
}
if err := Db().Where("user_uid = ?", user.GetUID()).First(user.UserSettings).Error; err == nil {
return nil
@@ -74,6 +76,9 @@ func (m *UserSettings) Create() error {
// Save updates the record in the database or inserts a new record if it does not already exist.
func (m *UserSettings) Save() error {
if m.UserUID == "" {
return fmt.Errorf("Cannot save user settings without UserUID")
}
return Db().Save(m).Error
}

View File

@@ -16,6 +16,7 @@ func TestCreateUserSettings(t *testing.T) {
})
t.Run("Success", func(t *testing.T) {
m := &User{UserUID: "1234"}
Db().Create(m) // Have to create a user BEFORE adding settings to it.
err := CreateUserSettings(m)
if err != nil {
@@ -35,11 +36,12 @@ func TestUserSettings_Updates(t *testing.T) {
m := &User{
UserUID: "1234",
UserSettings: &UserSettings{
UserUID: "1234", // Required to prevent WHERE conditions required error.
UITheme: "carbon",
UILanguage: "de",
}}
m.UserSettings.Updates(UserSettings{UITheme: "vanta", UILanguage: "en"})
assert.Nil(t, m.UserSettings.Updates(UserSettings{UITheme: "vanta", UILanguage: "en"}))
assert.Equal(t, "vanta", m.UserSettings.UITheme)
assert.Equal(t, "en", m.UserSettings.UILanguage)
}

View File

@@ -63,13 +63,13 @@ func (m UserShares) Contains(uid string) bool {
// UserShare represents content shared with a user.
type UserShare struct {
UserUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;" json:"-" yaml:"UserUID"`
ShareUID string `gorm:"type:VARBINARY(42);primary_key;index;" json:"ShareUID" yaml:"ShareUID"`
LinkUID string `gorm:"type:VARBINARY(42);" json:"LinkUID,omitempty" yaml:"LinkUID,omitempty"`
UserUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;" json:"-" yaml:"UserUID"`
ShareUID string `gorm:"type:bytes;size:42;primaryKey;index;" json:"ShareUID" yaml:"ShareUID"`
LinkUID string `gorm:"type:bytes;size:42;" json:"LinkUID,omitempty" yaml:"LinkUID,omitempty"`
ExpiresAt *time.Time `sql:"index" json:"ExpiresAt,omitempty" yaml:"ExpiresAt,omitempty"`
Comment string `gorm:"size:512;" json:"Comment,omitempty" yaml:"Comment,omitempty"`
Perm uint `json:"Perm,omitempty" yaml:"Perm,omitempty"`
RefID string `gorm:"type:VARBINARY(16);" json:"-" yaml:"-"`
RefID string `gorm:"type:bytes;size:16;" json:"-" yaml:"-"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"-"`
}
@@ -159,7 +159,7 @@ func (m *UserShare) UpdateLink(link Link) error {
m.UpdatedAt = Now()
m.ExpiresAt = link.ExpiresAt()
values := Map{
values := map[string]interface{}{
"link_uid": m.LinkUID,
"expires_at": m.ExpiresAt,
"comment": m.Comment,

View File

@@ -103,12 +103,17 @@ func TestFindUserShares(t *testing.T) {
}
func TestUserShare_Create(t *testing.T) {
m := UserShare{}
newUser := &User{ID: 942198341, UserName: "testUser41"}
Db().Create(newUser)
m := UserShare{UserUID: newUser.UserUID}
err := m.Create()
if err != nil {
t.Fatal(err)
}
UnscopedDb().Delete(m)
UnscopedDb().Delete(newUser)
}
func TestUserShare_UpdateLink(t *testing.T) {

View File

@@ -1,11 +1,15 @@
package entity
import (
"bytes"
"os"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/zitadel/oidc/v3/pkg/oidc"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/form"
@@ -380,7 +384,8 @@ func TestUser_InvalidPassword(t *testing.T) {
assert.True(t, m.InvalidPassword("wrong-password"))
})
t.Run("no password existing", func(t *testing.T) {
p := User{UserUID: "u000000000000010", UserName: "Hans", DisplayName: ""}
expected := rnd.GenerateUID(UserUID)
p := User{UserUID: expected, UserName: "HansNP", DisplayName: ""}
err := p.Save()
if err != nil {
t.Fatal(err)
@@ -664,16 +669,24 @@ func TestUser_SameUID(t *testing.T) {
func TestUser_String(t *testing.T) {
t.Run("UID", func(t *testing.T) {
p := User{UserUID: "abc123", UserName: "", DisplayName: ""}
assert.Equal(t, "abc123", p.String())
expected := rnd.GenerateUID(UserUID) // Use a valid UID, otherwise it will be replaced by BeforeCreate.
p := User{UserUID: expected, UserName: "", DisplayName: ""}
p.Create()
// Gorm v2 fails internally in p.String() because UserDetails cannot be saved when there is no user record.
assert.Equal(t, expected, p.String())
UnscopedDb().Delete(&p)
})
t.Run("FullName", func(t *testing.T) {
p := User{UserUID: "abc123", UserName: "", DisplayName: "Test"}
p.Create()
assert.Equal(t, "'Test'", p.String())
UnscopedDb().Delete(&p)
})
t.Run("UserName", func(t *testing.T) {
p := User{UserUID: "abc123", UserName: "Super-User ", DisplayName: "Test"}
p.Create()
assert.Equal(t, "'super-user'", p.String())
UnscopedDb().Delete(&p)
})
}
@@ -1115,8 +1128,8 @@ func TestUser_UpdateLoginTime(t *testing.T) {
})
t.Run("User deleted", func(t *testing.T) {
u := NewUser()
var deleted = time.Date(2020, 3, 6, 2, 6, 51, 0, time.UTC)
u.DeletedAt = &deleted
var deleted = gorm.DeletedAt{Time: time.Date(2020, 3, 6, 2, 6, 51, 0, time.UTC), Valid: true}
u.DeletedAt = deleted
assert.Nil(t, u.UpdateLoginTime())
})
}
@@ -1949,6 +1962,11 @@ func TestUser_FullName(t *testing.T) {
CanInvite: false,
}
if err := u.Create(); err != nil {
t.Logf("user create fail %s", err)
t.FailNow()
}
assert.Equal(t, "Mr-Happy", u.FullName())
u.UserName = "mr.happy@cat.com"
@@ -1966,6 +1984,11 @@ func TestUser_FullName(t *testing.T) {
u.SetDisplayName("Jane Doe", SrcManual)
assert.Equal(t, "Jane Doe", u.FullName())
if err := UnscopedDb().Delete(&u).Error; err != nil {
t.Logf("user delete fail %s", err)
t.FailNow()
}
})
t.Run("Name from Details", func(t *testing.T) {
u := User{
@@ -2000,9 +2023,19 @@ func TestUser_FullName(t *testing.T) {
CanInvite: false,
}
if err := u.Create(); err != nil {
t.Logf("user create fail %s", err)
t.FailNow()
}
assert.Equal(t, "jens.mander", u.Handle())
assert.Equal(t, "domain\\jens mander", u.Username())
assert.Equal(t, "Jens Mander", u.FullName())
if err := UnscopedDb().Delete(&u).Error; err != nil {
t.Logf("user delete fail %s", err)
t.FailNow()
}
})
}
@@ -2239,7 +2272,116 @@ func TestUser_RedeemToken(t *testing.T) {
assert.Equal(t, "as6sg6bxpogaaba9", m.UserShares[0].ShareUID)
assert.Equal(t, 1, m.RedeemToken("4jxf3jfn2k"))
m.RefreshShares()
assert.Equal(t, "as6sg6bxpogaaba7", m.UserShares[0].ShareUID)
assert.Equal(t, "as6sg6bxpogaaba9", m.UserShares[1].ShareUID)
// m.UserShares is not ordered, so sometimes this test would fail
assert.Equal(t, 2, len(m.UserShares))
var shareUIDs []string
for _, shareUID := range m.UserShares {
shareUIDs = append(shareUIDs, shareUID.ShareUID)
}
assert.Contains(t, shareUIDs, "as6sg6bxpogaaba7")
assert.Contains(t, shareUIDs, "as6sg6bxpogaaba9")
})
}
func TestUser_ValidatePreload(t *testing.T) {
t.Run("FindUser_UserDetails", func(t *testing.T) {
// Setup and capture SQL Logging output
beforeLogMode := Db().Config.Logger
buffer := bytes.Buffer{}
Db().Config.Logger = Db().Config.Logger.LogMode(logger.Info)
log.SetOutput(&buffer)
m := FindUser(User{ID: 7}) // User = Bob
// Reset logger
log.SetOutput(os.Stdout)
Db().Config.Logger = beforeLogMode
assert.Equal(t, "bob", m.UserName)
assert.Equal(t, "Robert Rich", m.DisplayName)
assert.NotEmpty(t, m.UserDetails)
assert.Equal(t, "Bob", m.UserDetails.NickName)
assert.Equal(t, 1981, m.UserDetails.BirthYear)
assert.Contains(t, buffer.String(), "auth_users_details")
// Verify that Preload loaded the data
assert.NotContains(t, buffer.String(), "auth_user_details.go")
})
t.Run("FindUser_UserSettings", func(t *testing.T) {
// Setup and capture SQL Logging output
beforeLogMode := Db().Config.Logger
buffer := bytes.Buffer{}
Db().Config.Logger = Db().Config.Logger.LogMode(logger.Info)
log.SetOutput(&buffer)
m := FindUser(User{ID: 7}) // User = Bob
// Reset logger
log.SetOutput(os.Stdout)
Db().Config.Logger = beforeLogMode
assert.Equal(t, "bob", m.UserName)
assert.Equal(t, "Robert Rich", m.DisplayName)
assert.NotEmpty(t, m.UserSettings)
assert.Equal(t, "grayscale", m.UserSettings.UITheme)
assert.Equal(t, "topographique", m.UserSettings.MapsStyle)
assert.Contains(t, buffer.String(), "auth_users_settings")
// Verify that Preload loaded the data
assert.NotContains(t, buffer.String(), "auth_users_settings.go")
})
t.Run("FindLocalUser_UserSettings", func(t *testing.T) {
// Setup and capture SQL Logging output
beforeLogMode := Db().Config.Logger
buffer := bytes.Buffer{}
Db().Config.Logger = Db().Config.Logger.LogMode(logger.Info)
log.SetOutput(&buffer)
m := FindLocalUser("jane")
// Reset logger
log.SetOutput(os.Stdout)
Db().Config.Logger = beforeLogMode
if m == nil {
t.Fatal("result should not be nil")
}
assert.Equal(t, "jane", m.UserName)
assert.Equal(t, "Jane Dow", m.DisplayName)
assert.NotEmpty(t, m.UserSettings)
assert.Equal(t, "default", m.UserSettings.UITheme)
assert.Equal(t, "hybrid", m.UserSettings.MapsStyle)
assert.Contains(t, buffer.String(), "auth_users_settings")
// Verify that Preload loaded the data
assert.NotContains(t, buffer.String(), "auth_users_settings.go")
})
t.Run("FindLocalUser_UserDetails", func(t *testing.T) {
// Setup and capture SQL Logging output
beforeLogMode := Db().Config.Logger
buffer := bytes.Buffer{}
Db().Config.Logger = Db().Config.Logger.LogMode(logger.Info)
log.SetOutput(&buffer)
m := FindLocalUser("jane")
// Reset logger
log.SetOutput(os.Stdout)
Db().Config.Logger = beforeLogMode
if m == nil {
t.Fatal("result should not be nil")
}
assert.Equal(t, "jane", m.UserName)
assert.Equal(t, "Jane Dow", m.DisplayName)
assert.NotEmpty(t, m.UserDetails)
assert.Equal(t, "Jane", m.UserDetails.NickName)
assert.Equal(t, 2001, m.UserDetails.BirthYear)
assert.Contains(t, buffer.String(), "auth_users_details")
// Verify that Preload loaded the data
assert.NotContains(t, buffer.String(), "auth_users_details.go")
})
}

View File

@@ -5,6 +5,9 @@ import (
"sync"
"time"
"github.com/ulule/deepcopier"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/clean"
"github.com/photoprism/photoprism/pkg/txt"
@@ -17,17 +20,17 @@ type Cameras []Camera
// Camera model and make (as extracted from UpdateExif metadata)
type Camera struct {
ID uint `gorm:"primary_key" json:"ID" yaml:"ID"`
CameraSlug string `gorm:"type:VARBINARY(160);unique_index;" json:"Slug" yaml:"-"`
CameraName string `gorm:"type:VARCHAR(160);" json:"Name" yaml:"Name"`
CameraMake string `gorm:"type:VARCHAR(160);" json:"Make" yaml:"Make,omitempty"`
CameraModel string `gorm:"type:VARCHAR(160);" json:"Model" yaml:"Model,omitempty"`
CameraType string `gorm:"type:VARCHAR(100);" json:"Type,omitempty" yaml:"Type,omitempty"`
CameraDescription string `gorm:"type:VARCHAR(2048);" json:"Description,omitempty" yaml:"Description,omitempty"`
CameraNotes string `gorm:"type:VARCHAR(1024);" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CreatedAt time.Time `json:"-" yaml:"-"`
UpdatedAt time.Time `json:"-" yaml:"-"`
DeletedAt *time.Time `sql:"index" json:"-" yaml:"-"`
ID uint `gorm:"primaryKey;" json:"ID" yaml:"ID"`
CameraSlug string `gorm:"type:bytes;size:160;uniqueIndex;" json:"Slug" yaml:"-"`
CameraName string `gorm:"size:160;" json:"Name" yaml:"Name"`
CameraMake string `gorm:"size:160;" json:"Make" yaml:"Make,omitempty"`
CameraModel string `gorm:"size:160;" json:"Model" yaml:"Model,omitempty"`
CameraType string `gorm:"size:100;" json:"Type,omitempty" yaml:"Type,omitempty"`
CameraDescription string `gorm:"size:2048;" json:"Description,omitempty" yaml:"Description,omitempty"`
CameraNotes string `gorm:"size:1024;" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CreatedAt time.Time `json:"-" yaml:"-"`
UpdatedAt time.Time `json:"-" yaml:"-"`
DeletedAt gorm.DeletedAt `sql:"index" json:"-" yaml:"-"`
}
// TableName returns the entity table name.
@@ -182,3 +185,14 @@ func (m *Camera) Mobile() bool {
func (m *Camera) Unknown() bool {
return m.CameraSlug == "" || m.CameraSlug == UnknownCamera.CameraSlug
}
// ScopedSearchFirstCamera populates camera with the first result of Where(query, values...), excluding soft-deleted records.
func ScopedSearchFirstCamera(camera *Camera, query string, values ...interface{}) (tx *gorm.DB) {
// Copy the first matching record into camera if one is found.
stmt := Db()
tempCamera := &Camera{}
if tx = stmt.Where(query, values...).First(tempCamera); tx.Error == nil {
deepcopier.Copy(tempCamera).To(camera)
}
return tx
}
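
A minimal usage sketch for the new helper; the slug value below is a placeholder rather than one taken from a real record, and the package-level log is used as in the tests above:

// Illustration only: look up a camera by slug; soft-deleted rows are excluded by GORM's default scope.
camera := Camera{}
if res := ScopedSearchFirstCamera(&camera, "camera_slug = ?", "canon-eos-5d"); res.Error != nil {
	log.Errorf("camera: %s while searching for camera", res.Error)
} else if res.RowsAffected > 0 {
	log.Infof("camera: found %s", camera.CameraSlug)
}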

View File

@@ -2,6 +2,8 @@ package entity
import (
"time"
"gorm.io/gorm"
)
type CameraMap map[string]Camera
@@ -34,7 +36,7 @@ var CameraFixtures = CameraMap{
CameraNotes: "",
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"canon-eos-5d": {
ID: 1000001,
@@ -47,7 +49,7 @@ var CameraFixtures = CameraMap{
CameraNotes: "",
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"canon-eos-7d": {
ID: 1000002,
@@ -60,7 +62,7 @@ var CameraFixtures = CameraMap{
CameraNotes: "",
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"canon-eos-6d": {
ID: 1000003,
@@ -73,7 +75,7 @@ var CameraFixtures = CameraMap{
CameraNotes: "",
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"apple-iphone-6": {
ID: 1000004,
@@ -86,7 +88,7 @@ var CameraFixtures = CameraMap{
CameraNotes: "",
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"apple-iphone-7": {
ID: 1000005,
@@ -99,7 +101,7 @@ var CameraFixtures = CameraMap{
CameraNotes: "",
CreatedAt: Now(),
UpdatedAt: Now(),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
}
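
The fixtures change DeletedAt from a nil *time.Time to the gorm.DeletedAt zero value, which GORM v2 treats as "not deleted". A minimal sketch of the standard GORM v2 soft-delete behaviour this relies on (generic GORM semantics, not code from this change):

// With DeletedAt of type gorm.DeletedAt, the zero value marks a row as live.
var cams Cameras
Db().Find(&cams)                  // implicitly filtered by: deleted_at IS NULL
Db().Unscoped().Find(&cams)       // also returns soft-deleted rows
Db().Delete(&Camera{ID: 1000005}) // sets deleted_at instead of removing the row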

View File

@@ -4,6 +4,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/pkg/rnd"
)
func TestFirstOrCreateCamera(t *testing.T) {
@@ -253,3 +255,52 @@ func TestCamera_Mobile(t *testing.T) {
assert.True(t, camera.Mobile())
})
}
func TestCamera_ScopedSearchFirst(t *testing.T) {
t.Run("Ok", func(t *testing.T) {
m := CameraFixtures.Get("apple-iphone-se")
Db().Save(&m) // reset back to base
camera := Camera{}
if res := ScopedSearchFirstCamera(&camera, "camera_slug = ?", CameraFixtures.Get("apple-iphone-se").CameraSlug); res.Error != nil {
assert.Nil(t, res.Error)
t.FailNow()
}
camera1 := CameraFixtures.Get("apple-iphone-se")
// Only check the fields that ScopedSearchFirstCamera copies into the result.
assert.Equal(t, camera1.ID, camera.ID)
assert.Equal(t, camera1.CameraSlug, camera.CameraSlug)
assert.Equal(t, camera1.CameraName, camera.CameraName)
assert.Equal(t, camera1.CameraMake, camera.CameraMake)
assert.Equal(t, camera1.CameraModel, camera.CameraModel)
assert.Equal(t, camera1.CameraType, camera.CameraType)
assert.Equal(t, camera1.CameraDescription, camera.CameraDescription)
assert.Equal(t, camera1.CameraNotes, camera.CameraNotes)
})
t.Run("Nothing Found", func(t *testing.T) {
camera := Camera{}
if res := ScopedSearchFirstCamera(&camera, "camera_slug = ?", rnd.UUID()); res.Error != nil {
assert.NotNil(t, res.Error)
assert.ErrorContains(t, res.Error, "record not found")
} else {
assert.Equal(t, int64(0), res.RowsAffected)
}
})
t.Run("Error", func(t *testing.T) {
camera := Camera{}
log.Info("Expect unknown column Error or SQLSTATE on camera_slugs from ScopedSearchFirstCamera")
if res := ScopedSearchFirstCamera(&camera, "camera_slugs = ?", rnd.UUID()); res.Error == nil {
assert.NotNil(t, res.Error)
t.FailNow()
} else {
assert.Error(t, res.Error)
assert.ErrorContains(t, res.Error, "camera_slugs")
assert.Equal(t, int64(0), res.RowsAffected)
}
})
}

View File

@@ -2,10 +2,10 @@ package entity
// Category of labels regroups labels with the same or a similar meaning using a main/root label
type Category struct {
LabelID uint `gorm:"primary_key;auto_increment:false"`
CategoryID uint `gorm:"primary_key;auto_increment:false"`
Label *Label
Category *Label
LabelID uint `gorm:"primaryKey;autoIncrement:false"`
CategoryID uint `gorm:"primaryKey;autoIncrement:false"`
Label *Label `gorm:"foreignKey:LabelID"`
Category *Label `gorm:"foreignKey:CategoryID"`
}
// TableName returns the entity table name.
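
Since both associations reference the same Label entity, the explicit foreignKey tags make it explicit which column drives each relation. A minimal sketch of how the relations could then be preloaded (illustration only, with a placeholder ID):

// Load a category row together with both related labels.
cat := Category{}
Db().Preload("Label").
	Preload("Category").
	Where("label_id = ?", 1000000). // placeholder ID
	First(&cat)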

Some files were not shown because too many files have changed in this diff.