diff --git a/.travis.yml b/.travis.yml
index b7b0f49ca5..8e945a6454 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,7 +11,7 @@ language: php
php:
# Test oldest and newest maintained versions.
- '7.3'
- # - '8.0'
+ - '8.0'
env:
# Test oldest and newest maintained versions.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 589c09db01..3eab4ae432 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,46 @@
+# Version 6.3.0
+*2021-09-21*
+
+* Support for Elasticsearch indexes which contain samples as documents (including empty samples).
+ These can be enabled for access via the REST API.
+* Addition of occurrences.verifier_only_data field to support data synced from other systems where
+ the data is supplied with attribute values that are only permitted to be used for verification.
+* Code updates for PHP 8 compatibility and updated unit test libraries.
+* Improvements to sensitivity handling for sample cache data, including:
+ * Addition of sensitive & private flags.
+  * Blurring the public geometry when any contained occurrences are sensitive.
+ * The public_entered_sref is now populated with the blurred and localised grid reference when
+ there are sensitive records in a sample. Formerly it was left null.
+  * Fix for a bug where the map square links were not being populated for the full-precision copy
+    of sensitive records.
+* Adds the following fields to the samples cache tables for consistency with the occurrences cache
+  tables:
+ * cache_samples_functional.external_key
+ * cache_samples_functional.sensitive
+ * cache_samples_functional.private
+ * cache_samples_nonfunctional.output_sref
+ * cache_samples_nonfunctional.output_sref_system
+ * cache_samples_nonfunctional.private
+* Updating an occurrence in isolation (via web services) now updates the tracking ID associated
+ with the sample that contains the occurrence. This is so that any sample data feeds receive an
+ updated copy of the sample, as the occurrence statistics will have changed.
+* Workflow events now allow filters on location or stage term. These are applied retrospectively
+  using a Work Queue task, allowing spatial indexing to be applied to the record first. For
+  example, this allows a workflow event's effect to be removed from a record if it does not fall
+  inside a boundary or if its stage is juvenile.
+* REST API module provides sync-taxon-observations and sync-annotations end-points designed for
+ synchronising records and verification decisions with remote servers.
+* New json_occurrences server type for the REST API Sync module which synchronises data with any
+ remote (Indicia or otherwise) server that supports the sync-taxon-observations and
+ sync-annotations API format.
+* Bug fixes.
+
+## Deprecation notice
+
+* The previously provided taxon-observations and annotations end-points in the REST API (which were
+ based on the defunct NBN Gateway Exchange Format) are now deprecated and may be removed in a
+ future version.
+
# Version 6.2.0
*2021-08-02*
diff --git a/application/config/version.php b/application/config/version.php
index 534aeb23c3..4dd66d37a5 100644
--- a/application/config/version.php
+++ b/application/config/version.php
@@ -29,14 +29,14 @@
*
* @var string
*/
-$config['version'] = '6.2.15';
+$config['version'] = '6.3.0';
/**
* Version release date.
*
* @var string
*/
-$config['release_date'] = '2021-09-14';
+$config['release_date'] = '2021-09-21';
/**
* Link to the code repository downloads page.
diff --git a/application/controllers/service_base.php b/application/controllers/service_base.php
index fbec7cab5d..c80b7feb2a 100644
--- a/application/controllers/service_base.php
+++ b/application/controllers/service_base.php
@@ -125,10 +125,10 @@ protected function authenticate($mode = 'write') {
$authentic = FALSE; // default
if (array_key_exists('nonce', $array) && array_key_exists('auth_token', $array)) {
$nonce = $array['nonce'];
- $this->cache = new Cache;
+ $this->cache = new Cache();
// Get all cache entries that match this nonce
$paths = $this->cache->exists($nonce);
- foreach($paths as $path) {
+ foreach ($paths as $path) {
// Find the parts of each file name, which is the cache entry ID, then the mode.
$tokens = explode('~', basename($path));
// check this cached nonce is for the correct read or write operation.
@@ -139,14 +139,16 @@ protected function authenticate($mode = 'write') {
$website = ORM::factory('website', $id);
if ($website->id)
$password = $website->password;
- } else
+ }
+ else {
$password = kohana::config('indicia.private_key');
+ }
// calculate the auth token from the nonce and the password. Does it match the request's auth token?
if (isset($password) && sha1("$nonce:$password")==$array['auth_token']) {
Kohana::log('info', "Authentication successful.");
// cache website_password for subsequent use by controllers
$this->website_password = $password;
- $authentic=true;
+ $authentic = TRUE;
}
if ($authentic) {
if ($id > 0) {
@@ -166,7 +168,7 @@ protected function authenticate($mode = 'write') {
$user = ORM::Factory('user', $this->user_id);
$this->user_is_core_admin = ($user->core_role_id === 1);
if (!$this->user_is_core_admin) {
- $this->user_websites = array();
+ $this->user_websites = [];
$userWebsites = ORM::Factory('users_website')->where(array(
'user_id' => $this->user_id,
'site_role_id is not' => NULL,
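
The authenticate() hunk above compares the supplied auth_token against the sha1 of the nonce and the shared password, so a client must build its token the same way from a nonce previously issued by the warehouse. A minimal sketch of that client-side calculation; the nonce and password values are placeholders for illustration only.

<?php

// Placeholder values: the nonce is issued and cached by the warehouse; the
// password is the website's registered password (or the warehouse private
// key for warehouse-level calls).
$nonce = 'example-nonce';
$password = 'example-shared-secret';

// Must match the sha1("$nonce:$password") check in authenticate() above.
$authToken = sha1("$nonce:$password");

// The nonce and auth_token then accompany the read or write request.
$requestParams = [
  'nonce' => $nonce,
  'auth_token' => $authToken,
];
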
diff --git a/application/helpers/postgreSQL.php b/application/helpers/postgreSQL.php
index 6acd07d0d2..919b79ee51 100644
--- a/application/helpers/postgreSQL.php
+++ b/application/helpers/postgreSQL.php
@@ -372,7 +372,8 @@ public static function list_fields($entity, $db = NULL) {
SELECT column_name, column_default, is_nullable, data_type, udt_name,
character_maximum_length, numeric_precision, numeric_precision_radix, numeric_scale
FROM information_schema.columns
- WHERE table_name = \'' . $entity . '\'
+ WHERE table_name = \'' . $entity . '\'
+ AND table_schema != \'information_schema\'
ORDER BY ordinal_position
');
diff --git a/application/libraries/MY_ORM.php b/application/libraries/MY_ORM.php
index 125de9e1e7..697ef492f7 100644
--- a/application/libraries/MY_ORM.php
+++ b/application/libraries/MY_ORM.php
@@ -61,7 +61,7 @@ public function last_query() {
return $this->db->last_query();
}
- public $submission = array();
+ public $submission = [];
/**
* Describes the list of nested models that are present after a submission.
@@ -70,8 +70,8 @@ public function last_query() {
*
* @var array
*/
- private $nestedChildModelIds = array();
- private $nestedParentModelIds = array();
+ private $nestedChildModelIds = [];
+ private $nestedParentModelIds = [];
/**
* Default search field name.
@@ -82,7 +82,7 @@ public function last_query() {
*/
public $search_field = 'title';
- protected $errors = array();
+ protected $errors = [];
/**
* Flag that gets set if a unique key violation has occurred on save.
@@ -101,12 +101,12 @@ public function last_query() {
* by a model. If not declared then the model will not transfer them to the saved data when
* posting a record.
*/
- protected $unvalidatedFields = array();
+ protected $unvalidatedFields = [];
/**
* @var array An array which a model can populate to declare additional fields that can be submitted for csv upload.
*/
- protected $additional_csv_fields = array();
+ protected $additional_csv_fields = [];
/**
* @var bool Does the model have custom attributes? Defaults to false.
@@ -128,6 +128,7 @@ public function last_query() {
/**
* Default behaviour on save is to update metadata. If we detect no changes we can skip this.
+ *
* @var bool
*/
public $wantToUpdateMetadata = TRUE;
@@ -139,7 +140,7 @@ public function last_query() {
*/
public $parentChanging = FALSE;
- private $attrValModels = array();
+ private $attrValModels = [];
/**
* @var array If a submission contains submodels, then the array of submodels can be keyed. This
@@ -147,7 +148,7 @@ public function last_query() {
* Normally, super/sub-models can handle foreign keys, but this approach is needed for association
* tables which join across 2 entities created by a submission.
*/
- private $dynamicRowIdReferences = array();
+ private $dynamicRowIdReferences = [];
/**
* Indicates database trigger on table which accesses a sequence.
@@ -163,8 +164,11 @@ public function last_query() {
/**
* Constructor allows plugins to modify the data model.
- * @var int $id ID of the record to load. If null then creates a new record. If -1 then the ORM
- * object is not initialised, providing access to the variables only.
+ *
+ * @var int $id
+ * ID of the record to load. If null then creates a new record. If -1 then
+ * the ORM object is not initialised, providing access to the variables
+ * only.
*/
public function __construct($id = NULL) {
if (is_object($id) || $id != -1) {
@@ -343,7 +347,7 @@ public function getAllErrors()
* Retrieve an array containing all page level errors which are marked with the key general.
*/
public function getPageErrors() {
- $r = array();
+ $r = [];
if (array_key_exists('general', $this->errors)) {
array_push($r, $this->errors['general']);
}
@@ -567,7 +571,7 @@ protected function canCreateFromCaption() {
* @return array, an array of record id values for the created records.
*/
private function createRecordsFromCaptions() {
- $r = array();
+ $r = [];
// Establish the right model and check it supports create from captions,
$modelname = $this->submission['fields']['insert_captions_to_create']['value'];
@@ -584,7 +588,7 @@ private function createRecordsFromCaptions() {
$sub = array(
'id' => $modelname,
'fields' => array(
- 'caption' => array()
+ 'caption' => []
)
);
// submit each caption to create a record, unless it exists
@@ -621,10 +625,10 @@ private function createRecordsFromCaptions() {
*/
private function createIdsFromCaptions($ids) {
$fieldname = $this->submission['fields']['insert_captions_use']['value'];
- if(empty($ids)){
- $this->submission['fields'][$fieldname] = array('value'=>array());
+ if (empty($ids)) {
+ $this->submission['fields'][$fieldname] = ['value'=>[]];
}
- else{
+ else {
$keys = array_fill(0, sizeof($ids), 'value');
$a = array_fill_keys($keys, $ids);
$this->submission['fields'][$fieldname] = $a;
@@ -688,28 +692,35 @@ protected function preSubmit() {
*/
protected function populateIdentifiers() {
if (array_key_exists('website_id', $this->submission['fields'])) {
- if (is_array($this->submission['fields']['website_id']))
+ if (is_array($this->submission['fields']['website_id'])) {
$this->identifiers['website_id'] = $this->submission['fields']['website_id']['value'];
- else
+ }
+ else {
$this->identifiers['website_id'] = $this->submission['fields']['website_id'];
+ }
}
if (array_key_exists('survey_id', $this->submission['fields'])) {
- if (is_array($this->submission['fields']['survey_id']))
+ if (is_array($this->submission['fields']['survey_id'])) {
$this->identifiers['survey_id'] = $this->submission['fields']['survey_id']['value'];
- else
+ }
+ else {
$this->identifiers['survey_id'] = $this->submission['fields']['survey_id'];
+ }
}
}
/**
* Wraps the process of submission in a transaction.
- * @return integer If successful, returns the id of the created/found record. If not, returns null - errors are embedded in the model.
+ *
+ * @return int
+ * If successful, returns the id of the created/found record. If not,
+ * returns null - errors are embedded in the model.
*/
public function submit() {
Kohana::log('debug', 'Commencing new transaction.');
$this->db->query('BEGIN;');
try {
- $this->errors = array();
+ $this->errors = [];
$this->preProcess();
$res = $this->inner_submit();
$this->postProcess();
@@ -720,8 +731,8 @@ public function submit() {
$res = NULL;
}
if ($res) {
- $allowCommitToDB = (isset($_GET['allow_commit_to_db']) ? $_GET['allow_commit_to_db'] : true);
- if (!empty($allowCommitToDB)&&$allowCommitToDB==true) {
+ $allowCommitToDB = (isset($_GET['allow_commit_to_db']) ? $_GET['allow_commit_to_db'] : TRUE);
+ if (!empty($allowCommitToDB) && $allowCommitToDB == TRUE) {
Kohana::log('debug', 'Committing transaction.');
$this->db->query('COMMIT;');
}
@@ -747,13 +758,16 @@ private function preProcess() {
}
/**
- * Handles any index rebuild requirements as a result of new or updated records, e.g. in
- * samples or occurrences. Also handles joining of occurrence_associations to the
- * correct records.
+ * Submission post-processing.
+ *
+ * Handles any index rebuild requirements as a result of new or updated
+ * records, e.g. in samples or occurrences. Also handles joining of
+ * occurrence_associations to the correct records.
*/
private function postProcess() {
if (class_exists('cache_builder')) {
$occurrences = [];
+ $deletedOccurrences = [];
if (!empty(self::$changedRecords['insert']['occurrence'])) {
cache_builder::insert($this->db, 'occurrences', self::$changedRecords['insert']['occurrence']);
$occurrences = self::$changedRecords['insert']['occurrence'];
@@ -764,6 +778,7 @@ private function postProcess() {
}
if (!empty(self::$changedRecords['delete']['occurrence'])) {
cache_builder::delete($this->db, 'occurrences', self::$changedRecords['delete']['occurrence']);
+ $deletedOccurrences = self::$changedRecords['delete']['occurrence'];
}
$samples = [];
if (!empty(self::$changedRecords['insert']['sample'])) {
@@ -785,6 +800,9 @@ private function postProcess() {
// No need to do occurrence map square update if inserting a sample, as
// the above code does the occurrences in bulk.
postgreSQL::insertMapSquaresForOccurrences($occurrences, $this->db);
+ // Need to ensure sample tracking is updated if occurrences change
+ // without a posted sample.
+ cache_builder::updateSampleTrackingForOccurrences($this->db, $occurrences + $deletedOccurrences);
}
}
if (!empty(self::$changedRecords['insert']['occurrence_association']) ||
@@ -801,7 +819,7 @@ private function postProcess() {
}
}
// Reset important if doing an import with multiple submissions.
- Occurrence_association_Model::$to_occurrence_id_pointers = array();
+ Occurrence_association_Model::$to_occurrence_id_pointers = [];
}
$this->createWorkQueueEntries();
}
@@ -912,7 +930,7 @@ public function inner_submit(){
else
$addTo=&self::$changedRecords['update'];
if (!isset($addTo[$this->object_name]))
- $addTo[$this->object_name] = array();
+ $addTo[$this->object_name] = [];
$addTo[$this->object_name][] = $this->id;
}
// Call postSubmit
@@ -978,7 +996,7 @@ protected function validateAndSubmit() {
// The easiest thing here is pretend the current value of any array
// column doesn't match. These array columns are used so rarely that this
// less optimised solution is not important.
- $exactMatches = array();
+ $exactMatches = [];
foreach ($thisValues as $column => $value) {
if (array_key_exists($column, $vArray) &&
!is_array($vArray[$column]) &&
@@ -1326,8 +1344,8 @@ private function checkRequiredAttributes() {
// Test if this model has an attributes sub-table. Also to have required attributes, we must be posting into a
// specified survey or website at least.
if ($this->has_attributes) {
- $got_values=array();
- $empties = array();
+ $got_values=[];
+ $empties = [];
if (isset($this->submission['metaFields'][$this->attrs_submission_name]))
{
// Old way of submitting attribute values but still supported - attributes are stored in a metafield. Find the ones we actually have a value for
@@ -1419,7 +1437,7 @@ protected function getRequiredFieldsCacheKey($typeFilter) {
*/
protected function getAttributes($required = FALSE, $typeFilter = NULL, $hasSurveyRestriction = TRUE) {
if (empty($this->identifiers['website_id']) && empty($this->identifiers['taxon_list_id'])) {
- return array();
+ return [];
}
$attr_entity = $this->object_name . '_attribute';
$this->db->select($attr_entity.'s.id', $attr_entity.'s.caption', $attr_entity.'s.data_type');
@@ -1523,7 +1541,7 @@ public function getSubmittableFields($fk = FALSE, array $identifiers = [], $attr
// currently can only have associations if a single superModel exists.
if($use_associations && count($struct['superModels']) === 1) {
// duplicate all the existing fields, but rename adding a 2 to model end.
- $newFields = array();
+ $newFields = [];
foreach($fields as $name=>$caption){
$parts=explode(':',$name);
if($parts[0]==$struct['model'] || $parts[0] == $struct['model'].'_image' || $parts[0] == $this->attrs_field_prefix) {
@@ -1560,7 +1578,7 @@ public function getRequiredFields($fk = FALSE, array $identifiers = [], $use_ass
$sub = $this->get_submission_structure();
$arr = new Validation(array('id'=>1));
$this->validate($arr, FALSE);
- $fields = array();
+ $fields = [];
foreach ($arr->errors() as $column=>$error) {
if ($error=='required') {
if ($fk && substr($column, -3) == "_id") {
@@ -1584,7 +1602,7 @@ public function getRequiredFields($fk = FALSE, array $identifiers = [], $use_ass
// currently can only have associations if a single superModel exists.
if($use_associations && count($sub['superModels'])===1){
// duplicate all the existing fields, but rename adding a 2 to model end.
- $newFields = array();
+ $newFields = [];
foreach($fields as $id){
$parts=explode(':',$id);
if($parts[0]==$sub['model'] || $parts[0]==$sub['model'].'_image' || $parts[0]==$this->attrs_field_prefix) {
@@ -1611,7 +1629,7 @@ public function getRequiredFields($fk = FALSE, array $identifiers = [], $use_ass
* @return array Prefixed key value pairs.
*/
public function getPrefixedValuesArray($prefix=NULL) {
- $r = array();
+ $r = [];
if (!$prefix) {
$prefix=$this->object_name;
}
@@ -1627,7 +1645,7 @@ public function getPrefixedValuesArray($prefix=NULL) {
* @return array Prefixed columns.
*/
protected function getPrefixedColumnsArray($fk=FALSE, $skipHiddenFields=TRUE) {
- $r = array();
+ $r = [];
$prefix=$this->object_name;
$sub = $this->get_submission_structure();
foreach ($this->table_columns as $column=>$type) {
@@ -2181,7 +2199,7 @@ public function get_submission_structure() {
* on creation of a new record.
*/
public function getDefaults() {
- return array();
+ return [];
}
/**
@@ -2200,8 +2218,8 @@ private function sanitise($array) {
*/
public function clear() {
parent::clear();
- $this->errors=array();
- $this->identifiers = array('website_id'=>NULL,'survey_id'=>NULL);
+ $this->errors = [];
+ $this->identifiers = ['website_id' => NULL, 'survey_id' => NULL];
}
/**
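
The revised submit() docblock above spells out the contract: the whole submission runs inside a transaction and, on failure, NULL is returned with the errors embedded in the model. A hedged sketch of driving that flow directly, using the fields => name => ['value' => ...] submission shape seen elsewhere in this class; the entity and values are placeholders.

<?php

// Hedged sketch: the occurrence entity and the values are placeholders used
// purely to illustrate the submit() contract described in the docblock above.
$occ = ORM::factory('occurrence');
$occ->submission = [
  'id' => 'occurrence',
  'fields' => [
    'website_id' => ['value' => 1],
    'sample_id' => ['value' => 123],
    'taxa_taxon_list_id' => ['value' => 456],
  ],
];

// submit() wraps preProcess(), inner_submit() and postProcess() in a
// BEGIN/COMMIT block and returns the record ID, or NULL on failure.
$id = $occ->submit();
if ($id === NULL) {
  // Errors are embedded in the model rather than thrown.
  $errors = $occ->getAllErrors();
}
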
diff --git a/application/libraries/WorkQueue.php b/application/libraries/WorkQueue.php
index 965704242b..5b96a8547b 100644
--- a/application/libraries/WorkQueue.php
+++ b/application/libraries/WorkQueue.php
@@ -72,7 +72,8 @@ public function enqueue($db, array $fields) {
'task=' . pg_escape_literal($fields['task']) .
'AND entity' . (empty($fields['entity']) ? ' IS NULL' : '=' . pg_escape_literal($fields['entity'])) .
'AND record_id' . (empty($fields['record_id']) ? ' IS NULL' : '=' . pg_escape_literal($fields['record_id'])) .
- 'AND params' . (empty($fields['params']) ? ' IS NULL' : '=' . pg_escape_literal($fields['params']));
+      // Cast both sides to JSONB so the equality comparison is valid in PostgreSQL.
+ 'AND params' . (empty($fields['params']) ? ' IS NULL' : ('::jsonb=' . pg_escape_literal($fields['params']) . '::jsonb'));
foreach ($fields as $value) {
$setValues[] = pg_escape_literal($value);
}
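
Because the duplicate check above now casts params to jsonb on both sides, two enqueue calls whose params encode the same JSON document, but with different key order or whitespace, are recognised as the same pending task. A hedged sketch of such a call; the task name and params are placeholders, and only the field names come from the query above.

<?php

// Hedged sketch: the task name, entity, record_id and params values are
// placeholders; only the field names are taken from the duplicate check in
// enqueue() above.
$db = Database::instance();
// The library class from application/libraries/WorkQueue.php.
$queue = new WorkQueue();
$queue->enqueue($db, [
  'task' => 'task_example_refresh',  // hypothetical task name
  'entity' => 'occurrence',
  'record_id' => 123,
  'params' => json_encode(['filter_id' => 7]),
]);

// Re-queuing the same work with '{"filter_id": 7}' instead of
// '{"filter_id":7}' no longer inserts a second row, because
// params::jsonb = '...'::jsonb compares the parsed documents.
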
diff --git a/application/models/occurrence.php b/application/models/occurrence.php
index 0c428f6629..6fe4b6c985 100644
--- a/application/models/occurrence.php
+++ b/application/models/occurrence.php
@@ -25,24 +25,27 @@
class Occurrence_Model extends ORM {
protected $requeuedForVerification = FALSE;
- protected $has_many = array(
+ protected $has_many = [
'occurrence_attribute_values',
'determinations',
- 'occurrence_media'
- );
- protected $belongs_to = array(
+ 'occurrence_media',
+ ];
+
+ protected $belongs_to = [
'determiner' => 'person',
'sample',
'taxa_taxon_list',
'created_by' => 'user',
'updated_by' => 'user',
- 'verified_by' => 'user'
- );
- // Declare that this model has child attributes, and the name of the node in the submission which contains them.
+ 'verified_by' => 'user',
+ ];
+
+ // Declare that this model has child attributes, and the name of the node in
+ // the submission which contains them.
protected $has_attributes = TRUE;
protected $attrs_submission_name = 'occAttributes';
public $attrs_field_prefix = 'occAttr';
- protected $additional_csv_fields = array(
+ protected $additional_csv_fields = [
// Extra lookup options.
'occurrence:fk_taxa_taxon_list:genus' => 'Genus (builds binomial name)',
'occurrence:fk_taxa_taxon_list:specific' => 'Specific name/epithet (builds binomial name)',
@@ -58,8 +61,8 @@ class Occurrence_Model extends ORM {
'occurrence_medium:path:3' => 'Media Path 3',
'occurrence_medium:caption:3' => 'Media Caption 3',
'occurrence_medium:path:4' => 'Media Path 4',
- 'occurrence_medium:caption:4' => 'Media Caption 4'
- );
+ 'occurrence_medium:caption:4' => 'Media Caption 4',
+ ];
// During an import it is possible to merge different columns in a CSV row to make a database field
public $specialImportFieldProcessingDefn = [
@@ -108,6 +111,9 @@ class Occurrence_Model extends ORM {
/**
* Returns a caption to identify this model instance.
+ *
+ * @return string
+ * Caption for instance.
*/
public function caption() {
return 'Record of ' . $this->taxa_taxon_list->taxon->taxon;
@@ -165,7 +171,7 @@ public function validate(Validation $array, $save = FALSE) {
$array->add_rules('taxa_taxon_list_id', 'required');
}
// Explicitly add those fields for which we don't do validation.
- $this->unvalidatedFields = array(
+ $this->unvalidatedFields = [
'comment',
'determiner_id',
'deleted',
@@ -184,7 +190,8 @@ public function validate(Validation $array, $save = FALSE) {
'sensitivity_precision',
'import_guid',
'metadata',
- );
+ 'verifier_only_data',
+ ];
if (array_key_exists('id', $fieldlist)) {
// Existing data must not be set to download_flag=F (final download) otherwise it
// is read only.
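
Adding verifier_only_data to $unvalidatedFields means a submitted value for it is now transferred to the saved occurrence without any validation rules, matching the changelog entry about data synced from other systems. A hedged fragment showing the field inside an occurrence submission's fields array, as sketched after the MY_ORM diff above; the values, and the JSON-style content used here, are placeholders.

<?php

// Hedged fragment: only the field names are real; the values and the
// JSON-style content of verifier_only_data are placeholders.
$fields = [
  'website_id' => ['value' => 1],
  'sample_id' => ['value' => 123],
  'taxa_taxon_list_id' => ['value' => 456],
  // Attribute values from the source system that are only permitted to be
  // used for verification; stored as supplied.
  'verifier_only_data' => ['value' => '{"Altitude": "250 m"}'],
];
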
diff --git a/application/views/attribute_by_survey/index.php b/application/views/attribute_by_survey/index.php
index ab6131b13e..50a5b208ca 100644
--- a/application/views/attribute_by_survey/index.php
+++ b/application/views/attribute_by_survey/index.php
@@ -223,7 +223,7 @@ function get_controls($block_id, array $controlFilter, $db) {
id"] = $attr->caption;
}
diff --git a/composer.json b/composer.json
index 25a01df3d6..8faa3968d8 100644
--- a/composer.json
+++ b/composer.json
@@ -1,10 +1,16 @@
{
+ "repositories": [
+ {
+ "type": "vcs",
+ "url": "https://github.com/misantron/dbunit"
+ }
+ ],
"require": {
"firebase/php-jwt": "^5.4",
"phpoffice/phpspreadsheet": "^1.18"
},
"require-dev": {
"phpunit/phpunit": "^9.5",
- "misantron/dbunit": "^5.1"
+ "misantron/dbunit": "dev-master"
}
}
diff --git a/composer.lock b/composer.lock
index f9d9ce3713..6eff9858be 100644
--- a/composer.lock
+++ b/composer.lock
@@ -4,7 +4,7 @@
"Read more about it at https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies",
"This file is @generated automatically"
],
- "content-hash": "c505b8dc101b76633c8ae935e66c78b2",
+ "content-hash": "2fccf8723ed1dfa22f7247880fd11bd9",
"packages": [
{
"name": "ezyang/htmlpurifier",
@@ -885,37 +885,45 @@
},
{
"name": "misantron/dbunit",
- "version": "5.1.0",
+ "version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/misantron/dbunit.git",
- "reference": "a3e5d3c74a2ae78827c86e14e3d06d7a8d44ca65"
+ "reference": "73a9c07dca119c68e92002adb4fab9022235a91f"
},
"dist": {
"type": "zip",
- "url": "https://api.github.com/repos/misantron/dbunit/zipball/a3e5d3c74a2ae78827c86e14e3d06d7a8d44ca65",
- "reference": "a3e5d3c74a2ae78827c86e14e3d06d7a8d44ca65",
+ "url": "https://api.github.com/repos/misantron/dbunit/zipball/73a9c07dca119c68e92002adb4fab9022235a91f",
+ "reference": "73a9c07dca119c68e92002adb4fab9022235a91f",
"shasum": ""
},
"require": {
- "ext-libxml": "*",
"ext-pdo": "*",
- "ext-simplexml": "*",
- "php": "^7.2|^7.3|^7.4",
- "phpunit/phpunit": "^8.5|^9.2",
- "symfony/yaml": "^4.4|^5.0"
+ "php": "^7.2 || ^8.0",
+ "phpunit/phpunit": "^8.5 || ^9.2",
+ "symfony/yaml": "^4.4 || ^5.0"
},
"require-dev": {
- "friendsofphp/php-cs-fixer": "^2.16",
- "php-coveralls/php-coveralls": "^2.2"
+ "friendsofphp/php-cs-fixer": "^2.18 || ^3.0",
+ "php-coveralls/php-coveralls": "^2.4",
+ "phpstan/phpstan": "^0.12.98",
+ "squizlabs/php_codesniffer": "^3.6"
},
+ "default-branch": true,
"type": "library",
"autoload": {
"psr-4": {
"PHPUnit\\DbUnit\\": "src/"
}
},
- "notification-url": "https://packagist.org/downloads/",
+ "autoload-dev": {
+ "psr-4": {
+ "PHPUnit\\DbUnit\\Tests\\": "tests/"
+ },
+ "files": [
+ "tests/_files/DatabaseTestUtility.php"
+ ]
+ },
"license": [
"MIT"
],
@@ -923,21 +931,20 @@
{
"name": "Aleksandr Ivanov",
"email": "misantron@gmail.com",
- "role": "developer"
+ "role": "Developer"
}
],
"description": "DbUnit fork supporting PHPUnit 8/9",
- "homepage": "https://github.com/misantron/dbunit/",
"keywords": [
"database",
- "dbUnit",
+ "dbunit",
"testing"
],
"support": {
- "issues": "https://github.com/misantron/dbunit/issues",
- "source": "https://github.com/misantron/dbunit/tree/master"
+ "source": "https://github.com/misantron/dbunit/tree/master",
+ "issues": "https://github.com/misantron/dbunit/issues"
},
- "time": "2020-07-07T20:48:08+00:00"
+ "time": "2021-09-09T19:30:23+00:00"
},
{
"name": "myclabs/deep-copy",
@@ -1324,33 +1331,33 @@
},
{
"name": "phpspec/prophecy",
- "version": "1.13.0",
+ "version": "1.14.0",
"source": {
"type": "git",
"url": "https://github.com/phpspec/prophecy.git",
- "reference": "be1996ed8adc35c3fd795488a653f4b518be70ea"
+ "reference": "d86dfc2e2a3cd366cee475e52c6bb3bbc371aa0e"
},
"dist": {
"type": "zip",
- "url": "https://api.github.com/repos/phpspec/prophecy/zipball/be1996ed8adc35c3fd795488a653f4b518be70ea",
- "reference": "be1996ed8adc35c3fd795488a653f4b518be70ea",
+ "url": "https://api.github.com/repos/phpspec/prophecy/zipball/d86dfc2e2a3cd366cee475e52c6bb3bbc371aa0e",
+ "reference": "d86dfc2e2a3cd366cee475e52c6bb3bbc371aa0e",
"shasum": ""
},
"require": {
"doctrine/instantiator": "^1.2",
- "php": "^7.2 || ~8.0, <8.1",
+ "php": "^7.2 || ~8.0, <8.2",
"phpdocumentor/reflection-docblock": "^5.2",
"sebastian/comparator": "^3.0 || ^4.0",
"sebastian/recursion-context": "^3.0 || ^4.0"
},
"require-dev": {
- "phpspec/phpspec": "^6.0",
+ "phpspec/phpspec": "^6.0 || ^7.0",
"phpunit/phpunit": "^8.0 || ^9.0"
},
"type": "library",
"extra": {
"branch-alias": {
- "dev-master": "1.11.x-dev"
+ "dev-master": "1.x-dev"
}
},
"autoload": {
@@ -1385,9 +1392,9 @@
],
"support": {
"issues": "https://github.com/phpspec/prophecy/issues",
- "source": "https://github.com/phpspec/prophecy/tree/1.13.0"
+ "source": "https://github.com/phpspec/prophecy/tree/1.14.0"
},
- "time": "2021-03-17T13:42:18+00:00"
+ "time": "2021-09-10T09:02:12+00:00"
},
{
"name": "phpunit/php-code-coverage",
@@ -1709,16 +1716,16 @@
},
{
"name": "phpunit/phpunit",
- "version": "9.5.7",
+ "version": "9.5.9",
"source": {
"type": "git",
"url": "https://github.com/sebastianbergmann/phpunit.git",
- "reference": "d0dc8b6999c937616df4fb046792004b33fd31c5"
+ "reference": "ea8c2dfb1065eb35a79b3681eee6e6fb0a6f273b"
},
"dist": {
"type": "zip",
- "url": "https://api.github.com/repos/sebastianbergmann/phpunit/zipball/d0dc8b6999c937616df4fb046792004b33fd31c5",
- "reference": "d0dc8b6999c937616df4fb046792004b33fd31c5",
+ "url": "https://api.github.com/repos/sebastianbergmann/phpunit/zipball/ea8c2dfb1065eb35a79b3681eee6e6fb0a6f273b",
+ "reference": "ea8c2dfb1065eb35a79b3681eee6e6fb0a6f273b",
"shasum": ""
},
"require": {
@@ -1730,7 +1737,7 @@
"ext-xml": "*",
"ext-xmlwriter": "*",
"myclabs/deep-copy": "^1.10.1",
- "phar-io/manifest": "^2.0.1",
+ "phar-io/manifest": "^2.0.3",
"phar-io/version": "^3.0.2",
"php": ">=7.3",
"phpspec/prophecy": "^1.12.1",
@@ -1796,7 +1803,7 @@
],
"support": {
"issues": "https://github.com/sebastianbergmann/phpunit/issues",
- "source": "https://github.com/sebastianbergmann/phpunit/tree/9.5.7"
+ "source": "https://github.com/sebastianbergmann/phpunit/tree/9.5.9"
},
"funding": [
{
@@ -1808,7 +1815,7 @@
"type": "github"
}
],
- "time": "2021-07-19T06:14:47+00:00"
+ "time": "2021-08-31T06:47:40+00:00"
},
{
"name": "sebastian/cli-parser",
@@ -3107,7 +3114,9 @@
],
"aliases": [],
"minimum-stability": "stable",
- "stability-flags": [],
+ "stability-flags": {
+ "misantron/dbunit": 20
+ },
"prefer-stable": false,
"prefer-lowest": false,
"platform": [],
diff --git a/docker/phpunit.sh b/docker/phpunit.sh
index 414ef6f983..8a8b2b1fbb 100755
--- a/docker/phpunit.sh
+++ b/docker/phpunit.sh
@@ -25,7 +25,7 @@ docker-compose -f docker-compose-phpunit.yml build \
--build-arg GID=$(id -g) \
--build-arg USER=$(id -un) \
--build-arg GROUP=$(id -gn) \
- --build-arg PHP_VERSION=7.3 \
+ --build-arg PHP_VERSION=8 \
--build-arg PG_VERSION=13 \
--build-arg PORT=$PORT
# When the container is brought up, the database will start
diff --git a/docker/warehouse/Dockerfile b/docker/warehouse/Dockerfile
index 54aa6e2a4c..ee7b201644 100644
--- a/docker/warehouse/Dockerfile
+++ b/docker/warehouse/Dockerfile
@@ -1,8 +1,6 @@
-# This image contains Debian's Apache httpd in conjunction with PHP7.3
-# (as mod_php) and uses mpm_prefork by default.
+# This image contains Debian's Apache httpd in conjunction with PHP8.0.
# https://hub.docker.com/_/php
-# Currently warehouse is not compatible with PHP 7.4
-FROM php:7.3-apache
+FROM php:8.0-apache
# Use PHP development configuration file
RUN mv "$PHP_INI_DIR/php.ini-development" "$PHP_INI_DIR/php.ini"
# Increase size of files which can be uploaded.
diff --git a/modules/cache_builder/config/cache_builder.php b/modules/cache_builder/config/cache_builder.php
index de820d3d1a..e6c2376d08 100644
--- a/modules/cache_builder/config/cache_builder.php
+++ b/modules/cache_builder/config/cache_builder.php
@@ -109,7 +109,7 @@
$config['termlists_terms']['join_needs_update'] = 'join needs_update_termlists_terms nu on nu.id=tlt.id and nu.deleted=false';
$config['termlists_terms']['key_field'] = 'tlt.id';
-//--------------------------------------------------------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
$config['taxa_taxon_lists']['get_missing_items_query'] = "
select distinct on (ttl.id) ttl.id, tl.deleted or ttl.deleted or ttlpref.deleted or t.deleted
@@ -313,7 +313,7 @@
$config['taxa_taxon_lists']['join_needs_update'] = 'join needs_update_taxa_taxon_lists nu on nu.id=ttl.id and nu.deleted=false';
$config['taxa_taxon_lists']['key_field'] = 'ttl.id';
-$config['taxa_taxon_lists']['extra_multi_record_updates'] = array(
+$config['taxa_taxon_lists']['extra_multi_record_updates'] = [
'setup' => "
-- Find children of updated taxa to ensure they are also changed.
WITH RECURSIVE q AS (
@@ -428,7 +428,7 @@
DROP TABLE descendants;
DROP TABLE ttl_path;
DROP TABLE master_list_paths;",
-);
+];
// --------------------------------------------------------------------------------------------------------------------------
@@ -880,10 +880,10 @@
group by id
";
-$config['samples']['delete_query'] = array(
+$config['samples']['delete_query'] = [
"delete from cache_samples_functional where id in (select id from needs_update_samples where deleted=true);
delete from cache_samples_nonfunctional where id in (select id from needs_update_samples where deleted=true);",
-);
+];
$config['samples']['update']['functional'] = "
UPDATE cache_samples_functional s_update
@@ -892,7 +892,7 @@
input_form=COALESCE(sp.input_form, s.input_form),
location_id= s.location_id,
location_name=CASE WHEN s.privacy_precision IS NOT NULL THEN NULL ELSE COALESCE(l.name, s.location_name, lp.name, sp.location_name) END,
- public_geom=reduce_precision(coalesce(s.geom, l.centroid_geom), false, s.privacy_precision),
+ public_geom=reduce_precision(coalesce(s.geom, l.centroid_geom), false, greatest(s.privacy_precision, (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id))),
date_start=s.date_start,
date_end=s.date_end,
date_type=s.date_type,
@@ -909,7 +909,10 @@
else 'A'
end,
parent_sample_id=s.parent_id,
- media_count=(SELECT COUNT(sm.*) FROM sample_media sm WHERE sm.sample_id=s.id AND sm.deleted=false)
+ media_count=(SELECT COUNT(sm.*) FROM sample_media sm WHERE sm.sample_id=s.id AND sm.deleted=false),
+ external_key=s.external_key,
+ sensitive=(SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id) IS NOT NULL,
+ private=s.privacy_precision IS NOT NULL
FROM samples s
#join_needs_update#
LEFT JOIN samples sp ON sp.id=s.parent_id AND sp.deleted=false
@@ -938,24 +941,59 @@
SET website_title=w.title,
survey_title=su.title,
group_title=g.title,
- public_entered_sref=case when s.privacy_precision is not null then null else
+ public_entered_sref=case
+ when s.privacy_precision is not null OR (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id) IS NOT NULL then
+ get_output_sref(
+ greatest(
+ round(sqrt(st_area(st_transform(s.geom, sref_system_to_srid(s.entered_sref_system)))))::integer,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id),
+ s.privacy_precision,
+ -- work out best square size to reflect a lat long's true precision
+ case
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value)>=501 then 10000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 51 and 500 then 1000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 6 and 50 then 100
+ else 10
+ end,
+ 10 -- default minimum square size
+ ), reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ )
+ else
case
when s.entered_sref_system = '4326' and coalesce(s.entered_sref, l.centroid_sref) ~ '^-?[0-9]*\.[0-9]*,[ ]*-?[0-9]*\.[0-9]*' then
- abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::numeric, 3))::varchar
- || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::float>0 then 'N' else 'S' end
- || ', '
- || abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::numeric, 3))::varchar
- || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::float>0 then 'E' else 'W' end
+ abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::numeric, 3))::varchar
+ || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::float>0 then 'N' else 'S' end
+ || ', '
+ || abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::numeric, 3))::varchar
+ || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::float>0 then 'E' else 'W' end
when s.entered_sref_system = '4326' and coalesce(s.entered_sref, l.centroid_sref) ~ '^-?[0-9]*\.[0-9]*[NS](, |[, ])*-?[0-9]*\.[0-9]*[EW]' then
- abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[1])::numeric, 3))::varchar
- || case when coalesce(s.entered_sref, l.centroid_sref) like '%N%' then 'N' else 'S' end
- || ', '
- || abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[2])::numeric, 3))::varchar
- || case when coalesce(s.entered_sref, l.centroid_sref) like '%E%' then 'E' else 'W' end
+ abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[1])::numeric, 3))::varchar
+ || case when coalesce(s.entered_sref, l.centroid_sref) like '%N%' then 'N' else 'S' end
+ || ', '
+ || abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[2])::numeric, 3))::varchar
+ || case when coalesce(s.entered_sref, l.centroid_sref) like '%E%' then 'E' else 'W' end
else
- coalesce(s.entered_sref, l.centroid_sref)
+ coalesce(s.entered_sref, l.centroid_sref)
end
end,
+ output_sref=get_output_sref(
+ greatest(
+ round(sqrt(st_area(st_transform(s.geom, sref_system_to_srid(s.entered_sref_system)))))::integer,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id),
+ s.privacy_precision,
+ -- work out best square size to reflect a lat long's true precision
+ case
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value)>=501 then 10000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 51 and 500 then 1000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 6 and 50 then 100
+ else 10
+ end,
+ 10 -- default minimum square size
+ ), reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ ),
+ output_sref_system=get_output_system(
+ reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ ),
entered_sref_system=case when s.entered_sref_system is null then l.centroid_sref_system else s.entered_sref_system end,
recorders = s.recorder_names,
comment=s.comment,
@@ -1000,7 +1038,8 @@
WHEN 'L'::bpchar THEN t_sample_method.term
ELSE NULL::text
END, t_sample_method_id.term),
- attr_linked_location_id=v_linked_location_id.int_value
+ attr_linked_location_id=v_linked_location_id.int_value,
+ verifier=pv.surname || ', ' || pv.first_name
FROM samples s
#join_needs_update#
LEFT JOIN samples sp ON sp.id=s.parent_id and sp.deleted=false
@@ -1050,6 +1089,8 @@
JOIN sample_attributes a_linked_location_id on a_linked_location_id.id=v_linked_location_id.sample_attribute_id
and a_linked_location_id.deleted=false and a_linked_location_id.system_function='linked_location_id'
) ON v_linked_location_id.sample_id=s.id and v_linked_location_id.deleted=false
+LEFT JOIN users uv on uv.id=s.verified_by_id and uv.deleted=false
+LEFT JOIN people pv on pv.id=uv.person_id and pv.deleted=false
WHERE s.id=cache_samples_nonfunctional.id
";
@@ -1062,23 +1103,15 @@
WHERE s.id=cache_samples_nonfunctional.id
";
-$config['samples']['update']['nonfunctional_sensitive'] = "
-UPDATE cache_samples_nonfunctional
-SET public_entered_sref=null
-FROM samples s
-#join_needs_update#
-JOIN occurrences o ON o.sample_id=s.id AND o.deleted=false AND o.sensitivity_precision IS NOT NULL
-WHERE s.id=cache_samples_nonfunctional.id
-";
-
$config['samples']['insert']['functional'] = "
INSERT INTO cache_samples_functional(
id, website_id, survey_id, input_form, location_id, location_name,
public_geom, date_start, date_end, date_type, created_on, updated_on, verified_on, created_by_id,
- group_id, record_status, training, query, parent_sample_id, media_count)
+ group_id, record_status, training, query, parent_sample_id, media_count, external_key,
+ sensitive, private)
SELECT distinct on (s.id) s.id, su.website_id, s.survey_id, COALESCE(sp.input_form, s.input_form), s.location_id,
CASE WHEN s.privacy_precision IS NOT NULL THEN NULL ELSE COALESCE(l.name, s.location_name, lp.name, sp.location_name) END,
- reduce_precision(coalesce(s.geom, l.centroid_geom), false, s.privacy_precision),
+ reduce_precision(coalesce(s.geom, l.centroid_geom), false, greatest(s.privacy_precision, (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id))),
s.date_start, s.date_end, s.date_type, s.created_on, s.updated_on, s.verified_on, s.created_by_id,
coalesce(s.group_id, sp.group_id), s.record_status, s.training,
case
@@ -1087,7 +1120,10 @@
else 'A'
end,
s.parent_id,
- (SELECT COUNT(sm.*) FROM sample_media sm WHERE sm.sample_id=s.id AND sm.deleted=false)
+ (SELECT COUNT(sm.*) FROM sample_media sm WHERE sm.sample_id=s.id AND sm.deleted=false),
+ s.external_key,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id) IS NOT NULL,
+ s.privacy_precision IS NOT NULL
FROM samples s
#join_needs_update#
LEFT JOIN cache_samples_functional cs on cs.id=s.id
@@ -1115,28 +1151,70 @@
$config['samples']['insert']['nonfunctional'] = "
INSERT INTO cache_samples_nonfunctional(
id, website_title, survey_title, group_title, public_entered_sref,
- entered_sref_system, recorders, comment, privacy_precision, licence_code)
+ entered_sref_system, recorders, comment, privacy_precision, licence_code,
+ attr_sref_precision, output_sref, output_sref_system, verifier)
SELECT distinct on (s.id) s.id, w.title, su.title, g.title,
- case when s.privacy_precision is not null then null else
+ case
+ when s.privacy_precision is not null OR (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id) IS NOT NULL then
+ get_output_sref(
+ greatest(
+ round(sqrt(st_area(st_transform(s.geom, sref_system_to_srid(s.entered_sref_system)))))::integer,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id),
+ s.privacy_precision,
+ -- work out best square size to reflect a lat long's true precision
+ case
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value)>=501 then 10000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 51 and 500 then 1000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 6 and 50 then 100
+ else 10
+ end,
+ 10 -- default minimum square size
+ ), reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ )
+ else
case
when s.entered_sref_system = '4326' and coalesce(s.entered_sref, l.centroid_sref) ~ '^-?[0-9]*\.[0-9]*,[ ]*-?[0-9]*\.[0-9]*' then
- abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::numeric, 3))::varchar
- || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::float>0 then 'N' else 'S' end
- || ', '
- || abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::numeric, 3))::varchar
- || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::float>0 then 'E' else 'W' end
+ abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::numeric, 3))::varchar
+ || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::float>0 then 'N' else 'S' end
+ || ', '
+ || abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::numeric, 3))::varchar
+ || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::float>0 then 'E' else 'W' end
when s.entered_sref_system = '4326' and coalesce(s.entered_sref, l.centroid_sref) ~ '^-?[0-9]*\.[0-9]*[NS](, |[, ])*-?[0-9]*\.[0-9]*[EW]' then
- abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[1])::numeric, 3))::varchar
- || case when coalesce(s.entered_sref, l.centroid_sref) like '%N%' then 'N' else 'S' end
- || ', '
- || abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[2])::numeric, 3))::varchar
- || case when coalesce(s.entered_sref, l.centroid_sref) like '%E%' then 'E' else 'W' end
+ abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[1])::numeric, 3))::varchar
+ || case when coalesce(s.entered_sref, l.centroid_sref) like '%N%' then 'N' else 'S' end
+ || ', '
+ || abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[2])::numeric, 3))::varchar
+ || case when coalesce(s.entered_sref, l.centroid_sref) like '%E%' then 'E' else 'W' end
else
- coalesce(s.entered_sref, l.centroid_sref)
+ coalesce(s.entered_sref, l.centroid_sref)
end
end,
case when s.entered_sref_system is null then l.centroid_sref_system else s.entered_sref_system end,
- s.recorder_names, s.comment, s.privacy_precision, li.code
+ s.recorder_names, s.comment, s.privacy_precision, li.code,
+ CASE a_sref_precision.data_type
+ WHEN 'I'::bpchar THEN v_sref_precision.int_value::double precision
+ WHEN 'F'::bpchar THEN v_sref_precision.float_value
+ ELSE NULL::double precision
+ END,
+ get_output_sref(
+ greatest(
+ round(sqrt(st_area(st_transform(s.geom, sref_system_to_srid(s.entered_sref_system)))))::integer,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id),
+ s.privacy_precision,
+ -- work out best square size to reflect a lat long's true precision
+ case
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value)>=501 then 10000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 51 and 500 then 1000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 6 and 50 then 100
+ else 10
+ end,
+ 10 -- default minimum square size
+ ), reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ ),
+ get_output_system(
+ reduce_precision(coalesce(s.geom, l.centroid_geom),(SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ ),
+ pv.surname || ', ' || pv.first_name
FROM samples s
#join_needs_update#
LEFT JOIN samples sp ON sp.id=s.parent_id and sp.deleted=false
@@ -1145,7 +1223,13 @@
JOIN websites w on w.id=su.website_id and w.deleted=false
LEFT JOIN groups g on g.id=coalesce(s.group_id, sp.group_id) and g.deleted=false
LEFT JOIN locations l on l.id=s.location_id and l.deleted=false
+LEFT JOIN (sample_attribute_values v_sref_precision
+ JOIN sample_attributes a_sref_precision on a_sref_precision.id=v_sref_precision.sample_attribute_id and a_sref_precision.deleted=false and a_sref_precision.system_function='sref_precision'
+ LEFT JOIN cache_termlists_terms t_sref_precision on a_sref_precision.data_type='L' and t_sref_precision.id=v_sref_precision.int_value
+) on v_sref_precision.sample_id=s.id and v_sref_precision.deleted=false
LEFT JOIN licences li on li.id=s.licence_id and li.deleted=false
+LEFT JOIN users uv on uv.id=s.verified_by_id and uv.deleted=false
+LEFT JOIN people pv on pv.id=uv.person_id and pv.deleted=false
WHERE s.deleted=false
AND cs.id IS NULL";
@@ -1182,11 +1266,6 @@
WHEN 'L'::bpchar THEN t_biotope.term
ELSE NULL::text
END,
- attr_sref_precision=CASE a_sref_precision.data_type
- WHEN 'I'::bpchar THEN v_sref_precision.int_value::double precision
- WHEN 'F'::bpchar THEN v_sref_precision.float_value
- ELSE NULL::double precision
- END,
attr_sample_method=COALESCE(t_sample_method_id.term, CASE a_sample_method.data_type
WHEN 'T'::bpchar THEN v_sample_method.text_value
WHEN 'L'::bpchar THEN t_sample_method.term
@@ -1223,10 +1302,6 @@
JOIN sample_attributes a_biotope on a_biotope.id=v_biotope.sample_attribute_id and a_biotope.deleted=false and a_biotope.system_function='biotope'
LEFT JOIN cache_termlists_terms t_biotope on a_biotope.data_type='L' and t_biotope.id=v_biotope.int_value
) on v_biotope.sample_id=s.id and v_biotope.deleted=false
-LEFT JOIN (sample_attribute_values v_sref_precision
- JOIN sample_attributes a_sref_precision on a_sref_precision.id=v_sref_precision.sample_attribute_id and a_sref_precision.deleted=false and a_sref_precision.system_function='sref_precision'
- LEFT JOIN cache_termlists_terms t_sref_precision on a_sref_precision.data_type='L' and t_sref_precision.id=v_sref_precision.int_value
-) on v_sref_precision.sample_id=s.id and v_sref_precision.deleted=false
LEFT JOIN (sample_attribute_values v_sample_method
JOIN sample_attributes a_sample_method on a_sample_method.id=v_sample_method.sample_attribute_id and a_sample_method.deleted=false and a_sample_method.system_function='sample_method'
LEFT JOIN cache_termlists_terms t_sample_method on a_sample_method.data_type='L' and t_sample_method.id=v_sample_method.int_value
@@ -1247,15 +1322,6 @@
WHERE s.id=cache_samples_nonfunctional.id
";
-$config['samples']['insert']['nonfunctional_sensitive'] = "
-UPDATE cache_samples_nonfunctional
-SET public_entered_sref=null
-FROM samples s
-#join_needs_update#
-JOIN occurrences o ON o.sample_id=s.id AND o.deleted=false AND o.sensitivity_precision IS NOT NULL
-WHERE s.id=cache_samples_nonfunctional.id
-";
-
$config['samples']['join_needs_update'] = 'join needs_update_samples nu on nu.id=s.id and nu.deleted=false';
$config['samples']['key_field'] = 's.id';
@@ -1263,7 +1329,7 @@
// Additional update statements to pick up the recorder name from various possible custom attribute places. Faster than
// loads of left joins. These should be in priority order - i.e. ones where we have recorded the inputter rather than
// specifically the recorder should come after ones where we have recorded the recorder specifically.
-$config['samples']['extra_multi_record_updates'] = array(
+$config['samples']['extra_multi_record_updates'] = [
// s.recorder_names is filled in as a starting point. The rest only proceed if this is null.
// full recorder name
// or surname, firstname.
@@ -1336,11 +1402,11 @@
from needs_update_samples nu, users u
join cache_samples_functional csf on csf.created_by_id=u.id
where cs.recorders is null and nu.id=cs.id
- and cs.id=csf.id and u.id<>1;'
-);
+ and cs.id=csf.id and u.id<>1;',
+];
// Final statements to pick up after an insert of a single record.
-$config['samples']['extra_single_record_updates'] = array(
+$config['samples']['extra_single_record_updates'] = [
// Sample recorder names
// Or, full recorder name
// Or, surname, firstname.
@@ -1415,8 +1481,8 @@
from users u
join cache_samples_functional csf on csf.created_by_id=u.id
where cs.recorders is null and cs.id in (#ids#)
- and cs.id=csf.id and u.id<>1;'
-);
+ and cs.id=csf.id and u.id<>1;',
+];
// ---------------------------------------------------------------------------------------------------------------------
@@ -1465,10 +1531,10 @@
) as sub
group by id";
-$config['occurrences']['delete_query'] = array(
+$config['occurrences']['delete_query'] = [
"delete from cache_occurrences_functional where id in (select id from needs_update_occurrences where deleted=true);
delete from cache_occurrences_nonfunctional where id in (select id from needs_update_occurrences where deleted=true);"
-);
+];
$config['occurrences']['update']['functional'] = "
UPDATE cache_occurrences_functional
@@ -1721,16 +1787,6 @@
AND o.deleted=false
";
-$config['occurrences']['update']['nonfunctional_sensitive'] = "
-UPDATE cache_samples_nonfunctional cs
-SET public_entered_sref=null
-FROM occurrences o
-#join_needs_update#
-WHERE o.sample_id=cs.id
-AND o.deleted=false
-AND o.sensitivity_precision IS NOT NULL
-";
-
$config['occurrences']['insert']['functional'] = "INSERT INTO cache_occurrences_functional(
id, sample_id, website_id, survey_id, input_form, location_id,
location_name, public_geom,
@@ -1975,15 +2031,5 @@
AND o.deleted=false
";
-$config['occurrences']['insert']['nonfunctional_sensitive'] = "
-UPDATE cache_samples_nonfunctional cs
-SET public_entered_sref=null
-FROM occurrences o
-#join_needs_update#
-WHERE o.sample_id=cs.id
-AND o.deleted=false
-AND o.sensitivity_precision IS NOT NULL
-";
-
$config['occurrences']['join_needs_update'] = 'join needs_update_occurrences nu on nu.id=o.id and nu.deleted=false';
$config['occurrences']['key_field'] = 'o.id';
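
Each get_output_sref(...) expression above chooses the coarsest of several candidate square sizes before formatting the display reference: the extent of the geometry itself, the largest occurrence sensitivity blur, the sample privacy blur, a size derived from the recorded sref precision attribute, and a 10 m floor. A hedged PHP restatement of that selection, mirroring the SQL; the function name and signature are illustrative and not part of the codebase.

<?php

/**
 * Hedged restatement of the square-size selection in the SQL above. The
 * function name and signature are illustrative only.
 *
 * @param int|null $geomSizeM
 *   Approximate extent of the sample geometry in metres (the SQL derives this
 *   from the square root of the geometry's area).
 * @param int|null $sensitivityPrecisionM
 *   Largest sensitivity_precision of the sample's occurrences, in metres.
 * @param int|null $privacyPrecisionM
 *   Sample privacy_precision, in metres.
 * @param float|null $srefPrecisionM
 *   Value of the sref_precision attribute, in metres.
 *
 * @return int
 *   Square size in metres to use when building the output sref.
 */
function chooseOutputSquareSize($geomSizeM, $sensitivityPrecisionM, $privacyPrecisionM, $srefPrecisionM) {
  // Mirror of the SQL CASE which maps the recorded precision to a square size.
  if ($srefPrecisionM >= 501) {
    $precisionSquare = 10000;
  }
  elseif ($srefPrecisionM >= 51 && $srefPrecisionM <= 500) {
    $precisionSquare = 1000;
  }
  elseif ($srefPrecisionM >= 6 && $srefPrecisionM <= 50) {
    $precisionSquare = 100;
  }
  else {
    $precisionSquare = 10;
  }
  // Equivalent of greatest(...) in the SQL: the coarsest requirement wins,
  // with a 10 m minimum.
  return max($geomSizeM ?? 0, $sensitivityPrecisionM ?? 0, $privacyPrecisionM ?? 0, $precisionSquare, 10);
}

The chosen size is then passed to get_output_sref() alongside the geometry already blurred by reduce_precision(), so sensitive or private samples never expose more precision than the coarsest constraint allows.
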
diff --git a/modules/cache_builder/db/version_6_3_0/202109091456_sample_fields.sql b/modules/cache_builder/db/version_6_3_0/202109091456_sample_fields.sql
new file mode 100644
index 0000000000..0c888f04e1
--- /dev/null
+++ b/modules/cache_builder/db/version_6_3_0/202109091456_sample_fields.sql
@@ -0,0 +1,27 @@
+ALTER TABLE cache_samples_functional
+ ADD COLUMN external_key character varying,
+ ADD COLUMN sensitive boolean,
+ ADD COLUMN private boolean;
+
+ALTER TABLE cache_samples_nonfunctional
+ ADD COLUMN output_sref character varying,
+ ADD COLUMN output_sref_system character varying,
+ ADD COLUMN verifier character varying;
+
+COMMENT ON COLUMN cache_samples_functional.external_key IS
+ 'For samples imported from an external system, provides a field to store the external system''s primary key for the record allowing re-synchronisation.';
+
+COMMENT ON COLUMN cache_samples_functional.sensitive IS
+ 'Set to true if the sample is blurred because one of the contained occurrences is sensitive.';
+
+COMMENT ON COLUMN cache_samples_functional.private IS
+ 'Set to true if the sample has a privacy_precision value set, indicating the data are blurred for site privacy reasons (e.g. private gardens).';
+
+COMMENT ON COLUMN cache_samples_nonfunctional.output_sref IS
+ 'A display spatial reference created for all samples, using the most appropriate local grid system where possible.';
+
+COMMENT ON COLUMN cache_samples_nonfunctional.output_sref_system IS
+ 'Spatial reference system used for the output_sref field.';
+
+COMMENT ON COLUMN cache_samples_nonfunctional.verifier IS
+ 'Name of the person who verified the sample, if any.';
\ No newline at end of file
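
Once the columns above are added, and back-filled by the update_samples script that follows, report or module code can filter on them directly. A hedged sketch of such a query from warehouse PHP; the connection, filter and limit are illustrative only.

<?php

// Hedged sketch: lists blurred display references for sensitive or private
// samples using the new cache columns. The filter and limit are arbitrary
// examples; Database::instance() is the usual Kohana connection.
$db = Database::instance();
$rows = $db->query("
  SELECT f.id, f.external_key, f.sensitive, f.private,
         nf.output_sref, nf.output_sref_system, nf.verifier
  FROM cache_samples_functional f
  JOIN cache_samples_nonfunctional nf ON nf.id = f.id
  WHERE f.sensitive = true OR f.private = true
  ORDER BY f.id DESC
  LIMIT 20
")->result();

foreach ($rows as $row) {
  // output_sref holds the blurred, localised grid reference.
  echo "$row->id: $row->output_sref ($row->output_sref_system)\n";
}
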
diff --git a/modules/cache_builder/db/version_6_3_0/202109101509_update_samples.sql b/modules/cache_builder/db/version_6_3_0/202109101509_update_samples.sql
new file mode 100644
index 0000000000..53b065e435
--- /dev/null
+++ b/modules/cache_builder/db/version_6_3_0/202109101509_update_samples.sql
@@ -0,0 +1,75 @@
+-- #slow script#
+
+UPDATE cache_samples_functional u
+SET external_key=s.external_key,
+ public_geom=reduce_precision(coalesce(s.geom, l.centroid_geom), false, greatest(s.privacy_precision, (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id))),
+ sensitive=(SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id) IS NOT NULL,
+ private=s.privacy_precision IS NOT NULL
+FROM samples s
+LEFT JOIN locations l ON l.id=s.location_id AND l.deleted=false
+WHERE s.id=u.id;
+
+UPDATE cache_samples_nonfunctional u
+ SET public_entered_sref=case
+ when s.privacy_precision is not null OR (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id) IS NOT NULL then
+ get_output_sref(
+ greatest(
+ round(sqrt(st_area(st_transform(s.geom, sref_system_to_srid(s.entered_sref_system)))))::integer,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id),
+ s.privacy_precision,
+ -- work out best square size to reflect a lat long's true precision
+ case
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value)>=501 then 10000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 51 and 500 then 1000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 6 and 50 then 100
+ else 10
+ end,
+ 10 -- default minimum square size
+ ), reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ )
+ else
+ case
+ when s.entered_sref_system = '4326' and coalesce(s.entered_sref, l.centroid_sref) ~ '^-?[0-9]*\.[0-9]*,[ ]*-?[0-9]*\.[0-9]*' then
+ abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::numeric, 3))::varchar
+ || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[1])::float>0 then 'N' else 'S' end
+ || ', '
+ || abs(round(((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::numeric, 3))::varchar
+ || case when ((string_to_array(coalesce(s.entered_sref, l.centroid_sref), ','))[2])::float>0 then 'E' else 'W' end
+ when s.entered_sref_system = '4326' and coalesce(s.entered_sref, l.centroid_sref) ~ '^-?[0-9]*\.[0-9]*[NS](, |[, ])*-?[0-9]*\.[0-9]*[EW]' then
+ abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[1])::numeric, 3))::varchar
+ || case when coalesce(s.entered_sref, l.centroid_sref) like '%N%' then 'N' else 'S' end
+ || ', '
+ || abs(round(((regexp_split_to_array(coalesce(s.entered_sref, l.centroid_sref), '([NS](, |[, ]))|[EW]'))[2])::numeric, 3))::varchar
+ || case when coalesce(s.entered_sref, l.centroid_sref) like '%E%' then 'E' else 'W' end
+ else
+ coalesce(s.entered_sref, l.centroid_sref)
+ end
+ end,
+ output_sref=get_output_sref(
+ greatest(
+ round(sqrt(st_area(st_transform(s.geom, sref_system_to_srid(s.entered_sref_system)))))::integer,
+ (SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id),
+ s.privacy_precision,
+ -- work out best square size to reflect a lat long's true precision
+ case
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value)>=501 then 10000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 51 and 500 then 1000
+ when coalesce(v_sref_precision.int_value, v_sref_precision.float_value) between 6 and 50 then 100
+ else 10
+ end,
+ 10 -- default minimum square size
+ ), reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ ),
+ output_sref_system=get_output_system(
+ reduce_precision(coalesce(s.geom, l.centroid_geom), (SELECT bool_or(confidential) FROM occurrences WHERE sample_id=s.id), greatest((SELECT max(sensitivity_precision) FROM occurrences WHERE sample_id=s.id), s.privacy_precision))
+ ),
+ verifier=pv.surname || ', ' || pv.first_name
+FROM samples s
+LEFT JOIN locations l ON l.id=s.location_id AND l.deleted=false
+LEFT JOIN (sample_attribute_values v_sref_precision
+ JOIN sample_attributes a_sref_precision on a_sref_precision.id=v_sref_precision.sample_attribute_id and a_sref_precision.deleted=false and a_sref_precision.system_function='sref_precision'
+ LEFT JOIN cache_termlists_terms t_sref_precision on a_sref_precision.data_type='L' and t_sref_precision.id=v_sref_precision.int_value
+) on v_sref_precision.sample_id=s.id and v_sref_precision.deleted=false
+LEFT JOIN users uv on uv.id=s.verified_by_id and uv.deleted=false
+LEFT JOIN people pv on pv.id=uv.person_id and pv.deleted=false
+WHERE s.id=u.id;
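
For clarity, the blur size applied above is simply the coarsest of all the contributing precisions. The sketch below is illustrative only; chooseBlurSquareSize() is a hypothetical helper mirroring the greatest()/CASE logic in the SQL, not a function in the warehouse.

// Illustrative only: a hypothetical PHP mirror of the greatest()/CASE logic
// used in the SQL above to pick the square size for blurred output grid refs.
function chooseBlurSquareSize(?int $geomSizeM, ?int $maxSensitivityPrecision, ?int $privacyPrecision, ?float $srefPrecisionAttr): int {
  // Map a recorded lat/long precision (metres) to a sensible square size.
  if ($srefPrecisionAttr === NULL) {
    $attrSquare = 10;
  }
  elseif ($srefPrecisionAttr >= 501) {
    $attrSquare = 10000;
  }
  elseif ($srefPrecisionAttr >= 51) {
    $attrSquare = 1000;
  }
  elseif ($srefPrecisionAttr >= 6) {
    $attrSquare = 100;
  }
  else {
    $attrSquare = 10;
  }
  // The output square is the coarsest of the contributing precisions, with a
  // 10 m minimum.
  return max($geomSizeM ?? 0, $maxSensitivityPrecision ?? 0, $privacyPrecision ?? 0, $attrSquare, 10);
}
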
diff --git a/modules/cache_builder/db/version_6_3_0/202109171515_sensitive_grid_squares.sql b/modules/cache_builder/db/version_6_3_0/202109171515_sensitive_grid_squares.sql
new file mode 100644
index 0000000000..f543e2e4a9
--- /dev/null
+++ b/modules/cache_builder/db/version_6_3_0/202109171515_sensitive_grid_squares.sql
@@ -0,0 +1,7 @@
+-- #slow script#
+
+-- Trigger re-queuing of sensitive records for Elasticsearch due to previously
+-- incorrect map grid square field names.
+UPDATE cache_occurrences_functional
+SET website_id=website_id
+WHERE sensitive=true;
\ No newline at end of file
diff --git a/modules/cache_builder/helpers/cache_builder.php b/modules/cache_builder/helpers/cache_builder.php
index 69f7b2f376..acf4929fc1 100644
--- a/modules/cache_builder/helpers/cache_builder.php
+++ b/modules/cache_builder/helpers/cache_builder.php
@@ -1,355 +1,404 @@
-$table";
- $count = cache_builder::getChangeList($db, $table, $queries, $last_run_date);
- if ($count > 0) {
- echo <<
-
-
";
- }
- $db->query("drop table needs_update_$table");
- }
- catch (Exception $e) {
- $db->query("drop table needs_update_$table");
- throw $e;
- }
- }
-
- /**
- * Apply required database changes to the cache tables.
- *
- * When the needs_update_* table already populated, apply the actual cache
- * update changes to the cached entity.
- *
- * @param object $db
- * Database connection.
- * @param string $table
- * Entity name to update (e.g. sample, occurrence, taxa_taxon_list).
- */
- public static function makeChanges($db, $table) {
- $queries = kohana::config("cache_builder.$table");
- cache_builder::do_delete($db, $table, $queries);
- // preprocess some of the tags in the queries
- if (is_array($queries['update']))
- foreach($queries['update'] as $key=>&$sql)
- $sql = str_replace('#join_needs_update#', $queries['join_needs_update'], $sql);
- else
- $queries['update'] = str_replace('#join_needs_update#', $queries['join_needs_update'], $queries['update']);
- cache_builder::run_statement($db, $table, $queries['update'], 'update');
- // preprocess some of the tags in the queries
- if (is_array($queries['insert']))
- foreach($queries['insert'] as $key=>&$sql)
- $sql = str_replace('#join_needs_update#', $queries['join_needs_update'] . ' and (nu.deleted=false or nu.deleted is null)', $sql);
- else
- $queries['insert'] = str_replace('#join_needs_update#', $queries['join_needs_update'] . ' and (nu.deleted=false or nu.deleted is null)', $queries['insert']);
- cache_builder::run_statement($db, $table, $queries['insert'], 'insert');
- if (isset($queries['extra_multi_record_updates']))
- cache_builder::run_statement($db, $table, $queries['extra_multi_record_updates'], 'final update');
- if (!variable::get("populated-$table")) {
- $cacheQuery = $db->query("select count(*) from cache_$table")->result_array(false);
- if (isset($queries['count']))
- $totalQuery = $db->query($queries['count'])->result_array(false);
- else
- $totalQuery = $db->query("select count(*) from $table where deleted='f'")->result_array(false);
- $percent = round($cacheQuery[0]['count']*100/$totalQuery[0]['count']);
- echo "
Initial population of $table progress $percent%.
";
- }
- }
-
- /**
- * Inserts a single record into the cache, e.g. could be used as soon as a record is submitted.
- *
- * @param object $db
- * Database object.
- * @param string $table
- * Plural form of the table name.
- * @param array $ids
- * Record IDs to insert in the cache
- */
- public static function insert($db, $table, array $ids) {
- if (count($ids) > 0) {
- $idlist = implode(',', $ids);
- if (self::$delayCacheUpdates && in_array($table, ['occurrences', 'samples'])) {
- kohana::log('debug', "Delayed inserts for $table ($idlist)");
- self::delayChangesViaWorkQueue($db, $table, $idlist);
- }
- else {
- $master_list_id = warehouse::getMasterTaxonListId();
- $queries = kohana::config("cache_builder.$table");
- if (!isset($queries['key_field']))
- throw new exception('Cannot do a specific record insert into cache as the key_field configuration not defined in cache_builder configuration');
- if (!is_array($queries['insert']))
- $queries['insert'] = array($queries['insert']);
- foreach ($queries['insert'] as $query) {
- $insertSql = str_replace(
- ['#join_needs_update#', '#master_list_id#'],
- ['', $master_list_id],
- $query
- );
- $insertSql .= ' and ' . $queries['key_field'] . " in ($idlist)";
- $db->query($insertSql);
- }
- }
- self::final_queries($db, $table, $ids);
- }
- }
-
- /**
- * Updates a single record in the cache.
- *
- * E.g. could be used as soon as a record is edited.
- *
- * @param object $db
- * Database object.
- * @param string $table
- * Plural form of the table name.
- * @param array $ids
- * Record IDs to insert in the cache.
- */
- public static function update($db, $table, array $ids) {
- if (count($ids) > 0) {
- $idlist = implode(',', $ids);
- if (self::$delayCacheUpdates && in_array($table, ['occurrences', 'samples'])) {
- kohana::log('debug', "Delayed updates for $table ($idlist)");
- self::delayChangesViaWorkQueue($db, $table, $idlist);
- }
- else {
- $master_list_id = warehouse::getMasterTaxonListId();
- $queries = kohana::config("cache_builder.$table");
- if (!isset($queries['key_field']))
- throw new exception('Cannot do a specific record update into cache as the key_field configuration not defined in cache_builder configuration');
- if (!is_array($queries['update']))
- $queries['update'] = array($queries['update']);
- foreach ($queries['update'] as $query) {
- $updateSql = str_replace(
- ['#join_needs_update#', '#master_list_id#'],
- ['', $master_list_id],
- $query
- );
- $updateSql .= ' and ' . $queries['key_field'] . " in ($idlist)";
- $db->query($updateSql);
- }
- self::final_queries($db, $table, $ids);
- }
- }
- }
-
- /**
- * Deletes a single record from the cache.
- *
- * E.g. could be used as soon as a record is deleted.
- *
- * @param object $db
- * Database object.
- * @param string $table
- * Plural form of the table name.
- * @param array $ids
- * Record IDs to delete from the cache.
- */
- public static function delete($db, $table, array $ids) {
- if (self::$delayCacheUpdates && in_array($table, ['occurrences', 'samples'])) {
- self::delayChangesViaWorkQueue($db, $table, implode(',', $ids));
- }
- else {
- foreach ($ids as $id) {
- if ($table === 'occurrences' || $table === 'samples') {
- $db->delete("cache_{$table}_functional", array('id' => $id));
- $db->delete("cache_{$table}_nonfunctional", array('id' => $id));
- if ($table === 'samples') {
- // Slightly more complex delete query to ensure indexes used.
- $sql = <<query($sql);
- $db->query("delete from cache_occurrences_nonfunctional where id in (select id from occurrences where sample_id=$id)");
- }
- }
- else {
- $db->delete("cache_$table", array('id' => $id));
- }
- }
- }
- }
-
- /**
- * During an import, add tasks to work queue rather than do immediate update.
- *
- * Allows performance improvement during import.
- *
- * @param object $db
- * Database object.
- * @param string $table
- * Plural form of the table name.
- * @param string $idCsv
- * Record IDs to delete from the cache (comma separated string).
- */
- private static function delayChangesViaWorkQueue($db, $table, $idCsv) {
- $entity = inflector::singular($table);
- $sql = <<query($sql);
- }
-
- public static function final_queries($db, $table, $ids) {
- $queries = kohana::config("cache_builder.$table");
- $doneCount = 0;
- if (isset($queries['extra_single_record_updates'])) {
- $idlist=implode(',', $ids);
- if (is_array($queries['extra_single_record_updates']))
- foreach($queries['extra_single_record_updates'] as $key=>&$sql) {
- $result=$db->query(str_replace('#ids#', $idlist, $sql));
- $doneCount += $result->count();
- if ($doneCount>=count($ids))
- break; // we've updated all. So can drop out.
- }
- else {
- $db->query(str_replace('#ids#', $idlist, $queries['extra_single_record_updates']));
- }
- }
- }
-
- /**
- * Build a temporary table with the list of IDs of records we need to update.
- * The table has a deleted flag to indicate newly deleted records.
- * @param objcet $db Database connection.
- * @param string $table Name of the table being cached, e.g. occurrences.
- * @param string $query A query which selects a list of IDs for all new, updated or
- * deleted records (including looking for updates or deletions caused by related
- * records).
- * @param string $last_run_date Date/time of the last time the cache builder was
- * run, used to filter records to only the recent changes. Supplied as a string
- * suitable for injection into an SQL query.
- */
- private static function getChangeList($db, $table, $queries, $last_run_date) {
- $query = str_replace('#date#', $last_run_date, $queries['get_changed_items_query']);
- $db->query("create temporary table needs_update_$table as $query");
- if (!variable::get("populated-$table")) {
- // as well as the changed records, pick up max 5000 previous records, which is important for initial population.
- // 5000 is an arbitrary number to compromise between performance and cache population.
- // of the cache
- $query = $queries['get_missing_items_query'] . ' limit 5000';
- $result = $db->query("insert into needs_update_$table $query");
- if ($result->count() === 0) {
- // Flag that we don't need to do any more previously existing records as they are all done.
- // Future cache updates can just pick up changes from now on.
- variable::set("populated-$table", TRUE);
- echo "
Initial population of $table completed
";
- }
- }
- $db->query("ALTER TABLE needs_update_$table ADD CONSTRAINT ix_nu_$table PRIMARY KEY (id)");
- $r = $db->query("select count(*) as count from needs_update_$table")->result_array(FALSE);
- $row = $r[0];
- return $row['count'];
- }
-
- /**
- * Deletes all records from the cache table which are in the table of
- * records to update and where the deleted flag is true.
- *
- * @param object $db
- * Database connection.
- * @param string $table
- * Name of the table being cached.
- * @param array $queries
- * List of configured queries for this table, which might include non-default delete queries.
- */
- private static function do_delete($db, $table, $queries) {
- // set up a default delete query if none are specified
- if (!isset($queries['delete_query'])) {
- $queries['delete_query'] = array("delete from cache_$table where id in (select id from needs_update_$table where deleted=true)");
- }
- $count = 0;
- foreach ($queries['delete_query'] as $query) {
- $count += $db->query($query)->count();
- }
- if (variable::get("populated-$table")) {
- echo "
Delete(s)
$count
\n";
- }
- }
-
- /**
- * Runs an insert or update statemnet to update one of
- * the cache tables.
- * @param object $db Database connection.
- * @param string $query Query used to perform the update or insert. Can be a string, or an
- * associative array of SQL strings if multiple required to do the task.
- * @param string $action Term describing the action, used for feedback only.
- */
- private static function run_statement($db, $table, $query, $action) {
- $master_list_id = warehouse::getMasterTaxonListId();
- if (is_array($query)) {
- foreach ($query as $title => $sql) {
- $sql = str_replace('#master_list_id#', $master_list_id, $sql);
- $count = $db->query($sql)->count();
- if (variable::get("populated-$table"))
- echo "
";
+ }
+ $db->query("drop table needs_update_$table");
+ }
+ catch (Exception $e) {
+ $db->query("drop table needs_update_$table");
+ throw $e;
+ }
+ }
+
+ /**
+ * Apply required database changes to the cache tables.
+ *
+ * When the needs_update_* table is already populated, apply the actual cache
+ * update changes to the cached entity.
+ *
+ * @param object $db
+ * Database connection.
+ * @param string $table
+ * Entity name to update (e.g. sample, occurrence, taxa_taxon_list).
+ */
+ public static function makeChanges($db, $table) {
+ $queries = kohana::config("cache_builder.$table");
+ cache_builder::do_delete($db, $table, $queries);
+ // Preprocess some of the tags in the queries.
+ if (is_array($queries['update'])) {
+ foreach ($queries['update'] as &$sql) {
+ $sql = str_replace('#join_needs_update#', $queries['join_needs_update'], $sql);
+ }
+ }
+ else {
+ $queries['update'] = str_replace('#join_needs_update#', $queries['join_needs_update'], $queries['update']);
+ }
+ cache_builder::run_statement($db, $table, $queries['update'], 'update');
+ // Preprocess some of the tags in the queries.
+ if (is_array($queries['insert'])) {
+ foreach ($queries['insert'] as &$sql) {
+ $sql = str_replace('#join_needs_update#', $queries['join_needs_update'] . ' and (nu.deleted=false or nu.deleted is null)', $sql);
+ }
+ }
+ else {
+ $queries['insert'] = str_replace('#join_needs_update#', $queries['join_needs_update'] . ' and (nu.deleted=false or nu.deleted is null)', $queries['insert']);
+ }
+ cache_builder::run_statement($db, $table, $queries['insert'], 'insert');
+ if (isset($queries['extra_multi_record_updates'])) {
+ cache_builder::run_statement($db, $table, $queries['extra_multi_record_updates'], 'final update');
+ }
+ if (!variable::get("populated-$table")) {
+ $cacheQuery = $db->query("select count(*) from cache_$table")->result_array(FALSE);
+ if (isset($queries['count'])) {
+ $totalQuery = $db->query($queries['count'])->result_array(FALSE);
+ }
+ else {
+ $totalQuery = $db->query("select count(*) from $table where deleted='f'")->result_array(FALSE);
+ }
+ $percent = round($cacheQuery[0]['count'] * 100 / $totalQuery[0]['count']);
+ echo "
Initial population of $table progress $percent%.
";
+ }
+ }
+
+ /**
+ * Inserts a single record into the cache, e.g. could be used as soon as a record is submitted.
+ *
+ * @param object $db
+ * Database object.
+ * @param string $table
+ * Plural form of the table name.
+ * @param array $ids
+ * Record IDs to insert in the cache.
+ */
+ public static function insert($db, $table, array $ids) {
+ if (count($ids) > 0) {
+ $idlist = implode(',', $ids);
+ if (self::$delayCacheUpdates && in_array($table, ['occurrences', 'samples'])) {
+ kohana::log('debug', "Delayed inserts for $table ($idlist)");
+ self::delayChangesViaWorkQueue($db, $table, $idlist);
+ }
+ else {
+ $master_list_id = warehouse::getMasterTaxonListId();
+ $queries = kohana::config("cache_builder.$table");
+ if (!isset($queries['key_field']))
+ throw new exception('Cannot do a specific record insert into cache as the key_field configuration not defined in cache_builder configuration');
+ if (!is_array($queries['insert']))
+ $queries['insert'] = array($queries['insert']);
+ foreach ($queries['insert'] as $query) {
+ $insertSql = str_replace(
+ ['#join_needs_update#', '#master_list_id#'],
+ ['', $master_list_id],
+ $query
+ );
+ $insertSql .= ' and ' . $queries['key_field'] . " in ($idlist)";
+ $db->query($insertSql);
+ }
+ }
+ self::final_queries($db, $table, $ids);
+ }
+ }
+
+ /**
+ * Updates a single record in the cache.
+ *
+ * E.g. could be used as soon as a record is edited.
+ *
+ * @param object $db
+ * Database object.
+ * @param string $table
+ * Plural form of the table name.
+ * @param array $ids
+ * Record IDs to update in the cache.
+ */
+ public static function update($db, $table, array $ids) {
+ if (count($ids) > 0) {
+ $idlist = implode(',', $ids);
+ if (self::$delayCacheUpdates && in_array($table, ['occurrences', 'samples'])) {
+ kohana::log('debug', "Delayed updates for $table ($idlist)");
+ self::delayChangesViaWorkQueue($db, $table, $idlist);
+ }
+ else {
+ $master_list_id = warehouse::getMasterTaxonListId();
+ $queries = kohana::config("cache_builder.$table");
+ if (!isset($queries['key_field']))
+ throw new exception('Cannot do a specific record update into cache as the key_field configuration not defined in cache_builder configuration');
+ if (!is_array($queries['update']))
+ $queries['update'] = array($queries['update']);
+ foreach ($queries['update'] as $query) {
+ $updateSql = str_replace(
+ ['#join_needs_update#', '#master_list_id#'],
+ ['', $master_list_id],
+ $query
+ );
+ $updateSql .= ' and ' . $queries['key_field'] . " in ($idlist)";
+ $db->query($updateSql);
+ }
+ self::final_queries($db, $table, $ids);
+ }
+ }
+ }
+
+ /**
+ * Deletes a single record from the cache.
+ *
+ * E.g. could be used as soon as a record is deleted.
+ *
+ * @param object $db
+ * Database object.
+ * @param string $table
+ * Plural form of the table name.
+ * @param array $ids
+ * Record IDs to delete from the cache.
+ */
+ public static function delete($db, $table, array $ids) {
+ if (self::$delayCacheUpdates && in_array($table, ['occurrences', 'samples'])) {
+ self::delayChangesViaWorkQueue($db, $table, implode(',', $ids));
+ }
+ else {
+ foreach ($ids as $id) {
+ if ($table === 'occurrences' || $table === 'samples') {
+ $db->delete("cache_{$table}_functional", array('id' => $id));
+ $db->delete("cache_{$table}_nonfunctional", array('id' => $id));
+ if ($table === 'samples') {
+ // Slightly more complex delete query to ensure indexes used.
+ $sql = <<query($sql);
+ $db->query("delete from cache_occurrences_nonfunctional where id in (select id from occurrences where sample_id=$id)");
+ }
+ }
+ else {
+ $db->delete("cache_$table", array('id' => $id));
+ }
+ }
+ }
+ }
+
+ /**
+ * If submitting occurrence changes without a sample, update sample tracking.
+ *
+ * This is so that any sample data feeds receive an updated copy of the
+ * sample, as the occurrence statistics will have changed.
+ *
+ * @param object $db
+ * Database object.
+ * @param array $ids
+ * List of IDs of occurrences affected by a submission.
+ */
+ public static function updateSampleTrackingForOccurrences($db, array $ids) {
+ $idList = implode(',', $ids);
+ $sql = <<query($sql);
+ }
+
+ /**
+ * During an import, add tasks to work queue rather than do immediate update.
+ *
+ * Allows performance improvement during import.
+ *
+ * @param object $db
+ * Database object.
+ * @param string $table
+ * Plural form of the table name.
+ * @param string $idCsv
+ * Record IDs affected by the change (comma separated string).
+ */
+ private static function delayChangesViaWorkQueue($db, $table, $idCsv) {
+ $entity = inflector::singular($table);
+ $sql = <<query($sql);
+ }
+
+ public static function final_queries($db, $table, $ids) {
+ $queries = kohana::config("cache_builder.$table");
+ $doneCount = 0;
+ if (isset($queries['extra_single_record_updates'])) {
+ $idlist=implode(',', $ids);
+ if (is_array($queries['extra_single_record_updates']))
+ foreach($queries['extra_single_record_updates'] as $key=>&$sql) {
+ $result=$db->query(str_replace('#ids#', $idlist, $sql));
+ $doneCount += $result->count();
+ if ($doneCount>=count($ids))
+ break; // we've updated all. So can drop out.
+ }
+ else {
+ $db->query(str_replace('#ids#', $idlist, $queries['extra_single_record_updates']));
+ }
+ }
+ }
+
+ /**
+ * Build a temporary table with the list of IDs of records we need to update.
+ *
+ * The table has a deleted flag to indicate newly deleted records.
+ *
+ * @param object $db
+ * Database connection.
+ * @param string $table
+ * Name of the table being cached, e.g. occurrences.
+ * @param array $queries
+ * List of configured queries for this table.
+ * @param string $last_run_date
+ * Date/time of the last time the cache builder was run, used to filter
+ * records to only the recent changes. Supplied as a string suitable for
+ * injection into an SQL query.
+ */
+ private static function getChangeList($db, $table, $queries, $last_run_date) {
+ $query = str_replace('#date#', $last_run_date, $queries['get_changed_items_query']);
+ $db->query("create temporary table needs_update_$table as $query");
+ if (!variable::get("populated-$table")) {
+ // As well as the changed records, pick up max 5000 previous records,
+ // which is important for initial population. 5000 is an arbitrary number
+ // to compromise between performance and cache population.
+ $query = $queries['get_missing_items_query'] . ' limit 5000';
+ $result = $db->query("insert into needs_update_$table $query");
+ if ($result->count() === 0) {
+ // Flag that we don't need to do any more previously existing records
+ // as they are all done.
+ // Future cache updates can just pick up changes from now on.
+ variable::set("populated-$table", TRUE);
+ echo "
Initial population of $table completed
";
+ }
+ }
+ $db->query("ALTER TABLE needs_update_$table ADD CONSTRAINT ix_nu_$table PRIMARY KEY (id)");
+ $r = $db->query("select count(*) as count from needs_update_$table")->result_array(FALSE);
+ $row = $r[0];
+ return $row['count'];
+ }
+
+ /**
+ * Deletes all records from the cache table which are in the table of
+ * records to update and where the deleted flag is true.
+ *
+ * @param object $db
+ * Database connection.
+ * @param string $table
+ * Name of the table being cached.
+ * @param array $queries
+ * List of configured queries for this table, which might include non-default delete queries.
+ */
+ private static function do_delete($db, $table, $queries) {
+ // Set up a default delete query if none are specified.
+ if (!isset($queries['delete_query'])) {
+ $queries['delete_query'] = ["delete from cache_$table where id in (select id from needs_update_$table where deleted=true)"];
+ }
+ $count = 0;
+ foreach ($queries['delete_query'] as $query) {
+ $count += $db->query($query)->count();
+ }
+ if (variable::get("populated-$table")) {
+ echo "
Delete(s)
$count
\n";
+ }
+ }
+
+ /**
+ * Runs an insert or update statement to update one of the cache tables.
+ *
+ * @param object $db
+ * Database connection.
+ * @param string $table
+ * Name of the table being cached.
+ * @param string $query
+ * Query used to perform the update or insert. Can be a string, or an
+ * associative array of SQL strings if multiple required to do the task.
+ * @param string $action
+ * Term describing the action, used for feedback only.
+ */
+ private static function run_statement($db, $table, $query, $action) {
+ $master_list_id = warehouse::getMasterTaxonListId();
+ if (is_array($query)) {
+ foreach ($query as $title => $sql) {
+ $sql = str_replace('#master_list_id#', $master_list_id, $sql);
+ $count = $db->query($sql)->count();
+ if (variable::get("populated-$table")) {
+ echo "
diff --git a/modules/sref_osie/helpers/osie.php b/modules/sref_osie/helpers/osie.php
index 106af71adf..98d399733c 100644
--- a/modules/sref_osie/helpers/osie.php
+++ b/modules/sref_osie/helpers/osie.php
@@ -13,39 +13,36 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/gpl.html.
*
- * @package Modules
- * @subpackage OSGB Grid References
- * @author Indicia Team
+ * @author Indicia Team
* @license http://www.gnu.org/licenses/gpl.html GPL 3.0
- * @link https://github.com/indicia-team/warehouse/
+ * @link https://github.com/indicia-team/warehouse/
*/
/**
* Conversion class for OS Ireland grid references (TM75).
- * @package Modules
- * @subpackage OSGB Grid References
- * @author Indicia Team
*/
class osie {
/**
* Returns true if the spatial reference is a recognised Irish Grid square.
*
- * @param $sref string Spatial reference to validate
+ * @param string $sref
+ * Spatial reference to validate.
+ *
+ * @return bool
+ * True if the reference is a valid Irish grid square.
*/
- public static function is_valid($sref)
- {
- // ignore any spaces in the grid ref
- $sref = str_replace(' ','',$sref);
+ public static function is_valid($sref) {
+ // Ignore any spaces in the grid ref.
+ $sref = str_replace(' ', '', $sref);
$sq100 = strtoupper(substr($sref, 0, 1));
- if (!preg_match('([A-HJ-Z])', $sq100))
+ if (!preg_match('([A-HJ-Z])', $sq100)) {
return FALSE;
- $eastnorth=substr($sref, 1);
+ }
+ $eastnorth = substr($sref, 1);
// 2 cases - either remaining chars must be all numeric and an equal number, up to 10 digits
// OR for DINTY Tetrads, 2 numbers followed by a letter (Excluding O, including I)
- if ((!preg_match('/^[0-9]*$/', $eastnorth) || strlen($eastnorth) % 2 != 0 || strlen($eastnorth)>10) AND
- (!preg_match('/^[0-9][0-9][A-NP-Z]$/', $eastnorth)))
+ if ((!preg_match('/^[0-9]*$/', $eastnorth) || strlen($eastnorth) % 2 != 0 || strlen($eastnorth) > 10) &&
+ (!preg_match('/^[0-9][0-9][A-NP-Z]$/', $eastnorth))) {
return FALSE;
+ }
return TRUE;
}
@@ -53,13 +50,15 @@ public static function is_valid($sref)
* Converts a grid reference in OSI notation into the WKT text for the polygon, in
* easting and northings from the zero reference.
*
- * @param string $sref The grid reference
- * @return string String containing the well known text.
+ * @param string $sref
+ * The grid reference.
+ *
+ * @return string
+ * String containing the well known text.
*/
- public static function sref_to_wkt($sref)
- {
+ public static function sref_to_wkt($sref) {
// ignore any spaces in the grid ref
- $sref = str_replace(' ','',$sref);
+ $sref = str_replace(' ', '', $sref);
if (!self::is_valid($sref))
throw new InvalidArgumentException('Spatial reference is not a recognisable grid square.', 4001);
$sq_100 = self::get_100k_square($sref);
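
For reference, the validation rules in osie::is_valid() above accept either a 100 km letter followed by an even number of digits (up to 10), or a DINTY tetrad. Illustrative calls (not taken from the test suite):

// Illustrative calls showing the rules above (not from the source):
osie::is_valid('N8090');       // TRUE - letter plus an even number of digits.
osie::is_valid('N1234567890'); // TRUE - up to 10 digits allowed.
osie::is_valid('B12C');        // TRUE - DINTY tetrad: 2 digits plus a letter (no O).
osie::is_valid('I1234');       // FALSE - I is not a valid 100 km square letter.
osie::is_valid('N123');        // FALSE - odd number of digits and not a tetrad.
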
diff --git a/modules/taxon_associations/tests/TaxonAssocations_ServicesTest.php b/modules/taxon_associations/tests/TaxonAssocations_ServicesTest.php
index 75b81c2cd8..2e5c2165c8 100644
--- a/modules/taxon_associations/tests/TaxonAssocations_ServicesTest.php
+++ b/modules/taxon_associations/tests/TaxonAssocations_ServicesTest.php
@@ -3,9 +3,9 @@
require_once 'client_helpers/data_entry_helper.php';
require_once 'client_helpers/submission_builder.php';
-define ('CORE_FIXTURE_TERMLIST_COUNT', 4);
-define ('CORE_FIXTURE_TERM_COUNT', 5);
-define ('CORE_FIXTURE_TERMLISTS_TERM_COUNT', 5);
+define('CORE_FIXTURE_TERMLIST_COUNT', 4);
+define('CORE_FIXTURE_TERM_COUNT', 5);
+define('CORE_FIXTURE_TERMLISTS_TERM_COUNT', 5);
class TaxonAssociations_ServicesTest extends Indicia_DatabaseTestCase {
@@ -13,16 +13,16 @@ class TaxonAssociations_ServicesTest extends Indicia_DatabaseTestCase {
public function getDataSet()
{
- $ds1 = new PHPUnit_Extensions_Database_DataSet_YamlDataSet('modules/phpUnit/config/core_fixture.yaml');
+ $ds1 = new PHPUnit_Extensions_Database_DataSet_YamlDataSet('modules/phpUnit/config/core_fixture.yaml');
$ds2 = new Indicia_ArrayDataSet(
- array(
- 'meanings' => array(
- array(
- 'id' => 20000
- )
- ),
- 'termlists' => array(
- array(
+ [
+ 'meanings' => [
+ [
+ 'id' => 20000,
+ ],
+ ],
+ 'termlists' => [
+ [
'title' => 'Taxon association types',
'description' => 'Types of associations between taxa',
'website_id' => 1,
@@ -30,21 +30,21 @@ public function getDataSet()
'created_by_id' => 1,
'updated_on' => '2016-07-22:16:00:00',
'updated_by_id' => 1,
- 'external_key' => NULL
- ),
- ),
- 'terms' => array(
- array(
+ 'external_key' => NULL,
+ ],
+ ],
+ 'terms' => [
+ [
'term' => 'is associated with',
'language_id' => 1,
'created_on' => '2016-07-22:16:00:00',
'created_by_id' => 1,
'updated_on' => '2016-07-22:16:00:00',
- 'updated_by_id' => 1
- ),
- ),
- 'termlists_terms' => array(
- array(
+ 'updated_by_id' => 1,
+ ],
+ ],
+ 'termlists_terms' => [
+ [
'termlist_id' => CORE_FIXTURE_TERMLIST_COUNT + 1,
'term_id' => CORE_FIXTURE_TERM_COUNT + 1,
'created_on' => '2016-07-22:16:00:00',
@@ -52,11 +52,11 @@ public function getDataSet()
'updated_on' => '2016-07-22:16:00:00',
'updated_by_id' => 1,
'meaning_id' => 20000,
- 'preferred' => true,
- 'sort_order' => 1
- ),
- ),
- )
+ 'preferred' => TRUE,
+ 'sort_order' => 1,
+ ],
+ ],
+ ]
);
$compositeDs = new PHPUnit_Extensions_Database_DataSet_CompositeDataSet();
@@ -67,20 +67,30 @@ public function getDataSet()
public function setup() {
$this->auth = data_entry_helper::get_read_write_auth(1, 'password');
- // make the tokens re-usable
- $this->auth['write_tokens']['persist_auth']=true;
+ // Make the tokens re-usable.
+ $this->auth['write_tokens']['persist_auth'] = true;
parent::setup();
}
function testPost() {
- $array = array(
+ $array = [
'taxon_association:from_taxon_meaning_id' => 10000,
'taxon_association:to_taxon_meaning_id' => 10001,
'taxon_association:association_type_id' => CORE_FIXTURE_TERMLISTS_TERM_COUNT + 1,
+ ];
+ $s = submission_builder::build_submission(
+ $array, ['model' => 'taxon_association']
+ );
+ $r = data_entry_helper::forward_post_to(
+ 'taxon_association', $s, $this->auth['write_tokens']
+ );
+ Kohana::log(
+ 'debug',
+ "Submission response to taxon_association save " . print_r($r, TRUE)
+ );
+ $this->assertTrue(
+ isset($r['success']),
+ 'Submitting a taxon_association did not return success response'
);
- $s = submission_builder::build_submission($array, array('model' => 'taxon_association'));
- $r = data_entry_helper::forward_post_to('taxon_association', $s, $this->auth['write_tokens']);
- Kohana::log('debug', "Submission response to taxon_association save " . print_r($r, TRUE));
- $this->assertTrue(isset($r['success']), 'Submitting a taxon_association did not return success response');
}
}
\ No newline at end of file
diff --git a/modules/workflow/controllers/workflow_event.php b/modules/workflow/controllers/workflow_event.php
index 656b4070cc..d6b6ad02c1 100644
--- a/modules/workflow/controllers/workflow_event.php
+++ b/modules/workflow/controllers/workflow_event.php
@@ -53,6 +53,27 @@ protected function get_action_columns() {
);
}
+ /**
+ * Convert location_ids_filter to a suitable default for the sub_list control.
+ */
+ protected function getModelValues() {
+ $r = parent::getModelValues();
+ $r['location_ids_filter_array'] = [];
+ if (preg_match('/^{(?<list>\d+(,\d+)*)}$/', $r['workflow_event:location_ids_filter'], $matches)) {
+ $ids = explode(',', $matches['list']);
+ foreach ($ids as $id) {
+ $location = ORM::factory('location', $id);
+ $r['location_ids_filter_array'][] = [
+ 'caption' => $location->name,
+ 'fieldname' => 'workflow_event:location_ids_filter[]',
+ 'default' => $id,
+ ];
+ }
+
+ }
+ return $r;
+ }
+
/**
* Prepares any additional data required by the edit view.
*
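
The getModelValues() conversion above parses a PostgreSQL integer-array literal such as '{12,34}' using a named capture group. A minimal illustration (the value shown is hypothetical):

// Illustrative only: extracting the ID list from a PostgreSQL integer array
// literal using the named capture group.
$value = '{12,34}';
if (preg_match('/^{(?<list>\d+(,\d+)*)}$/', $value, $matches)) {
  $ids = explode(',', $matches['list']);  // ['12', '34'].
}
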
diff --git a/modules/workflow/db/version_6_3_0/202108160940_workflow_filters.sql b/modules/workflow/db/version_6_3_0/202108160940_workflow_filters.sql
new file mode 100644
index 0000000000..0892ce934d
--- /dev/null
+++ b/modules/workflow/db/version_6_3_0/202108160940_workflow_filters.sql
@@ -0,0 +1,17 @@
+ALTER TABLE workflow_events
+ ADD COLUMN attrs_filter_term text;
+
+ALTER TABLE workflow_events
+ ADD COLUMN attrs_filter_values text[];
+
+ALTER TABLE workflow_events
+ADD COLUMN location_ids_filter integer[];
+
+COMMENT ON COLUMN workflow_events.attrs_filter_term IS
+ 'When this event should only trigger if a certain attribute value is present, specify the DwC term here which identifies the attribute to use (e.g. ReproductiveCondition or Stage). Typically used to limit bird events to breeding ReproductiveCondition terms.';
+
+COMMENT ON COLUMN workflow_events.attrs_filter_values IS
+ 'When this event should only trigger if a certain attribute value is present, specify the list of triggering values here. A record matching any value in the list will trigger the event.';
+
+COMMENT ON COLUMN workflow_events.location_ids_filter IS
+ 'When this event should only trigger if the record overlaps an indexed location boundary, specify the location ID or list of IDs here. Used to limit alerts to geographic areas.';
\ No newline at end of file
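
As an illustration of how the new columns might be used, the hypothetical statement below restricts an event to breeding-stage records falling inside location 123; the event ID, term and values are examples only, not fixtures shipped with the module:

// Illustrative only: a hypothetical event limited to breeding-stage records
// inside location 123.
$sql = <<<SQL
UPDATE workflow_events
SET attrs_filter_term = 'reproductiveCondition',
    attrs_filter_values = ARRAY['Breeding', 'Nest building'],
    location_ids_filter = ARRAY[123]
WHERE id = 1;
SQL;
$db->query($sql);
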
diff --git a/modules/workflow/helpers/task_workflow_event_check_filters.php b/modules/workflow/helpers/task_workflow_event_check_filters.php
new file mode 100644
index 0000000000..fdc1fe2f96
--- /dev/null
+++ b/modules/workflow/helpers/task_workflow_event_check_filters.php
@@ -0,0 +1,80 @@
+>'workflow_events.id'
+LEFT JOIN samples s ON s.id=o.sample_id AND e.location_ids_filter is not null
+LEFT JOIN locations l ON l.id = ANY(e.location_ids_filter) AND st_intersects(l.boundary_geom, s.geom)
+LEFT JOIN (occurrence_attribute_values v
+ JOIN cache_termlists_terms t on t.id=v.int_value
+ JOIN occurrence_attributes a ON a.id=v.occurrence_attribute_id
+) ON v.occurrence_id=o.id AND e.attrs_filter_term IS NOT NULL
+ -- case insensitive array check.
+ AND lower(t.term)=ANY(lower(e.attrs_filter_values::text)::text[])
+ AND lower(a.term_name)=lower(e.attrs_filter_term)
+WHERE q.entity='occurrence' AND q.task='task_workflow_event_check_filters' AND claimed_by='$procId'
+-- Need to either fail on the locations filter, or attribute values filter.
+AND ((e.location_ids_filter IS NOT NULL AND l.id IS NULL)
+OR (e.attrs_filter_term IS NOT NULL AND v.id IS NULL));
+SQL;
+ $tasks = $db->query($qry);
+ $occurrenceIds = [];
+ foreach ($tasks as $task) {
+ $occurrenceIds[] = $task->record_id;
+ }
+ // For the records outside the workflow_event's filter we can rewind them.
+ $rewinds = workflow::getRewindChangesForRecords($db, 'occurrence', $occurrenceIds, ['S', 'V', 'R']);
+ foreach ($rewinds as $key => $rewind) {
+ list($entity, $id) = explode('.', $key);
+ $obj = ORM::factory($entity, $id);
+ foreach ($rewind as $field => $value) {
+ $obj->$field = $value;
+ }
+ $obj->save();
+ }
+ }
+
+}
diff --git a/modules/workflow/helpers/workflow.php b/modules/workflow/helpers/workflow.php
index 29949a4790..0fb203a632 100644
--- a/modules/workflow/helpers/workflow.php
+++ b/modules/workflow/helpers/workflow.php
@@ -45,8 +45,9 @@ public static function getEntityConfig($entity) {
/**
* Applies undo data to rewind records to their originally posted state.
*
- * This occurs when a record has been modified by the workflow system because of a particular key value linking it to
- * a workflow event record, then the key value is changed so the workflow event is no longer relevant.
+ * This occurs when a record has been modified by the workflow system because
+ * of a particular key value linking it to a workflow event record, then the
+ * key value is changed so the workflow event is no longer relevant.
*
* @param object $db
* Database connection.
@@ -71,16 +72,17 @@ public static function getRewoundRecord($db, $entity, $oldRecord, &$newRecord) {
}
$eventTypes = [];
foreach ($entityConfig['keys'] as $keyDef) {
- $keyChanged = false;
+ $keyChanged = FALSE;
// We need to know if the key has changed to decide whether to wind back.
// If the key is in the main entity, we can directly compare the old and new keys.
if ($keyDef['table'] === $entity) {
$keyCol = $keyDef['column'];
- $keyChanged = (string) $oldRecord->$column !== (string) $newRecord->column;
+ $keyChanged = (string) $oldRecord->$keyCol !== (string) $newRecord->$keyCol;
}
else {
- // Find the definintion of the extra data table that contains the column we need to look for changes in. We can
- // then look to see if the foreign key pointing to that table has changed.
+ // Find the definition of the extra data table that contains the
+ // column we need to look for changes in. We can then look to see if
+ // the foreign key pointing to that table has changed.
foreach ($entityConfig['extraData'] as $extraDataDef) {
if ($extraDataDef['table'] === $keyDef['table']) {
$column = $extraDataDef['originating_table_column'];
@@ -94,7 +96,8 @@ public static function getRewoundRecord($db, $entity, $oldRecord, &$newRecord) {
}
if ($entity === 'occurrence'
&& $oldRecord->record_status !== $newRecord->record_status) {
- // Remove previuos verification and rejection workflow changes as the record status is changing.
+ // Remove previous verification and rejection workflow changes as the
+ // record status is changing.
$eventTypes[] = 'V';
$eventTypes[] = 'R';
}
@@ -129,17 +132,18 @@ public static function getRewoundRecord($db, $entity, $oldRecord, &$newRecord) {
* List of event types to rewind ('S', 'V', 'R').
*
* @return array
- * Associatie array keyed by entity.entity_id, containing an array of the fields with undo values to apply.
+ * Associative array keyed by entity.entity_id, containing an array of the
+ * fields with undo values to apply.
*/
public static function getRewindChangesForRecords($db, $entity, array $entityIdList, array $eventTypes) {
$r = [];
$undoRecords = $db
->select('DISTINCT workflow_undo.id, workflow_undo.entity_id, workflow_undo.original_values')
->from('workflow_undo')
- ->where(array(
+ ->where([
'workflow_undo.entity' => $entity,
'workflow_undo.active' => 't',
- ))
+ ])
->in('event_type', $eventTypes)
->in('entity_id', $entityIdList)
->orderby('workflow_undo.id', 'DESC')
@@ -154,7 +158,7 @@ public static function getRewindChangesForRecords($db, $entity, array $entityIdL
$r["$entity.$undoRecord->entity_id"] = array_merge($r["$entity.$undoRecord->entity_id"], $unsetColumns);
}
// As this is a hard rewind, disable the undo data.
- $db->update('workflow_undo', array('active' => 'f'), array('id' => $undoRecord->id));
+ $db->update('workflow_undo', ['active' => 'f'], ['id' => $undoRecord->id]);
}
return $r;
}
@@ -164,7 +168,7 @@ public static function getRewindChangesForRecords($db, $entity, array $entityIdL
*
* @param object $db
* Database connection.
- * @param int $websiteId
+ * @param int $websiteId
* ID of the website the update is associated with.
* @param string $entity
* Name of the database entity being saved, e.g. occurrence.
@@ -174,8 +178,8 @@ public static function getRewindChangesForRecords($db, $entity, array $entityIdL
* List of event types to include in the results.
*
* @return array
- * List of records with events attached (keyed by entity.id), with each entry containing an array of the events
- * associated with that record.
+ * List of records with events attached (keyed by entity.id), with each
+ * entry containing an array of the events associated with that record.
*/
public static function getEventsForRecords($db, $websiteId, $entity, array $entityIdList, array $eventTypes) {
$r = [];
@@ -192,13 +196,13 @@ public static function getEventsForRecords($db, $websiteId, $entity, array $enti
$table = inflector::plural($entity);
foreach ($entityConfig['keys'] as $keyDef) {
$qry = $db
- ->select('workflow_events.key_value, workflow_events.event_type, workflow_events.mimic_rewind_first, ' .
- "workflow_events.values, $table.id as {$entity}_id")
+ ->select('workflow_events.id, workflow_events.key_value, workflow_events.event_type, workflow_events.mimic_rewind_first, ' .
+ "workflow_events.values, $table.id as {$entity}_id, workflow_events.attrs_filter_term, workflow_events.location_ids_filter")
->from('workflow_events')
- ->where(array(
+ ->where([
'workflow_events.deleted' => 'f',
'key' => $keyDef['db_store_value'],
- ))
+ ])
->in('group_code', $groupCodes)
->in('workflow_events.event_type', $eventTypes);
if ($keyDef['table'] === $entity) {
@@ -207,7 +211,8 @@ public static function getEventsForRecords($db, $websiteId, $entity, array $enti
}
else {
$qry->join($keyDef['table'], "$keyDef[table].$keyDef[column]", 'workflow_events.key_value');
- // Cross reference to the extraData for the same table to find the field name which matches $newRecord->column.
+ // Cross reference to the extraData for the same table to find the
+ // field name which matches $newRecord->column.
foreach ($entityConfig['extraData'] as $extraDataDef) {
if ($extraDataDef['table'] === $keyDef['table']) {
$qry->join(
@@ -274,8 +279,8 @@ public static function applyWorkflow($db, $websiteId, $entity, $oldRecord, $rewo
/**
* Construct a query to retrieve workflow events.
*
- * Constructs a query object which will find all the events applicable to the current record for a given key in the
- * entity's configuration.
+ * Constructs a query object which will find all the events applicable to the
+ * current record for a given key in the entity's configuration.
*
* @param object $db
* Database connection.
@@ -293,16 +298,17 @@ public static function applyWorkflow($db, $websiteId, $entity, $oldRecord, $rewo
* @return object
* Query object.
*/
- private static function buildEventQueryForKey($db, $groupCodes, $entity, $oldRecord, $newRecord, array $keyDef) {
+ private static function buildEventQueryForKey($db, array $groupCodes, $entity, $oldRecord, $newRecord, array $keyDef) {
$entityConfig = self::getEntityConfig($entity);
$eventTypes = [];
$qry = $db
- ->select('workflow_events.event_type, workflow_events.mimic_rewind_first, workflow_events.values')
+ ->select('workflow_events.id, workflow_events.event_type, workflow_events.mimic_rewind_first, workflow_events.values, ' .
+ 'workflow_events.attrs_filter_term, workflow_events.location_ids_filter')
->from('workflow_events')
- ->where(array(
+ ->where([
'workflow_events.deleted' => 'f',
'key' => $keyDef['db_store_value'],
- ))
+ ])
->in('group_code', $groupCodes);
if ($keyDef['table'] === $entity) {
$column = $keyDef['column'];
@@ -361,8 +367,10 @@ private static function buildEventQueryForKey($db, $groupCodes, $entity, $oldRec
* Finds configured groups which the current operation's website uses the workflow for.
*
* @param int $websiteId
+ * Website ID.
*
* @return array
+ * List of group names.
*/
private static function getGroupCodesForThisWebsite($websiteId) {
$config = kohana::config('workflow_groups', FALSE, FALSE);
@@ -380,8 +388,9 @@ private static function getGroupCodesForThisWebsite($websiteId) {
/**
* Applies the events query results to a record.
*
- * Applies the field value changes determined by a query against the workflow_events table to the contents of a record
- * that is about to be saved.
+ * Applies the field value changes determined by a query against the
+ * workflow_events table to the contents of a record that is about to be
+ * saved.
*
* @param object $qry
* Query object set up to retrieve the events to apply.
@@ -393,15 +402,17 @@ private static function getGroupCodesForThisWebsite($websiteId) {
* @param object $newRecord
* ORM Validation object containing the new record details.
* @param array $state
- * State data to pass through to the post-process hook, containing undo data.
+ * State data to pass through to the post-process hook, containing undo
+ * data.
*/
private static function applyEventsQueryToRecord($qry, $entity, array $oldValues, &$newRecord, array &$state) {
$events = $qry->get();
foreach ($events as $event) {
- $newUndoRecord = array();
kohana::log('debug', 'Processing event: ' . var_export($event, TRUE));
+ $needsFilterCheck = !empty($event->attrs_filter_term) || !empty($event->location_ids_filter);
$valuesToApply = self::processEvent(
$event,
+ $needsFilterCheck,
$entity,
$oldValues,
$newRecord->as_array(),
@@ -416,25 +427,32 @@ private static function applyEventsQueryToRecord($qry, $entity, array $oldValues
/**
* Processes a single workflow event.
*
- * Retrieves a list of the values that need to be applied to a database record given an event. The values may include
- * the results of a mimiced rewind as well as the value changes required for the event.
+ * Retrieves a list of the values that need to be applied to a database
+ * record given an event. The values may include the results of a mimicked
+ * rewind as well as the value changes required for the event.
*
* @param object $event
* Event object loaded from the database query.
+ * @param bool $needsFilterCheck
+ * If true, then the workflow event has an attribute or location filter
+ * that needs double checking via a work queue task.
* @param string $entity
* Name of the database entity being saved, e.g. occurrence.
* @param array $oldValues
* Array of the record values before the save operation. Used to retrieve
* values for undo state data.
* @param array $newValues
- * Array of the record values that were submitted to be saved, causing the event to fire.
+ * Array of the record values that were submitted to be saved, causing the
+ * event to fire.
* @param array $state
- * Array of undo state data which will be updated by this method to allow any proposed changes to be undone.
+ * Array of undo state data which will be updated by this method to allow
+ * any proposed changes to be undone.
*
* @return array
- * Associative array of the database fields and values which need to be applied.
+ * Associative array of the database fields and values which need to be
+ * applied.
*/
- public static function processEvent($event, $entity, array $oldValues, array $newValues, array &$state) {
+ public static function processEvent($event, $needsFilterCheck, $entity, array $oldValues, array $newValues, array &$state) {
$entityConfig = self::getEntityConfig($entity);
$columnDeltaList = [];
$valuesToApply = [];
@@ -463,12 +481,17 @@ public static function processEvent($event, $entity, array $oldValues, array $ne
$valuesToApply[$deltaColumn] = $deltaValue;
}
}
- $state[] = array('event_type' => $event->event_type, 'old_data' => $newUndoRecord);
+ $state[] = [
+ 'event_id' => $event->id,
+ 'needs_filter_check' => $needsFilterCheck,
+ 'event_type' => $event->event_type,
+ 'old_data' => $newUndoRecord,
+ ];
return $valuesToApply;
}
/**
- * Returns true if the current user is allowed to view the workflow configuration pages.
+ * Returns true if the user is allowed to view the workflow config pages.
*
* @param object $auth
* Kohana authorisation object.
@@ -496,8 +519,8 @@ public static function allowWorkflowConfigAccess($auth) {
/**
* Rewind a record.
*
- * If an event wants to mimic a rewind to reset data to its original state, then undoes all changes to the record
- * caused by workflow.
+ * If an event wants to mimic a rewind to reset data to its original state,
+ * then undoes all changes to the record caused by workflow.
*
* @param string $entity
* Name of the database entity being saved, e.g. occurrence.
@@ -506,8 +529,8 @@ public static function allowWorkflowConfigAccess($auth) {
* @param array $columnDeltaList
* Array containing the field values that will be changed by the rewind.
* @param array $state
- * Undo state change data from events applied to the record on this transaction which may need to be rewound.
- * @return void
+ * Undo state change data from events applied to the record on this
+ * transaction which may need to be rewound.
*/
private static function mimicRewind($entity, $entityId, array &$columnDeltaList, array $state) {
for ($i = count($state) - 1; $i >= 0; $i--) {
@@ -516,11 +539,11 @@ private static function mimicRewind($entity, $entityId, array &$columnDeltaList,
}
}
$undoRecords = ORM::factory('workflow_undo')
- ->where(array(
+ ->where([
'entity' => $entity,
'entity_id' => $entityId,
'active' => 't',
- ))
+ ])
->orderby('id', 'DESC')->find_all();
foreach ($undoRecords as $undoRecord) {
kohana::log('debug', 'mimic rewind record: ' . var_export($undoRecord->as_array(), TRUE));
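
For reference, each entry that workflow::processEvent() appends to the undo state now carries the event ID and a flag indicating whether a follow-up filter check is needed. A sketch of the shape (values hypothetical):

// Illustrative only: one entry appended to the undo state by
// workflow::processEvent().
$stateEntry = [
  'event_id' => 42,
  'needs_filter_check' => TRUE,  // Event has an attribute or location filter.
  'event_type' => 'V',
  'old_data' => ['record_status' => 'C'],  // Values restored on rewind.
];
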
diff --git a/modules/workflow/models/workflow_event.php b/modules/workflow/models/workflow_event.php
index 6779a22c19..0fad0ef59a 100644
--- a/modules/workflow/models/workflow_event.php
+++ b/modules/workflow/models/workflow_event.php
@@ -27,16 +27,20 @@
* Model class for the workflow_event table.
*/
class Workflow_event_Model extends ORM {
- public $search_field='id';
+ public $search_field = 'id';
- protected $belongs_to = array(
+ protected $belongs_to = [
'created_by' => 'user',
- 'updated_by' => 'user'
- );
- protected $has_and_belongs_to_many = array();
+ 'updated_by' => 'user',
+ ];
+ protected $has_and_belongs_to_many = [];
+ /**
+ * Define model validation behaviour.
+ */
public function validate(Validation $array, $save = FALSE) {
- // uses PHP trim() to remove whitespace from beginning and end of all fields before validation
+ // Uses PHP trim() to remove whitespace from beginning and end of all
+ // fields before validation.
$array->pre_filter('trim');
$array->add_rules('entity', 'required');
$array->add_rules('group_code', 'required');
@@ -46,12 +50,37 @@ public function validate(Validation $array, $save = FALSE) {
$array->add_rules('values', 'required');
// Explicitly add those fields for which we don't do validation.
- $this->unvalidatedFields = array(
+ $this->unvalidatedFields = [
'deleted',
'mimic_rewind_first',
- );
+ 'attrs_filter_term',
+ 'attrs_filter_values',
+ 'location_ids_filter',
+ ];
return parent::validate($array, $save);
}
+ /**
+ * Tidy form data to prepare for submission.
+ *
+ * Converts attrs_filter_values from the form submission string to an array. Also
+ * ensures location_ids_filter array is cleaned.
+ */
+ public function preSubmit() {
+ if (!empty($this->submission['fields']['attrs_filter_values']['value'])
+ && is_string($this->submission['fields']['attrs_filter_values']['value'])) {
+ $valueList = str_replace("\r\n", "\n", $this->submission['fields']['attrs_filter_values']['value']);
+ $valueList = str_replace("\r", "\n", $valueList);
+ $valueList = explode("\n", trim($valueList));
+ $this->submission['fields']['attrs_filter_values'] = ['value' => $valueList];
+ }
+ // Due to the way the sub_list control works, we can have hidden empty
+ // values which need to be cleaned.
+ if (!empty($this->submission['fields']['location_ids_filter']['value'])
+ && is_array($this->submission['fields']['location_ids_filter']['value'])) {
+ $this->submission['fields']['location_ids_filter']['value'] = array_values(array_filter($this->submission['fields']['location_ids_filter']['value']));
+ }
+ }
+
}
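
The preSubmit() normalisation above turns the newline-separated textarea value into an array before the event is saved. A minimal illustration (the example values are hypothetical):

// Illustrative only: normalising the newline separated textarea value into an
// array, as preSubmit() does.
$value = "Breeding\r\nNest building\r\n";
$value = str_replace(["\r\n", "\r"], "\n", $value);
$valueList = explode("\n", trim($value));  // ['Breeding', 'Nest building'].
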
diff --git a/modules/workflow/plugins/workflow.php b/modules/workflow/plugins/workflow.php
index d3da739359..38f07eb79d 100644
--- a/modules/workflow/plugins/workflow.php
+++ b/modules/workflow/plugins/workflow.php
@@ -17,8 +17,6 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/gpl.html.
*
- * @package Modules
- * @subpackage Workflow
* @author Indicia Team
* @license http://www.gnu.org/licenses/gpl.html GPL
* @link https://github.com/Indicia-Team/
@@ -46,25 +44,27 @@ function workflow_alter_menu($menu, $auth) {
/**
* Implements the extend_data_services hook.
*
- * Determines the data entities which should be added to those available via data services.
+ * Determines the data entities which should be added to those available via
+ * data services.
*
* @return array
* List of database entities exposed by this plugin with configuration.
*/
function workflow_extend_data_services() {
- return array(
- 'workflow_events' => array(),
- 'workflow_metadata' => array('allow_full_access' => TRUE)
- );
+ return [
+ 'workflow_events' => [],
+ 'workflow_metadata' => ['allow_full_access' => TRUE],
+ ];
}
/**
* Pre-record save processing hook.
*
- * Potential problem when a record matches multiple events, and they change the same columns,
- * so we are making the assumption that each record will only fire one alert key/key_value combination
- * undo record would require more details on firing event (key and key_value) if this is changed in future
- * In following code, entity means the orm entity, e.g. 'occurrence'
+ * Potential problem: when a record matches multiple events that change the
+ * same columns, we assume that each record will only fire one alert
+ * key/key_value combination. The undo record would require more details on
+ * the firing event (key and key_value) if this is changed in future.
+ * In the following code, entity means the ORM entity, e.g. 'occurrence'.
*
* @param object $db
* Database connection.
@@ -81,12 +81,13 @@ function workflow_extend_data_services() {
* State data to pass to the post save processing hook.
*/
function workflow_orm_pre_save_processing($db, $websiteId, $entity, $oldRecord, &$newRecord) {
- $state = array();
+ $state = [];
// Abort if no workflow configuration for this entity.
if (empty(workflow::getEntityConfig($entity))) {
return $state;
}
- // Rewind the record if previous workflow rule changes no longer apply (e.g. after redetermination).
+ // Rewind the record if previous workflow rule changes no longer apply (e.g.
+ // after redetermination).
$rewoundRecord = workflow::getRewoundRecord($db, $entity, $oldRecord, $newRecord);
// Apply any changes in the workflow_events table relevant to the record.
$state = workflow::applyWorkflow($db, $websiteId, $entity, $oldRecord, $rewoundRecord, $newRecord);
@@ -121,14 +122,27 @@ function workflow_orm_post_save_processing($db, $entity, $record, array $state,
$userId = security::getUserId();
// Insert any state undo records.
foreach ($state as $undoDetails) {
- $db->insert('workflow_undo', array(
+ $db->insert('workflow_undo', [
'entity' => $entity,
'entity_id' => $id,
'event_type' => $undoDetails['event_type'],
'created_on' => date("Ymd H:i:s"),
'created_by_id' => $userId,
- 'original_values' => json_encode($undoDetails['old_data'])
- ));
+ 'original_values' => json_encode($undoDetails['old_data']),
+ ]);
+ if ($undoDetails['needs_filter_check']) {
+ $q = new WorkQueue();
+ $q->enqueue($db, [
+ 'task' => 'task_workflow_event_check_filters',
+ 'entity' => $entity,
+ 'record_id' => $id,
+ 'cost_estimate' => 50,
+ 'priority' => 2,
+ 'params' => json_encode([
+ 'workflow_events.id' => $undoDetails['event_id'],
+ ]),
+ ]);
+ }
}
return TRUE;
}
diff --git a/modules/workflow/views/workflow_event/edit.js b/modules/workflow/views/workflow_event/edit.js
index df57119755..70e0a20b71 100644
--- a/modules/workflow/views/workflow_event/edit.js
+++ b/modules/workflow/views/workflow_event/edit.js
@@ -1,8 +1,16 @@
jQuery(document).ready(function($) {
$('#taxon_list_id').change(function() {
- var options = $('input#workflow_event\\:key_value\\:taxon').indiciaAutocomplete('option');
- options.extraParams.taxon_list_id = $('#taxon_list_id').val();
- $('input#workflow_event\\:key_value\\:taxon').indiciaAutocomplete('option', options);
+ $('input#workflow_event\\:key_value\\:taxon').setExtraParams({
+ taxon_list_id: $('#taxon_list_id').val()
+ });
+ });
+
+ $('#location_type').change(function() {
+ // Remove any hanging autocomplete select list.
+ $('.ac_results').hide();
+ $('#workflow_event\\:location_ids_filter\\:search\\:name').setExtraParams({
+ location_type_id: $('#location_type').val()
+ });
});
$('#workflow_event\\:entity').change(function entityChange() {
diff --git a/modules/workflow/views/workflow_event/index.php b/modules/workflow/views/workflow_event/index.php
index 69ab0f16d1..4e60a49f41 100644
--- a/modules/workflow/views/workflow_event/index.php
+++ b/modules/workflow/views/workflow_event/index.php
@@ -31,7 +31,7 @@
echo $grid;
?>
db->select('*')->from('system')->where('name', 'workflow')->get()->as_array(TRUE);
diff --git a/modules/workflow/views/workflow_event/workflow_event_edit.php b/modules/workflow/views/workflow_event/workflow_event_edit.php
index 53ac130b28..e7ff83e52c 100644
--- a/modules/workflow/views/workflow_event/workflow_event_edit.php
+++ b/modules/workflow/views/workflow_event/workflow_event_edit.php
@@ -24,21 +24,20 @@
* @link https://github.com/Indicia-Team/
*/
-require_once DOCROOT . 'client_helpers/data_entry_helper.php';
+warehouse::loadHelpers(['data_entry_helper']);
+$id = html::initial_value($values, 'workflow_event:id');
if (isset($_POST)) {
data_entry_helper::dump_errors(['errors' => $this->model->getAllErrors()]);
}
$readAuth = data_entry_helper::get_read_auth(0 - $_SESSION['auth_user']->id, kohana::config('indicia.private_key'));
?>
-