diff --git a/.env.dev b/.env.dev new file mode 100644 index 0000000..c9c77a7 --- /dev/null +++ b/.env.dev @@ -0,0 +1,33 @@ + +ENVTYPE=dev + +MQTT_QUEUE_PORT=1884 +MQTT_DIAGNOSTIC_PORT=9002 + +REDIS_PORT=6380 + +WEB_GOST_PORT=8001 +WEB_DIAGNOSTIC_PORT=3002 + + +#Mosquitto +MOSQUITTO_USERNAME_VALUE=mosquitto +MOSQUITTO_PASSWORD_VALUE=mosquitto +MOSQUITTO_CLIENT_ID_VALUE=GOST_MQTT_EMULATOR + +#RabbitMQ config +RABBITMQ_USER=monica +RABBITMQ_PASS=monica123 +RABBITMQ_EXPOSED_PORT=5673 +RABBITMQ_EXPOSED_PORT_DIAGNOSTIC=15673 + + +REDISCACHE_PORT=6379 + +SUBNET_BASE_ADDRESS=172.19.4.0 +SUBNET_BASE_NUMBERELEMENTS=16 + +V_LOCALHOST_MACHINE=192.168.2.3 + +GOST_THINGID_SFN_VALUE=1 +GOST_THINGID_WRISTBANDS_VALUE=2 \ No newline at end of file diff --git a/.env.local b/.env.local new file mode 100644 index 0000000..7619169 --- /dev/null +++ b/.env.local @@ -0,0 +1,33 @@ + +ENVTYPE=local + +MQTT_QUEUE_PORT=1884 +MQTT_DIAGNOSTIC_PORT=9002 + +REDIS_PORT=6380 + +WEB_GOST_PORT=8001 +WEB_DIAGNOSTIC_PORT=3002 + + +#Mosquitto +MOSQUITTO_USERNAME_VALUE=mosquitto +MOSQUITTO_PASSWORD_VALUE=mosquitto +MOSQUITTO_CLIENT_ID_VALUE=GOST_MQTT_EMULATOR + +#RabbitMQ config +RABBITMQ_USER=monica +RABBITMQ_PASS=monica123 +RABBITMQ_EXPOSED_PORT=5673 +RABBITMQ_EXPOSED_PORT_DIAGNOSTIC=15673 + + +REDISCACHE_PORT=6379 + +SUBNET_BASE_ADDRESS=172.19.4.0 +SUBNET_BASE_NUMBERELEMENTS=16 + +V_LOCALHOST_MACHINE=192.168.2.3 + +GOST_THINGID_SFN_VALUE=1 +GOST_THINGID_WRISTBANDS_VALUE=2 \ No newline at end of file diff --git a/.env.prod b/.env.prod new file mode 100644 index 0000000..928c305 --- /dev/null +++ b/.env.prod @@ -0,0 +1,33 @@ + +ENVTYPE=prod + +MQTT_QUEUE_PORT=1884 +MQTT_DIAGNOSTIC_PORT=9002 + +REDIS_PORT=6380 + +WEB_GOST_PORT=8001 +WEB_DIAGNOSTIC_PORT=3002 + + +#Mosquitto +MOSQUITTO_USERNAME_VALUE=mosquitto +MOSQUITTO_PASSWORD_VALUE=mosquitto +MOSQUITTO_CLIENT_ID_VALUE=GOST_MQTT_EMULATOR + +#RabbitMQ config +RABBITMQ_USER=monica +RABBITMQ_PASS=monica123 +RABBITMQ_EXPOSED_PORT=5673 +RABBITMQ_EXPOSED_PORT_DIAGNOSTIC=15673 + + +REDISCACHE_PORT=6379 + +SUBNET_BASE_ADDRESS=172.19.4.0 +SUBNET_BASE_NUMBERELEMENTS=16 + +V_LOCALHOST_MACHINE=192.168.2.3 + +GOST_THINGID_SFN_VALUE=1 +GOST_THINGID_WRISTBANDS_VALUE=2 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d5e0b7f --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +.project +.env +.idea diff --git a/README.md b/README.md index 755595c..e50f580 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,2 @@ -# GhostSCRALMQTTEmulator -Solution that can be used to emulate GHOST Catalog and MQTT Observations received from the field +# GOSTDockerMQTTEmulator + diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..707ed86 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,119 @@ +version: '3.7' + +services: + mqtt: + container_name: gostemul_docker_mqtt_${ENVTYPE} + hostname: gostemul_host_mqtt + image: ansi/mosquitto:latest + networks: + - gostemul_docker_network + ports: + - '${MQTT_QUEUE_PORT}:1883' + - '${MQTT_DIAGNOSTIC_PORT}:9001' + expose: + - "${MQTT_QUEUE_PORT}" + - "${MQTT_DIAGNOSTIC_PORT}" + entrypoint: /usr/local/sbin/mosquitto -c /etc/mosquitto/mosquitto.conf + environment: + - MOSQUITTO_MQTT_URL=mqtt + - MOSQUITTO_USERNAME=${MOSQUITTO_USERNAME_VALUE} + - MOSQUITTO_PASSWORD=${MOSQUITTO_PASSWORD_VALUE} + volumes: + - ./images/mosquitto:/etc/mosquitto:ro + - ./images/mosquitto/auth:/etc/mosquitto/auth + - ./images/mosquitto/logs:/var/log/mosquitto:rw + - ./images/mosquitto/data:/mosquitto/data/ + + # RabbitMQ + rabbit: + 
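+    # RabbitMQ backs the Celery worker defined below. With the sample .env
+    # values, the management UI (container port 15672) is published on host
+    # port 15673; log in with RABBITMQ_USER/RABBITMQ_PASS (monica/monica123).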
container_name: gostemul_docker_rabbit_${ENVTYPE}
+    hostname: gostemul_host_rabbit_${ENVTYPE}
+    image: rabbitmq:latest
+    networks:
+      - gostemul_docker_network
+    environment:
+      - RABBITMQ_DEFAULT_USER=${RABBITMQ_USER}
+      - RABBITMQ_DEFAULT_PASS=${RABBITMQ_PASS}
+      - RABBITMQ_DEFAULT_VHOST=/
+      - RABBITMQ_LOG_BASE=/var/log/rabbitmq
+      - RABBITMQ_HOSTNAME=rabbit
+    ports:
+      - "${RABBITMQ_EXPOSED_PORT}:5672"  # we forward this port because it's useful for debugging
+      - "${RABBITMQ_EXPOSED_PORT_DIAGNOSTIC}:15672"  # here, we can access the rabbitmq management plugin
+
+  redis:
+    container_name: gostemul_docker_redis_${ENVTYPE}
+    hostname: gostemul_host_redis_${ENVTYPE}
+    image: redis:latest
+    sysctls:
+      net.core.somaxconn: 1024
+    ports:
+      - '${REDIS_PORT}:6379'
+    networks:
+      - gostemul_docker_network
+
+  #Celery worker
+  worker:
+    container_name: gostemul_docker_celery_worker
+    hostname: gostemul_host_celery_worker
+    volumes:
+      - .:/code
+    build:
+      context: .
+      dockerfile: images/worker_celery/Dockerfile
+    command: images/worker_celery/celery_entrypoint.sh
+    networks:
+      - gostemul_docker_network
+    environment:
+      - MQTT_BROKER_IP_ADDRESS=mqtt
+      - MQTT_BROKER_USERNAME=${MOSQUITTO_USERNAME_VALUE}
+      - MQTT_BROKER_PASSWORD=${MOSQUITTO_PASSWORD_VALUE}
+      - MQTT_BROKER_CLIENTID=${MOSQUITTO_CLIENT_ID_VALUE}
+      - DJANGO_SETTINGS_MODULE=shared.settings.appglobalconf
+      - RABBITMQ_DEFAULT_USER=${RABBITMQ_USER}
+      - RABBITMQ_DEFAULT_PASS=${RABBITMQ_PASS}
+      - RABBITMQ_HOSTNAME=rabbit
+      - CACHEREDIS_DEFAULT_HOSTNAME=redis
+      - CACHEREDIS_DEFAULT_PORT=6379
+      - GOST_THINGID_SFN=${GOST_THINGID_SFN_VALUE}
+      - GOST_THINGID_WRISTBAND=${GOST_THINGID_WRISTBANDS_VALUE}
+    links:
+      - rabbit
+      - mqtt
+      - redis
+    depends_on:
+      - rabbit
+      - mqtt
+      - redis
+  web:
+    container_name: gostemul_docker_web
+    hostname: gostemul_host_web
+    restart: always
+    build:
+      context: .
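+      # Current directory: the build context is the repo root, so the
+      # Dockerfile can ADD images/web/requirements.txt and the .:/code volume
+      # mount below lines up with WORKDIR /code inside the image.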
+      dockerfile: ./images/web/Dockerfile
+    command: ./images/web/run_web.sh
+    volumes:
+      - .:/code
+    ports:
+      - '${WEB_GOST_PORT}:8000'
+      - '${WEB_DIAGNOSTIC_PORT}:3001'
+    links:
+      - rabbit
+      - mqtt
+    depends_on:
+      - rabbit
+      - mqtt
+    environment:
+      - LOCALHOST_MACHINE=${V_LOCALHOST_MACHINE}
+      - EXPOSED_MQTT_HOST=${V_LOCALHOST_MACHINE}
+      - EXPOSED_MQTT_PORT=${MQTT_QUEUE_PORT}
+      - GOST_THINGID_SFN=${GOST_THINGID_SFN_VALUE}
+      - GOST_THINGID_WRISTBAND=${GOST_THINGID_WRISTBANDS_VALUE}
+    networks:
+      - gostemul_docker_network
+
+
+networks:
+  gostemul_docker_network:
+    name: gost_emul_network_${ENVTYPE}
diff --git a/images/dbsqlite/db.sqlite3 b/images/dbsqlite/db.sqlite3
new file mode 100644
index 0000000..2263110
Binary files /dev/null and b/images/dbsqlite/db.sqlite3 differ
diff --git a/images/mongodb/.dockerignore b/images/mongodb/.dockerignore
new file mode 100644
index 0000000..c24791a
--- /dev/null
+++ b/images/mongodb/.dockerignore
@@ -0,0 +1,5 @@
+# .dockerignore
+
+data/
+
+# Other files and folders
\ No newline at end of file
diff --git a/images/mosquitto/auth/mosquitto_pwd b/images/mosquitto/auth/mosquitto_pwd
new file mode 100644
index 0000000..cbfb6cf
--- /dev/null
+++ b/images/mosquitto/auth/mosquitto_pwd
@@ -0,0 +1 @@
+mosquitto:$6$sz3bi6DNkGdgCmNz$h8vMcqUXMyuzBhT4jdxEFw8AI4tfMPYAY459SSAhktYPFoVvb9JTZfBaEB3EfgYmNj5fN3qsK4g57rxkPn0rlg==
diff --git a/images/mosquitto/data/.gitignore b/images/mosquitto/data/.gitignore
new file mode 100644
index 0000000..dcf1780
--- /dev/null
+++ b/images/mosquitto/data/.gitignore
@@ -0,0 +1 @@
+mosquitto.db
diff --git a/images/mosquitto/docker_entrypoint.sh b/images/mosquitto/docker_entrypoint.sh
new file mode 100644
index 0000000..20b8f75
--- /dev/null
+++ b/images/mosquitto/docker_entrypoint.sh
@@ -0,0 +1,17 @@
+#!/bin/ash
+set -e
+
+if ( [ -z "${MOSQUITTO_USERNAME_VALUE}" ] || [ -z "${MOSQUITTO_PASSWORD_VALUE}" ] ); then
+    echo "MOSQUITTO_USERNAME_VALUE or MOSQUITTO_PASSWORD_VALUE not defined"
+    exit 1
+fi
+
+echo "START MOSQUITTO PWD GENERATION"
+
+# create the mosquitto password file; -b takes the password on the command
+# line, which is acceptable for this local emulator setup
+touch /etc/mosquitto/auth/mosquitto_pwd
+mosquitto_passwd -b /etc/mosquitto/auth/mosquitto_pwd "${MOSQUITTO_USERNAME_VALUE}" "${MOSQUITTO_PASSWORD_VALUE}"
+
+echo "MOSQUITTO PWD CREATION DONE!"
+
+exec mosquitto "$@"
\ No newline at end of file
diff --git a/images/mosquitto/mosquitto.conf b/images/mosquitto/mosquitto.conf
new file mode 100644
index 0000000..0dd1c27
--- /dev/null
+++ b/images/mosquitto/mosquitto.conf
@@ -0,0 +1,836 @@
+# Config file for mosquitto
+#
+# See mosquitto.conf(5) for more information.
+#
+# Default values are shown, uncomment to change.
+#
+# Use the # character to indicate a comment, but only if it is the
+# very first character on the line.
+
+# =================================================================
+# General configuration
+# =================================================================
+
+# Time in seconds to wait before resending an outgoing QoS=1 or
+# QoS=2 message.
+#retry_interval 20
+
+# Time in seconds between updates of the $SYS tree.
+# Set to 0 to disable the publishing of the $SYS tree.
+#sys_interval 10
+
+# Time in seconds between cleaning the internal message store of
+# unreferenced messages. Lower values will result in lower memory
+# usage but more processor time, higher values will have the
+# opposite effect.
+# Setting a value of 0 means the unreferenced messages will be
+# disposed of as quickly as possible.
+#store_clean_interval 10
+
+# Write process id to a file.
Default is a blank string which means +# a pid file shouldn't be written. +# This should be set to /var/run/mosquitto.pid if mosquitto is +# being run automatically on boot with an init script and +# start-stop-daemon or similar. +#pid_file + +# When run as root, drop privileges to this user and its primary +# group. +# Leave blank to stay as root, but this is not recommended. +# If run as a non-root user, this setting has no effect. +# Note that on Windows this has no effect and so mosquitto should +# be started by the user you wish it to run as. +#user mosquitto + +# The maximum number of QoS 1 and 2 messages currently inflight per +# client. +# This includes messages that are partway through handshakes and +# those that are being retried. Defaults to 20. Set to 0 for no +# maximum. Setting to 1 will guarantee in-order delivery of QoS 1 +# and 2 messages. +#max_inflight_messages 20 + +# The maximum number of QoS 1 and 2 messages to hold in a queue +# above those that are currently in-flight. Defaults to 100. Set +# to 0 for no maximum (not recommended). +# See also queue_qos0_messages. +#max_queued_messages 100 + +# Set to true to queue messages with QoS 0 when a persistent client is +# disconnected. These messages are included in the limit imposed by +# max_queued_messages. +# Defaults to false. +# This is a non-standard option for the MQTT v3.1 spec but is allowed in +# v3.1.1. +#queue_qos0_messages false + +# This option sets the maximum publish payload size that the broker will allow. +# Received messages that exceed this size will not be accepted by the broker. +# The default value is 0, which means that all valid MQTT messages are +# accepted. MQTT imposes a maximum payload size of 268435455 bytes. +#message_size_limit 0 + +# This option controls whether a client is allowed to connect with a zero +# length client id or not. This option only affects clients using MQTT v3.1.1 +# and later. If set to false, clients connecting with a zero length client id +# are disconnected. If set to true, clients will be allocated a client id by +# the broker. This means it is only useful for clients with clean session set +# to true. +#allow_zero_length_clientid true + +# If allow_zero_length_clientid is true, this option allows you to set a prefix +# to automatically generated client ids to aid visibility in logs. +#auto_id_prefix + +# This option allows persistent clients (those with clean session set to false) +# to be removed if they do not reconnect within a certain time frame. +# +# This is a non-standard option in MQTT V3.1 but allowed in MQTT v3.1.1. +# +# Badly designed clients may set clean session to false whilst using a randomly +# generated client id. This leads to persistent clients that will never +# reconnect. This option allows these clients to be removed. +# +# The expiration period should be an integer followed by one of h d w m y for +# hour, day, week, month and year respectively. For example +# +# persistent_client_expiration 2m +# persistent_client_expiration 14d +# persistent_client_expiration 1y +# +# The default if not set is to never expire persistent clients. +#persistent_client_expiration + +# If a client is subscribed to multiple subscriptions that overlap, e.g. foo/# +# and foo/+/baz , then MQTT expects that when the broker receives a message on +# a topic that matches both subscriptions, such as foo/bar/baz, then the client +# should only receive the message once. 
+# Mosquitto keeps track of which clients a message has been sent to in order to
+# meet this requirement. The allow_duplicate_messages option allows this
+# behaviour to be disabled, which may be useful if you have a large number of
+# clients subscribed to the same set of topics and are very concerned about
+# minimising memory usage.
+# It can be safely set to true if you know in advance that your clients will
+# never have overlapping subscriptions, otherwise your clients must be able to
+# correctly deal with duplicate messages even when they have QoS=2.
+#allow_duplicate_messages false
+
+# The MQTT specification requires that the QoS of a message delivered to a
+# subscriber is never upgraded to match the QoS of the subscription. Enabling
+# this option changes this behaviour. If upgrade_outgoing_qos is set true,
+# messages sent to a subscriber will always match the QoS of its subscription.
+# This is a non-standard option explicitly disallowed by the spec.
+#upgrade_outgoing_qos false
+
+# =================================================================
+# Default listener
+# =================================================================
+
+# IP address/hostname to bind the default listener to. If not
+# given, the default listener will not be bound to a specific
+# address and so will be accessible to all network interfaces.
+# bind_address ip-address/host name
+#bind_address
+
+# Port to use for the default listener.
+#port 1883
+
+# The maximum number of client connections to allow. This is
+# a per listener setting.
+# Default is -1, which means unlimited connections.
+# Note that other process limits mean that unlimited connections
+# are not really possible. Typically the default maximum number of
+# connections possible is around 1024.
+#max_connections -1
+
+# Choose the protocol to use when listening.
+# This can be either mqtt or websockets.
+# Websockets support is currently disabled by default at compile time.
+# Certificate based TLS may be used with websockets, except that
+# only the cafile, certfile, keyfile and ciphers options are supported.
+#protocol mqtt
+
+# When a listener is using the websockets protocol, it is possible to serve
+# http data as well. Set http_dir to a directory which contains the files you
+# wish to serve. If this option is not specified, then no normal http
+# connections will be possible.
+#http_dir
+
+# Set use_username_as_clientid to true to replace the clientid that a client
+# connected with with its username. This allows authentication to be tied to
+# the clientid, which means that it is possible to prevent one client
+# disconnecting another by using the same clientid.
+# If a client connects with no username it will be disconnected as not
+# authorised when this option is set to true.
+# Do not use in conjunction with clientid_prefixes.
+# See also use_identity_as_username.
+#use_username_as_clientid
+
+# -----------------------------------------------------------------
+# Certificate based SSL/TLS support
+# -----------------------------------------------------------------
+# The following options can be used to enable SSL/TLS support for
+# this listener. Note that the recommended port for MQTT over TLS
+# is 8883, but this must be set manually.
+#
+# See also the mosquitto-tls man page.
+
+# At least one of cafile or capath must be defined. They both
+# define methods of accessing the PEM encoded Certificate
+# Authority certificates that have signed your server certificate
+# and that you wish to trust.
+# cafile defines the path to a file containing the CA certificates.
+# capath defines a directory that will be searched for files
+# containing the CA certificates. For capath to work correctly, the
+# certificate files must have ".crt" as the file ending and you must run
+# "c_rehash <path to capath>" each time you add/remove a certificate.
+#cafile
+#capath
+
+# Path to the PEM encoded server certificate.
+#certfile
+
+# Path to the PEM encoded keyfile.
+#keyfile
+
+# This option defines the version of the TLS protocol to use for this listener.
+# The default value allows v1.2, v1.1 and v1.0, if they are all supported by
+# the version of openssl that the broker was compiled against. For openssl >=
+# 1.0.1 the valid values are tlsv1.2 tlsv1.1 and tlsv1. For openssl < 1.0.1 the
+# valid values are tlsv1.
+#tls_version
+
+# By default a TLS enabled listener will operate in a similar fashion to a
+# https enabled web server, in that the server has a certificate signed by a CA
+# and the client will verify that it is a trusted certificate. The overall aim
+# is encryption of the network traffic. By setting require_certificate to true,
+# the client must provide a valid certificate in order for the network
+# connection to proceed. This allows access to the broker to be controlled
+# outside of the mechanisms provided by MQTT.
+#require_certificate false
+
+# If require_certificate is true, you may set use_identity_as_username to true
+# to use the CN value from the client certificate as a username. If this is
+# true, the password_file option will not be used for this listener.
+#use_identity_as_username false
+
+# If you have require_certificate set to true, you can create a certificate
+# revocation list file to revoke access to particular client certificates. If
+# you have done this, use crlfile to point to the PEM encoded revocation file.
+#crlfile
+
+# If you wish to control which encryption ciphers are used, use the ciphers
+# option. The list of available ciphers can be obtained using the "openssl
+# ciphers" command and should be provided in the same format as the output of
+# that command.
+# If unset defaults to DEFAULT:!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2:@STRENGTH
+#ciphers DEFAULT:!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2:@STRENGTH
+
+# -----------------------------------------------------------------
+# Pre-shared-key based SSL/TLS support
+# -----------------------------------------------------------------
+# The following options can be used to enable PSK based SSL/TLS support for
+# this listener. Note that the recommended port for MQTT over TLS is 8883, but
+# this must be set manually.
+#
+# See also the mosquitto-tls man page and the "Certificate based SSL/TLS
+# support" section. Only one of certificate or PSK encryption support can be
+# enabled for any listener.
+
+# The psk_hint option enables pre-shared-key support for this listener and also
+# acts as an identifier for this listener. The hint is sent to clients and may
+# be used locally to aid authentication. The hint is a free form string that
+# doesn't have much meaning in itself, so feel free to be creative.
+# If this option is provided, see psk_file to define the pre-shared keys to be
+# used or create a security plugin to handle them.
+#psk_hint
+
+# Set use_identity_as_username to have the psk identity sent by the client used
+# as its username. Authentication will be carried out using the PSK rather than
+# the MQTT username/password and so password_file will not be used for this
+# listener.
+#use_identity_as_username false
+
+# When using PSK, the encryption ciphers used will be chosen from the list of
+# available PSK ciphers. If you want to control which ciphers are available,
+# use the "ciphers" option. The list of available ciphers can be obtained
+# using the "openssl ciphers" command and should be provided in the same format
+# as the output of that command.
+#ciphers
+
+# =================================================================
+# Extra listeners
+# =================================================================
+
+# Listen on a port/ip address combination. By using this variable
+# multiple times, mosquitto can listen on more than one port. If
+# this variable is used and neither bind_address nor port given,
+# then the default listener will not be started.
+# The port number to listen on must be given. Optionally, an ip
+# address or host name may be supplied as a second argument. In
+# this case, mosquitto will attempt to bind the listener to that
+# address and so restrict access to the associated network and
+# interface. By default, mosquitto will listen on all interfaces.
+# listener port-number [ip address/host name]
+#listener
+
+# The maximum number of client connections to allow. This is
+# a per listener setting.
+# Default is -1, which means unlimited connections.
+# Note that other process limits mean that unlimited connections
+# are not really possible. Typically the default maximum number of
+# connections possible is around 1024.
+#max_connections -1
+
+# The listener can be restricted to operating within a topic hierarchy using
+# the mount_point option. This is achieved by prefixing the mount_point string
+# to all topics for any clients connected to this listener. This prefixing only
+# happens internally to the broker; the client will not see the prefix.
+#mount_point
+
+# Choose the protocol to use when listening.
+# This can be either mqtt or websockets.
+# Certificate based TLS may be used with websockets, except that only the
+# cafile, certfile, keyfile and ciphers options are supported.
+#protocol mqtt
+
+# When a listener is using the websockets protocol, it is possible to serve
+# http data as well. Set http_dir to a directory which contains the files you
+# wish to serve. If this option is not specified, then no normal http
+# connections will be possible.
+#http_dir
+
+# Set use_username_as_clientid to true to replace the clientid that a client
+# connected with with its username. This allows authentication to be tied to
+# the clientid, which means that it is possible to prevent one client
+# disconnecting another by using the same clientid.
+# If a client connects with no username it will be disconnected as not
+# authorised when this option is set to true.
+# Do not use in conjunction with clientid_prefixes.
+# See also use_identity_as_username.
+#use_username_as_clientid
+
+# -----------------------------------------------------------------
+# Certificate based SSL/TLS support
+# -----------------------------------------------------------------
+# The following options can be used to enable certificate based SSL/TLS support
+# for this listener. Note that the recommended port for MQTT over TLS is 8883,
+# but this must be set manually.
+#
+# See also the mosquitto-tls man page and the "Pre-shared-key based SSL/TLS
+# support" section. Only one of certificate or PSK encryption support can be
+# enabled for any listener.
+
+# At least one of cafile or capath must be defined to enable certificate based
+# TLS encryption. They both define methods of accessing the PEM encoded
+# Certificate Authority certificates that have signed your server certificate
+# and that you wish to trust.
+# cafile defines the path to a file containing the CA certificates.
+# capath defines a directory that will be searched for files
+# containing the CA certificates. For capath to work correctly, the
+# certificate files must have ".crt" as the file ending and you must run
+# "c_rehash <path to capath>" each time you add/remove a certificate.
+#cafile
+#capath
+
+# Path to the PEM encoded server certificate.
+#certfile
+
+# Path to the PEM encoded keyfile.
+#keyfile
+
+# By default a TLS enabled listener will operate in a similar fashion to a
+# https enabled web server, in that the server has a certificate signed by a CA
+# and the client will verify that it is a trusted certificate. The overall aim
+# is encryption of the network traffic. By setting require_certificate to true,
+# the client must provide a valid certificate in order for the network
+# connection to proceed. This allows access to the broker to be controlled
+# outside of the mechanisms provided by MQTT.
+#require_certificate false
+
+# If require_certificate is true, you may set use_identity_as_username to true
+# to use the CN value from the client certificate as a username. If this is
+# true, the password_file option will not be used for this listener.
+#use_identity_as_username false
+
+# If you have require_certificate set to true, you can create a certificate
+# revocation list file to revoke access to particular client certificates. If
+# you have done this, use crlfile to point to the PEM encoded revocation file.
+#crlfile
+
+# If you wish to control which encryption ciphers are used, use the ciphers
+# option. The list of available ciphers can be obtained using the "openssl
+# ciphers" command and should be provided in the same format as the output of
+# that command.
+#ciphers
+
+# -----------------------------------------------------------------
+# Pre-shared-key based SSL/TLS support
+# -----------------------------------------------------------------
+# The following options can be used to enable PSK based SSL/TLS support for
+# this listener. Note that the recommended port for MQTT over TLS is 8883, but
+# this must be set manually.
+#
+# See also the mosquitto-tls man page and the "Certificate based SSL/TLS
+# support" section. Only one of certificate or PSK encryption support can be
+# enabled for any listener.
+
+# The psk_hint option enables pre-shared-key support for this listener and also
+# acts as an identifier for this listener. The hint is sent to clients and may
+# be used locally to aid authentication. The hint is a free form string that
+# doesn't have much meaning in itself, so feel free to be creative.
+# If this option is provided, see psk_file to define the pre-shared keys to be
+# used or create a security plugin to handle them.
+#psk_hint
+
+# Set use_identity_as_username to have the psk identity sent by the client used
+# as its username. Authentication will be carried out using the PSK rather than
+# the MQTT username/password and so password_file will not be used for this
+# listener.
+#use_identity_as_username false
+
+# When using PSK, the encryption ciphers used will be chosen from the list of
+# available PSK ciphers. If you want to control which ciphers are available,
+# use the "ciphers" option. The list of available ciphers can be obtained
+# using the "openssl ciphers" command and should be provided in the same format
+# as the output of that command.
+#ciphers
+
+# =================================================================
+# Persistence
+# =================================================================
+
+# If persistence is enabled, save the in-memory database to disk
+# every autosave_interval seconds. If set to 0, the persistence
+# database will only be written when mosquitto exits. See also
+# autosave_on_changes.
+# Note that writing of the persistence database can be forced by
+# sending mosquitto a SIGUSR1 signal.
+#autosave_interval 1800
+
+# If true, mosquitto will count the number of subscription changes, retained
+# messages received and queued messages and if the total exceeds
+# autosave_interval then the in-memory database will be saved to disk.
+# If false, mosquitto will save the in-memory database to disk by treating
+# autosave_interval as a time in seconds.
+#autosave_on_changes false
+
+# Save persistent message data to disk (true/false).
+# This saves information about all messages, including
+# subscriptions, currently in-flight messages and retained
+# messages.
+# retained_persistence is a synonym for this option.
+#persistence false
+
+# The filename to use for the persistent database, not including
+# the path.
+#persistence_file mosquitto.db
+
+# Location for persistent database. Must include trailing /
+# Default is an empty string (current directory).
+# Set to e.g. /var/lib/mosquitto/ if running as a proper service on Linux or
+# similar.
+#persistence_location
+
+# =================================================================
+# Logging
+# =================================================================
+
+# Places to log to. Use multiple log_dest lines for multiple
+# logging destinations.
+# Possible destinations are: stdout stderr syslog topic file
+#
+# stdout and stderr log to the console on the named output.
+#
+# syslog uses the userspace syslog facility which usually ends up
+# in /var/log/messages or similar.
+#
+# topic logs to the broker topic '$SYS/broker/log/<severity>',
+# where severity is one of D, E, W, N, I, M which are debug, error,
+# warning, notice, information and message. Message type severity is used by
+# the subscribe/unsubscribe log_types and publishes log messages to
+# $SYS/broker/log/M/susbcribe or $SYS/broker/log/M/unsubscribe.
+#
+# The file destination requires an additional parameter which is the file to be
+# logged to, e.g. "log_dest file /var/log/mosquitto.log". The file will be
+# closed and reopened when the broker receives a HUP signal. Only a single file
+# destination may be configured.
+#
+# Note that if the broker is running as a Windows service it will default to
+# "log_dest none" and neither stdout nor stderr logging is available.
+# Use "log_dest none" if you wish to disable logging.
+#log_dest stderr
+
+# If using syslog logging (not on Windows), messages will be logged to the
+# "daemon" facility by default. Use the log_facility option to choose which of
+# local0 to local7 to log to instead. The option value should be an integer
+# value, e.g. "log_facility 5" to use local5.
+#log_facility
+
+# Types of messages to log. Use multiple log_type lines for logging
+# multiple types of messages. Possible types are: debug, error, warning, notice,
+# information, none, subscribe, unsubscribe, websockets, all.
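+# (Note: further down, this file enables "log_type all", which suits this
+# debugging-oriented emulator but is chatty for a production broker.)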
+# Note that debug type messages are for decoding the incoming/outgoing
+# network packets. They are not logged in "topics".
+#log_type error
+#log_type warning
+#log_type notice
+#log_type information
+log_type all
+
+# Change the websockets logging level. This is a global option, it is not
+# possible to set per listener. This is an integer that is interpreted by
+# libwebsockets as a bit mask for its lws_log_levels enum. See the
+# libwebsockets documentation for more details. "log_type websockets" must also
+# be enabled.
+#websockets_log_level 0
+
+# If set to true, client connection and disconnection messages will be included
+# in the log.
+#connection_messages true
+
+# If set to true, add a timestamp value to each log message.
+#log_timestamp true
+
+# =================================================================
+# Security
+# =================================================================
+
+# If set, only clients that have a matching prefix on their
+# clientid will be allowed to connect to the broker. By default,
+# all clients may connect.
+# For example, setting "secure-" here would mean a client "secure-
+# client" could connect but another with clientid "mqtt" couldn't.
+#clientid_prefixes
+
+# Boolean value that determines whether clients that connect
+# without providing a username are allowed to connect. If set to
+# false then a password file should be created (see the
+# password_file option) to control authenticated client access.
+# Defaults to true.
+#allow_anonymous true
+
+# In addition to the clientid_prefixes, allow_anonymous and TLS
+# authentication options, username based authentication is also
+# possible. The default support is described in "Default
+# authentication and topic access control" below. The auth_plugin
+# allows another authentication method to be used.
+# Specify the path to the loadable plugin and see the
+# "Authentication and topic access plugin options" section below.
+#auth_plugin
+
+# -----------------------------------------------------------------
+# Default authentication and topic access control
+# -----------------------------------------------------------------
+
+# Control access to the broker using a password file. This file can be
+# generated using the mosquitto_passwd utility. If TLS support is not compiled
+# into mosquitto (it is recommended that TLS support should be included) then
+# plain text passwords are used, in which case the file should be a text file
+# with lines in the format:
+# username:password
+# The password (and colon) may be omitted if desired, although this
+# offers very little in the way of security.
+#
+# See the TLS client require_certificate and use_identity_as_username options
+# for alternative authentication options.
+#password_file
+
+# Access may also be controlled using a pre-shared-key file. This requires
+# TLS-PSK support and a listener configured to use it. The file should be text
+# lines in the format:
+# identity:key
+# The key should be in hexadecimal format without a leading "0x".
+#psk_file
+
+# Control access to topics on the broker using an access control list
+# file. If this parameter is defined then only the topics listed will
+# have access.
+# If the first character of a line of the ACL file is a # it is treated as a
+# comment.
+# Topic access is added with lines of the format:
+#
+# topic [read|write|readwrite] <topic>
+#
+# The access type is controlled using "read", "write" or "readwrite". This
+# parameter is optional (unless <topic> contains a space character) - if not
+# given then the access is read/write. <topic> can contain the + or #
+# wildcards as in subscriptions.
+#
+# The first set of topics are applied to anonymous clients, assuming
+# allow_anonymous is true. User specific topic ACLs are added after a
+# user line as follows:
+#
+# user <username>
+#
+# The username referred to here is the same as in password_file. It is
+# not the clientid.
+#
+#
+# It is also possible to define ACLs based on pattern substitution within the
+# topic. The patterns available for substitution are:
+#
+# %c to match the client id of the client
+# %u to match the username of the client
+#
+# The substitution pattern must be the only text for that level of hierarchy.
+#
+# The form is the same as for the topic keyword, but using pattern as the
+# keyword.
+# Pattern ACLs apply to all users even if the "user" keyword has previously
+# been given.
+#
+# If using bridges with usernames and ACLs, connection messages can be allowed
+# with the following pattern:
+# pattern write $SYS/broker/connection/%c/state
+#
+# pattern [read|write|readwrite] <topic>
+#
+# Example:
+#
+# pattern write sensor/%u/data
+#
+#acl_file
+
+# -----------------------------------------------------------------
+# Authentication and topic access plugin options
+# -----------------------------------------------------------------
+
+# If the auth_plugin option above is used, define options to pass to the
+# plugin here as described by the plugin instructions. All options named
+# using the format auth_opt_* will be passed to the plugin, for example:
+#
+# auth_opt_db_host
+# auth_opt_db_port
+# auth_opt_db_username
+# auth_opt_db_password
+
+
+# =================================================================
+# Bridges
+# =================================================================
+
+# A bridge is a way of connecting multiple MQTT brokers together.
+# Create a new bridge using the "connection" option as described below. Set
+# options for the bridges using the remaining parameters. You must specify the
+# address and at least one topic to subscribe to.
+# Each connection must have a unique name.
+# The address line may have multiple host address and ports specified. See
+# below in the round_robin description for more details on bridge behaviour if
+# multiple addresses are used.
+# The direction that the topic will be shared can be chosen by
+# specifying out, in or both, where the default value is out.
+# The QoS level of the bridged communication can be specified with the next
+# topic option. The default QoS level is 0, to change the QoS the topic
+# direction must also be given.
+# The local and remote prefix options allow a topic to be remapped when it is
+# bridged to/from the remote broker. This provides the ability to place a topic
+# tree in an appropriate location.
+# For more details see the mosquitto.conf man page.
+# Multiple topics can be specified per connection, but be careful
+# not to create any loops.
+# If you are using bridges with cleansession set to false (the default), then
+# you may get unexpected behaviour from incoming topics if you change what
+# topics you are subscribing to. This is because the remote broker keeps the
+# subscription for the old topic. If you have this problem, connect your bridge
+# with cleansession set to true, then reconnect with cleansession set to false
+# as normal.
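+#
+# An illustrative bridge block, following the syntax below (commented out; not
+# used by this emulator -- remote.example.com is a placeholder):
+# connection bridge-to-remote
+# address remote.example.com:1883
+# topic GOST_TIVOLI/# out 1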
+#connection <name>
+#address <host>[:<port>] [<host>[:<port>]]
+#topic <topic> [[[out | in | both] qos-level] local-prefix remote-prefix]
+
+# Set the version of the MQTT protocol to use for this bridge. Can be one
+# of mqttv31 or mqttv311. Defaults to mqttv31.
+#bridge_protocol_version mqttv31
+
+# If a bridge has topics that have "out" direction, the default behaviour is to
+# send an unsubscribe request to the remote broker on that topic. This means
+# that changing a topic direction from "in" to "out" will not keep receiving
+# incoming messages. Sending these unsubscribe requests is not always
+# desirable, setting bridge_attempt_unsubscribe to false will disable sending
+# the unsubscribe request.
+#bridge_attempt_unsubscribe true
+
+# If the bridge has more than one address given in the address/addresses
+# configuration, the round_robin option defines the behaviour of the bridge on
+# a failure of the bridge connection. If round_robin is false, the default
+# value, then the first address is treated as the main bridge connection. If
+# the connection fails, the other secondary addresses will be attempted in
+# turn. Whilst connected to a secondary bridge, the bridge will periodically
+# attempt to reconnect to the main bridge until successful.
+# If round_robin is true, then all addresses are treated as equals. If a
+# connection fails, the next address will be tried and if successful will
+# remain connected until it fails.
+#round_robin false
+
+# Set the client id to use on the remote end of this bridge connection. If not
+# defined, this defaults to 'name.hostname' where name is the connection name
+# and hostname is the hostname of this computer.
+# This replaces the old "clientid" option to avoid confusion. "clientid"
+# remains valid for the time being.
+#remote_clientid
+
+# Set the clientid to use on the local broker. If not defined, this defaults to
+# 'local.<clientid>'. If you are bridging a broker to itself, it is important
+# that local_clientid and clientid do not match.
+#local_clientid
+
+# Set the clean session variable for this bridge.
+# When set to true, when the bridge disconnects for any reason, all
+# messages and subscriptions will be cleaned up on the remote
+# broker. Note that with cleansession set to true, there may be a
+# significant amount of retained messages sent when the bridge
+# reconnects after losing its connection.
+# When set to false, the subscriptions and messages are kept on the
+# remote broker, and delivered when the bridge reconnects.
+#cleansession false
+
+# If set to true, publish notification messages to the local and remote brokers
+# giving information about the state of the bridge connection. Retained
+# messages are published to the topic $SYS/broker/connection/<name>/state
+# unless the notification_topic option is used.
+# If the message is 1 then the connection is active, or 0 if the connection has
+# failed.
+#notifications true
+
+# Choose the topic on which notification messages for this bridge are
+# published. If not set, messages are published on the topic
+# $SYS/broker/connection/<name>/state
+#notification_topic
+
+# Set the keepalive interval for this bridge connection, in
+# seconds.
+#keepalive_interval 60
+
+# Set the start type of the bridge. This controls how the bridge starts and
+# can be one of three types: automatic, lazy and once. Note that RSMB provides
+# a fourth start type "manual" which isn't currently supported by mosquitto.
+#
+# "automatic" is the default start type and means that the bridge connection
+# will be started automatically when the broker starts and also restarted
+# after a short delay (30 seconds) if the connection fails.
+#
+# Bridges using the "lazy" start type will be started automatically when the
+# number of queued messages exceeds the number set with the "threshold"
+# parameter. It will be stopped automatically after the time set by the
+# "idle_timeout" parameter. Use this start type if you wish the connection to
+# only be active when it is needed.
+#
+# A bridge using the "once" start type will be started automatically when the
+# broker starts but will not be restarted if the connection fails.
+#start_type automatic
+
+# Set the amount of time a bridge using the automatic start type will wait
+# until attempting to reconnect. Defaults to 30 seconds.
+#restart_timeout 30
+
+# Set the amount of time a bridge using the lazy start type must be idle before
+# it will be stopped. Defaults to 60 seconds.
+#idle_timeout 60
+
+# Set the number of messages that need to be queued for a bridge with lazy
+# start type to be restarted. Defaults to 10 messages.
+# Must be less than max_queued_messages.
+#threshold 10
+
+# If try_private is set to true, the bridge will attempt to indicate to the
+# remote broker that it is a bridge not an ordinary client. If successful, this
+# means that loop detection will be more effective and that retained messages
+# will be propagated correctly. Not all brokers support this feature so it may
+# be necessary to set try_private to false if your bridge does not connect
+# properly.
+#try_private true
+
+# Set the username to use when connecting to a broker that requires
+# authentication.
+# This replaces the old "username" option to avoid confusion. "username"
+# remains valid for the time being.
+#remote_username
+
+# Set the password to use when connecting to a broker that requires
+# authentication. This option is only used if remote_username is also set.
+# This replaces the old "password" option to avoid confusion. "password"
+# remains valid for the time being.
+#remote_password
+
+# -----------------------------------------------------------------
+# Certificate based SSL/TLS support
+# -----------------------------------------------------------------
+# Either bridge_cafile or bridge_capath must be defined to enable TLS support
+# for this bridge.
+# bridge_cafile defines the path to a file containing the
+# Certificate Authority certificates that have signed the remote broker
+# certificate.
+# bridge_capath defines a directory that will be searched for files containing
+# the CA certificates. For bridge_capath to work correctly, the certificate
+# files must have ".crt" as the file ending and you must run "c_rehash
+# <path to capath>" each time you add/remove a certificate.
+#bridge_cafile
+#bridge_capath
+
+# Path to the PEM encoded client certificate, if required by the remote broker.
+#bridge_certfile
+
+# Path to the PEM encoded client private key, if required by the remote broker.
+#bridge_keyfile
+
+# When using certificate based encryption, bridge_insecure disables
+# verification of the server hostname in the server certificate. This can be
+# useful when testing initial server configurations, but makes it possible for
+# a malicious third party to impersonate your server through DNS spoofing, for
+# example. Use this option in testing only. If you need to resort to using this
+# option in a production environment, your setup is at fault and there is no
+# point using encryption.
+#bridge_insecure false
+
+# -----------------------------------------------------------------
+# PSK based SSL/TLS support
+# -----------------------------------------------------------------
+# Pre-shared-key encryption provides an alternative to certificate based
+# encryption. A bridge can be configured to use PSK with the bridge_identity
+# and bridge_psk options. These are the client PSK identity, and pre-shared-key
+# in hexadecimal format with no "0x". Only one of certificate and PSK based
+# encryption can be used on one
+# bridge at once.
+#bridge_identity
+#bridge_psk
+
+
+# =================================================================
+# External config files
+# =================================================================
+
+# External configuration files may be included by using the
+# include_dir option. This defines a directory that will be searched
+# for config files. All files that end in '.conf' will be loaded as
+# a configuration file. It is best to have this as the last option
+# in the main file. This option will only be processed from the main
+# configuration file. The directory specified must not contain the
+# main configuration file.
+#include_dir
+
+# =================================================================
+# rsmb options - unlikely to ever be supported
+# =================================================================
+
+#ffdc_output
+#max_log_entries
+#trace_level
+#trace_output
+
+pid_file /var/run/mosquitto.pid
+
+persistence true
+persistence_location /mosquitto/data/
+
+user mosquitto
+
+# Port to use for the default listener.
+port 1883
+
+#log_dest file /var/log/mosquitto/mosquitto.log
+log_dest stdout
+
+allow_anonymous false
+password_file /etc/mosquitto/auth/mosquitto_pwd
diff --git a/images/redis/docker_entrypoint.sh b/images/redis/docker_entrypoint.sh
new file mode 100644
index 0000000..153a171
--- /dev/null
+++ b/images/redis/docker_entrypoint.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+
+# disabling transparent hugepages is standard Redis tuning; writing to /sys
+# requires a privileged container
+echo never | tee /sys/kernel/mm/transparent_hugepage/enabled
+echo never | tee /sys/kernel/mm/transparent_hugepage/defrag
+
+# hand off to the server so the container keeps running (assumes this script is
+# wired up as the image entrypoint; the compose file currently runs the stock
+# redis image without it)
+exec redis-server "$@"
diff --git a/images/web/Dockerfile b/images/web/Dockerfile
new file mode 100644
index 0000000..cdce092
--- /dev/null
+++ b/images/web/Dockerfile
@@ -0,0 +1,44 @@
+FROM python:3.8
+
+# Update base container install
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get -y update
+RUN apt-get install -y dialog apt-utils
+RUN apt-get upgrade -y
+
+# Add unstable repo to allow us to access latest GDAL builds
+RUN echo deb http://ftp.uk.debian.org/debian unstable main contrib non-free >> /etc/apt/sources.list
+RUN apt-get update
+
+# Existing binutils causes a dependency conflict, correct version will be installed when GDAL gets installed
+RUN apt-get remove -y binutils
+
+# Install GDAL dependencies
+RUN apt-get -t unstable install -y libgdal-dev g++
+
+# Update C env vars so compiler can find gdal
+ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
+ENV C_INCLUDE_PATH=/usr/include/gdal
+
+# This will install GDAL 2.2.4
+#RUN pip install GDAL==2.2.4
+
+# Install dependencies
+# Update base container install
+#RUN apt-get update
+#RUN apt-get upgrade -y
+
+# create a folder and set it as working directory
+RUN mkdir /code/
+WORKDIR /code/
+
+# add requirements.txt to the image
+ADD images/web/requirements.txt /code/
+
+# install python dependencies
+RUN pip install --upgrade pip
+RUN pip install -r requirements.txt
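+
+# optional sanity check (an assumption: only meaningful if the GDAL pip install
+# above is re-enabled) -- fails the build early if the bindings don't import:
+#RUN python -c "from osgeo import gdal; print(gdal.__version__)"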
+
+# create unprivileged user
+RUN adduser --disabled-password --gecos '' myuser
\ No newline at end of file
diff --git a/images/web/requirements.txt b/images/web/requirements.txt
new file mode 100644
index 0000000..0f5ce6e
--- /dev/null
+++ b/images/web/requirements.txt
@@ -0,0 +1,64 @@
+amqp<2.6,>=2.5.2
+Babel==2.6.0
+billiard<4.0,>=3.6.1
+celery==4.4.0
+certifi==2018.8.24
+chardet==3.0.4
+coreapi==2.3.3
+coreschema==0.0.4
+crontab==0.22.2
+Django==2.2.8
+django-cors-headers==2.3.0
+django-environ==0.4.5
+django-filter==2.0.0
+django-rest-auth==0.9.3
+djangorestframework==3.10.3
+djangorestframework-gis==0.3
+djangorestframework-simplejwt==3.2.3
+fastkml==0.11
+flower==0.9.2
+geographiclib==1.49
+geopy==1.19.0
+hupper==1.3
+idna==2.7
+itypes==1.1.0
+Jinja2==2.10
+kombu==4.6.7
+Markdown==2.6.11
+MarkupSafe==1.0
+numpy==1.15.2
+paho-mqtt==1.4.0
+PasteDeploy==1.5.2
+pbr==4.2.0
+plaster==1.0
+plaster-pastedeploy==0.6
+psycopg2-binary==2.8.4
+ptvsd==3.0.0
+pygeoif==0.7
+Pygments==2.2.0
+PyJWT==1.6.4
+pyramid==1.9.2
+python-dateutil==2.7.3
+pytz==2018.5
+redis==3.2.1
+python-redis-lock[django]
+repoze.lru==0.7
+requests==2.19.1
+six==1.11.0
+stevedore==1.29.0
+tornado==5.1.1
+translationstring==1.3
+uritemplate==3.0.0
+urllib3==1.23
+venusian==1.1.0
+vine==1.3.0
+virtualenv==16.0.0
+virtualenv-clone==0.3.0
+virtualenvwrapper==4.8.2
+WebOb==1.8.2
+zope.deprecation==4.3.0
+zope.interface==4.5.0
+scipy
+pika
+django-redis
+pymongo==3.8.0
\ No newline at end of file
diff --git a/images/web/run_web.sh b/images/web/run_web.sh
new file mode 100644
index 0000000..4149256
--- /dev/null
+++ b/images/web/run_web.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+# wait for the backing services (RabbitMQ, MQTT broker) to start
+sleep 10
+
+cd worker
+
+# su -m myuser -c "[ ! -d logs ] && mkdir logs"
+
+# prepare init migration
+# su -m myuser -c "python manage.py makemigrations users jobs shared --settings=shared.settings.appglobalconf"
+# migrate db, so we have the latest db schema
+# su -m myuser -c "python manage.py migrate --settings=shared.settings.appglobalconf"
+# create superuser
+#su -m myuser -c "python manage.py createsuperuser --settings=project.settings.local" # username: demo - password: ismb2018!
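+
+# NOTE: runserver below is Django's development server; a production deployment
+# would normally use a WSGI server (e.g. gunicorn) behind the same port mapping
+# -- left as-is here since this stack is an emulator.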
+
+# start development server on public ip interface, on port 8000
+su -m myuser -c "python manage.py runserver 0.0.0.0:8000 --settings=shared.settings.appglobalconf"
diff --git a/images/worker_celery/Dockerfile b/images/worker_celery/Dockerfile
new file mode 100644
index 0000000..e3feaf1
--- /dev/null
+++ b/images/worker_celery/Dockerfile
@@ -0,0 +1,47 @@
+FROM python:3.8
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get -y update
+RUN apt-get upgrade -y
+RUN apt-get install -y apt-transport-https wget
+RUN apt-get install -y software-properties-common curl
+RUN apt-get install -y libssl-dev libffi-dev
+RUN apt-get install postgresql postgresql-contrib -y
+
+# Add unstable repo to allow us to access latest GDAL builds
+RUN echo deb http://ftp.uk.debian.org/debian unstable main contrib non-free >> /etc/apt/sources.list
+RUN apt-get update
+
+# Existing binutils causes a dependency conflict, correct version will be installed when GDAL gets installed
+RUN apt-get remove -y binutils
+
+# Install GDAL dependencies
+RUN apt-get -t unstable install -y libgdal-dev g++
+
+# Update C env vars so compiler can find gdal
+ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
+ENV C_INCLUDE_PATH=/usr/include/gdal
+
+# This will install GDAL 2.2.4
+#RUN pip install GDAL==2.2.4
+
+# Install dependencies
+# Update base container install
+#RUN apt-get update
+#RUN apt-get upgrade -y
+
+# create a folder and set it as working directory
+RUN mkdir /code/
+WORKDIR /code/
+
+# add requirements.txt to the image
+ADD images/worker_celery/requirements.txt /code/
+#ADD images/worker_celery/celery_entrypoint.sh /code/
+
+# install python dependencies
+RUN pip install --upgrade pip
+RUN pip install -r requirements.txt
+
+# create unprivileged user
+RUN adduser --disabled-password --gecos '' myuser
\ No newline at end of file
diff --git a/images/worker_celery/celery_entrypoint.sh b/images/worker_celery/celery_entrypoint.sh
new file mode 100644
index 0000000..db9962f
--- /dev/null
+++ b/images/worker_celery/celery_entrypoint.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+
+echo "CELERY ENTRYPOINT 1"
+
+# wait for RabbitMQ server to start
+sleep 10
+
+echo "CELERY ENTRYPOINT 2"
+
+cd worker
+
+echo "CELERY ENTRYPOINT 3"
+
+# FIXME: Retrieved from web container (DB Migration)
+# prepare init migration
+su -m myuser -c "python manage.py makemigrations users jobs shared --settings=shared.settings.appglobalconf"
+# migrate db, so we have the latest db schema
+su -m myuser -c "python manage.py migrate --settings=shared.settings.appglobalconf"
+# FIXME: Retrieved from web (DB Migration)
+
+echo "CELERY ENTRYPOINT 4"
+
+chown myuser:myuser ./jobs
+chown myuser:myuser ./manage.py
+
+echo "CELERY ENTRYPOINT 5"
+
+sleep 15
+
+# run Celery worker for our project monica with Celery configuration stored in Celeryconf
+#su -m myuser -c "celery -A jobs.tasks worker -Q priority_queue,crowd_queue_elaboration, queue_sw_update_info --loglevel=debug -n worker1@%h -c 50 -B"
+#su -m myuser -c "celery -A jobs.broker_connection worker -Q broker_queue --loglevel=info -n worker2@%h"
+su -m myuser -c "celery -A jobs.tasks worker -Q priority_queue,taskqueue_provisioning,queue_sw_update_info,queue_task_alive --without-mingle --loglevel=warning -c 10 -B"
+
+echo "CELERY ENTRYPOINT 6"
diff --git a/images/worker_celery/db.sqlite3 b/images/worker_celery/db.sqlite3
new file mode 100644
index 0000000..e69de29
diff --git a/images/worker_celery/requirements.txt b/images/worker_celery/requirements.txt
new file mode 100644
index 0000000..0f5ce6e
--- /dev/null
+++ 
b/images/worker_celery/requirements.txt @@ -0,0 +1,64 @@ +amqp<2.6,>=2.5.2 +Babel==2.6.0 +billiard<4.0,>=3.6.1 +celery==4.4.0 +certifi==2018.8.24 +chardet==3.0.4 +coreapi==2.3.3 +coreschema==0.0.4 +crontab==0.22.2 +Django==2.2.8 +django-cors-headers==2.3.0 +django-environ==0.4.5 +django-filter==2.0.0 +django-rest-auth==0.9.3 +djangorestframework==3.10.3 +djangorestframework-gis==0.3 +djangorestframework-simplejwt==3.2.3 +fastkml==0.11 +flower==0.9.2 +geographiclib==1.49 +geopy==1.19.0 +hupper==1.3 +idna==2.7 +itypes==1.1.0 +Jinja2==2.10 +kombu==4.6.7 +Markdown==2.6.11 +MarkupSafe==1.0 +numpy==1.15.2 +paho-mqtt==1.4.0 +PasteDeploy==1.5.2 +pbr==4.2.0 +plaster==1.0 +plaster-pastedeploy==0.6 +psycopg2-binary==2.8.4 +ptvsd==3.0.0 +pygeoif==0.7 +Pygments==2.2.0 +PyJWT==1.6.4 +pyramid==1.9.2 +python-dateutil==2.7.3 +pytz==2018.5 +redis==3.2.1 +python-redis-lock[django] +repoze.lru==0.7 +requests==2.19.1 +six==1.11.0 +stevedore==1.29.0 +tornado==5.1.1 +translationstring==1.3 +uritemplate==3.0.0 +urllib3==1.23 +venusian==1.1.0 +vine==1.3.0 +virtualenv==16.0.0 +virtualenv-clone==0.3.0 +virtualenvwrapper==4.8.2 +WebOb==1.8.2 +zope.deprecation==4.3.0 +zope.interface==4.5.0 +scipy +pika +django-redis +pymongo==3.8.0 \ No newline at end of file diff --git a/testing/.gitignore b/testing/.gitignore new file mode 100644 index 0000000..51fd187 --- /dev/null +++ b/testing/.gitignore @@ -0,0 +1 @@ +logs diff --git a/testing/client_mqtt_test_mannella.py b/testing/client_mqtt_test_mannella.py new file mode 100644 index 0000000..e977f39 --- /dev/null +++ b/testing/client_mqtt_test_mannella.py @@ -0,0 +1,196 @@ +import signal +import sys +import time +import datetime +import json +import logging + +import paho.mqtt.client as mqtt +import arrow + +now = arrow.utcnow().format('YYYY-MM-DD_HH-mm') +logging.basicConfig(filename="logs/" + str(now) + ".log", level=logging.DEBUG) +logger = logging.getLogger() + + +DICTIONARY_OBSERVABLE_TOPICS = {1: ["GOST_TIVOLI/+/Observations"]} +#DICTIONARY_OBSERVABLE_TOPICS = {1: ["#"]} +# from dictionary_catalog_local import DICTIONARY_OBSERVABLE_TOPICS + +GOST_URL = "192.168.2.3" +INTERNAL_BROKER_NAME = "mosquitto" +LOCAL = "localhost" + +LOCAL_PORT = 1884 +BURST_SIZE = 5000 + + +class Settings: + list_topics = list() + flag_connection = 0 + flag_subscribe = 0 + counter_message_received = 0 + time_diff = 30 + subscribe_to_all = False + username = "mosquitto" + password = "mosquitto" + mqtt_url = GOST_URL + mqtt_port = LOCAL_PORT + + @staticmethod + def initialize_main_list(): + if not DICTIONARY_OBSERVABLE_TOPICS: + logger.warning("No dictionary!") + return + + for key in DICTIONARY_OBSERVABLE_TOPICS: + list_string = DICTIONARY_OBSERVABLE_TOPICS[key] + + if not list_string: + continue + + # Settings.list_topics.append((list_string[0], 0)) + Settings.list_topics.append((list_string[0], 1)) + # Settings.list_topics.append((list_string[0], 2)) + + +def on_message(client, userdata, message): + try: + current_time = arrow.utcnow() + + logger.info("Message topic: " + message.topic) + logger.info("Message received: " + str(message.payload)) + logger.info("current_time: " + str(current_time.format('YYYY-MM-DD HH:mm:ss'))) + # logger.info("message qos=",message.qos) + # logger.info("message retain flag=",message.retain) + + string_json = str(message.payload.decode("utf-8")) + json_received = json.loads(string_json) + try: + timestamp_str = json_received["phenomenonTime"] + timestamp = arrow.get(timestamp_str) + # timestamp = datetime.datetime.strptime(timestamp_str, '%Y-%m-%dT%H:%M:%S.%f%z') 
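+            # arrow.get() parses the ISO 8601 phenomenonTime string directly;
+            # the commented strptime variant above would need that exact format
+            # string. diff below is the publish-to-receive latency in seconds.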
+            diff = current_time - timestamp
+            diff_sec = diff.total_seconds()
+            logger.info("Message received after: " + str(diff_sec))
+            if abs(diff_sec) > Settings.time_diff:
+                logger.error(" ---------- Critical ---------- ")
+        except KeyError:
+            logger.warning("No phenomenonTime.")
+
+        Settings.counter_message_received += 1
+
+        logger.info('OnMessage JSON Conversion Success, counter_messages: {}\n'
+                    .format(str(Settings.counter_message_received)))
+
+        if Settings.counter_message_received % BURST_SIZE == 0:
+            logger.info("======================================================================\n")
+
+    except Exception as ex:
+        logger.critical('Exception OnMessage: {}'.format(ex))
+
+
+def on_connect(client, userdata, flags, rc):
+    try:
+        if Settings.flag_connection == 1:
+            return
+
+        Settings.flag_connection = 1
+
+        if Settings.subscribe_to_all:
+            logger.info('Client Connected, subscribe to all topics')
+            client.subscribe('#')
+            return
+
+        counter_topics = len(Settings.list_topics)
+
+        logger.info('Client Connected, Subscribing to {} Elements'.format(str(counter_topics)))
+        logger.info(Settings.list_topics)
+        client.subscribe(Settings.list_topics)
+
+        # client.subscribe('GOST_IOTWEEK/Datastreams(583)/Observations')
+        # client.subscribe('GOST_IOTWEEK/+/Observations')
+        # client.subscribe('GOST_LARGE_SCALE_TEST//Antonio/Datastreams')
+
+    except Exception as ex:
+        logger.critical('Exception: {}'.format(ex))
+
+
+def on_disconnect(client: mqtt.Client, userdata, rc):
+    try:
+        Settings.flag_connection = 0
+        logger.debug('Client Disconnected')
+        #client.reconnect()
+    except Exception as ex:
+        logger.critical('Exception: {}'.format(ex))
+
+
+def on_unsubscribe(client, userdata, level, buf):
+    logger.info('Unsubscribed Success! {}'.format(buf))
+
+
+def on_subscribe(client, userdata, level, buf):
+    logger.info('Subscribed Success! {}'.format(len(buf)))
+
+
+def on_log(client, userdata, level, buf):
+    logger.debug('MQTT Log raised: {}'.format(buf))
+
+
+def convert_stringtime_to_epoch(string_time, time_format='%Y-%m-%dT%H:%M:%S.%f%z'):
+    # strptime needs an explicit format and the result must be returned; the
+    # ISO-like default format here is an assumption based on the
+    # phenomenonTime values handled above
+    return time.mktime(datetime.datetime.strptime(string_time, time_format).timetuple())
+
+
+def signal_handler(signal, frame):
+    """ This signal handler overrides the default behaviour of SIGINT (pressing CTRL+C). """
+
+    logger.critical('You pressed Ctrl+C!')
+    logger.info("\nThe MQTT listener is shutting down now...\n")
+    sys.exit(0)
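+
+# NOTE: the handler above only takes effect if registered; the
+# signal.signal(signal.SIGINT, signal_handler) call in __main__ below is
+# currently commented out, so Ctrl+C raises a plain KeyboardInterrupt instead.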
""" + + logger.critical('You pressed Ctrl+C!') + logger.info("\nThe MQTT listener is turning down now...\n") + sys.exit(0) + + +def main(broker_address: str, port: int, username: str = str(), pwd: str = str()): + print("Creating new instance") + client = mqtt.Client("LocalClientTest") # create new instance + + client.on_connect = on_connect + client.on_subscribe = on_subscribe + client.on_unsubscribe = on_unsubscribe + client.on_message = on_message + client.on_disconnect = on_disconnect + client.on_log = on_log + + if username and pwd: + client.username_pw_set(username=username,password=pwd) + + Settings.initialize_main_list() + + logger.info("Connecting to broker: " + broker_address + ":" + str(port)) + client.connect(host=broker_address, port=port) # connect to broker + try: + print('Prova') + client.loop_forever() + except Exception as ex: + logger.critical('Exception in Main Function: {}'.format(ex)) + + +if __name__ == '__main__': + + formatter = "%(asctime)s.%(msecs)04d %(name)-7s %(levelname)s: %(message)s" + +# logging.basicConfig(filename="logs/" + str(now) + ".log", level=logging.DEBUG) +# logging.getLogger().handlers[0].setFormatter(logging.Formatter(formatter, datefmt="(%b-%d) %H:%M:%S")) + + logger.handlers[0].setFormatter(logging.Formatter(formatter, datefmt="(%b-%d) %H:%M:%S")) + + std_handler = logging.StreamHandler(sys.stdout) + std_handler.setLevel(logging.INFO) + std_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')) + logger.addHandler(std_handler) + + # signal.signal(signal.SIGINT, signal_handler) + + main(broker_address=Settings.mqtt_url, + port=Settings.mqtt_port, + username=Settings.username, + pwd=Settings.password) diff --git a/tools/complete_linux_configuration_prod.sh b/tools/complete_linux_configuration_prod.sh new file mode 100644 index 0000000..1d9a890 --- /dev/null +++ b/tools/complete_linux_configuration_prod.sh @@ -0,0 +1,8 @@ +#!/bin/sh +set -x + +CONFIGURE_SCRIPT=$(realpath ${PWD}/configure_docker_environment.sh) +COMPLETE_STARTUP_LINUX=$(realpath ${PWD}/complete_startup_oslinux.sh) + +eval $CONFIGURE_SCRIPT prod +eval $COMPLETE_STARTUP_LINUX \ No newline at end of file diff --git a/tools/complete_startup_oslinux.sh b/tools/complete_startup_oslinux.sh new file mode 100644 index 0000000..eb35ed9 --- /dev/null +++ b/tools/complete_startup_oslinux.sh @@ -0,0 +1,15 @@ +#!/bin/sh +set -x + +source ${PWD}/repo_paths.sh + +declare -a array_file_enhancepermission=($PATH_IMAGES/redis/redis_run.sh $PATH_IMAGES/worker_celery/celery_entrypoint.sh $PATH_CODE/shared/settings/appglobalconf.py) + +for file_permission in "${array_file_enhancepermission[@]}" + do + if [ ! -f "$file_permission" ]; then + echo "$file_permission Does not exists!" + else + chmod 777 $file_permission + fi + done diff --git a/tools/configure_docker_environment.sh b/tools/configure_docker_environment.sh new file mode 100644 index 0000000..2cec4e2 --- /dev/null +++ b/tools/configure_docker_environment.sh @@ -0,0 +1,28 @@ +#!/bin/sh + +# NOTE: this command must be called with + +set -x + +if [ -z "$1" ]; then + echo "Missing Environment Choice. 
It must be local, prod or dev" +else + echo "Environment Variable passed: $1" + CONF="$1" +fi + +source ${PWD}/repo_paths.sh + +FOLDER_DOCKER_LOGS="$PATH_CODE"/logs +FILE_SETTINGS="$PATH_CODE_SETTINGS"/settings.py + +if [ -f "$PATH_REPO"/docker-compose.override.yml ]; then rm "$PATH_REPO"/docker-compose.override.yml; fi +if [ -f "$PATH_REPO"/.env ]; then rm "$PATH_REPO"/.env; fi +if [ -f "$FILE_SETTINGS" ]; then rm "$FILE_SETTINGS"; fi +if [ ! -d "$FOLDER_DOCKER_LOGS" ]; then mkdir -p "$FOLDER_DOCKER_LOGS"; fi + +ln -s "$PATH_REPO"/.env."$CONF" "$PATH_REPO"/.env +ln -s "$FILE_SETTINGS"."$CONF" "$FILE_SETTINGS" +if [ -f "$PATH_REPO"/docker-compose.override.yml."$CONF" ]; then ln -s "$PATH_REPO"/docker-compose.override.yml."$CONF" "$PATH_REPO"/docker-compose.override.yml; fi + + diff --git a/tools/repo_paths.sh b/tools/repo_paths.sh new file mode 100644 index 0000000..10adf9b --- /dev/null +++ b/tools/repo_paths.sh @@ -0,0 +1,5 @@ +#!/bin/sh +PATH_REPO=$(realpath ${PWD}/..) +PATH_IMAGES=$PATH_REPO/images +PATH_CODE=$PATH_REPO/worker +PATH_CODE_SETTINGS=$PATH_CODE/shared/settings \ No newline at end of file diff --git a/worker/.gitignore b/worker/.gitignore new file mode 100644 index 0000000..fca0a2c --- /dev/null +++ b/worker/.gitignore @@ -0,0 +1,3 @@ +logs +celerybeat-schedule.db +__pycache__ \ No newline at end of file diff --git a/worker/.pydevproject b/worker/.pydevproject new file mode 100644 index 0000000..d001f0a --- /dev/null +++ b/worker/.pydevproject @@ -0,0 +1,5 @@ + + +Default +python interpreter + diff --git a/worker/__init__.py b/worker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/worker/api/.gitignore b/worker/api/.gitignore new file mode 100644 index 0000000..fca0a2c --- /dev/null +++ b/worker/api/.gitignore @@ -0,0 +1,3 @@ +logs +celerybeat-schedule.db +__pycache__ \ No newline at end of file diff --git a/worker/api/__init__.py b/worker/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/worker/api/apps.py b/worker/api/apps.py new file mode 100644 index 0000000..d87006d --- /dev/null +++ b/worker/api/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class ApiConfig(AppConfig): + name = 'api' diff --git a/worker/api/signals.py b/worker/api/signals.py new file mode 100644 index 0000000..e69de29 diff --git a/worker/api/views.py b/worker/api/views.py new file mode 100644 index 0000000..8c91262 --- /dev/null +++ b/worker/api/views.py @@ -0,0 +1,38 @@ +from rest_framework.views import APIView +from django.http import JsonResponse +from rest_framework import status +from shared.settings.appglobalconf import GOST_DB_THINGS, GOST_DATASTREAMS_SFN, GOST_DATASTREAMS_WRISTBAND +from api.wp6catalog import WP6Catalog +import logging + +logger = logging.getLogger('textlogger') + + +class GostDBThings(APIView): + def get(self, request, *args, **kwargs): + return JsonResponse(GOST_DB_THINGS, + safe=False, + status=status.HTTP_200_OK) + + +class GostDBSFNDatastreams(APIView): + def get(self, request, *args, **kwargs): + return JsonResponse(GOST_DATASTREAMS_SFN, + safe=False, + status=status.HTTP_200_OK) + + +class GOSTWP6CreateNewDatastream(APIView): + def post(self, request, *args, **kwargs): + logger.info('GOSTWP6CreateNewDatastream request.data {}'.format(request.data)) + response = WP6Catalog.create_thing(dict_input=request.data) + return JsonResponse(response, + safe=False, + status=status.HTTP_200_OK) + + +class GostDBWristbandDatastreams(APIView): + def get(self, request, *args, **kwargs): + return 
JsonResponse(GOST_DATASTREAMS_WRISTBAND, + safe=False, + status=status.HTTP_200_OK) diff --git a/worker/api/wp6catalog.py b/worker/api/wp6catalog.py new file mode 100644 index 0000000..b3cc934 --- /dev/null +++ b/worker/api/wp6catalog.py @@ -0,0 +1,92 @@ +from typing import Dict, Any, List +from utility.utilitydictionaries import UtilityDictionaries +from os import environ +import logging + +logger = logging.getLogger('textlogger') + + +class WP6Catalog: + LABEL_CATALOG_DATASTREAMID = "dataStreamId" + LABEL_CATALOG_MQTTTOPIC = "mqttTopic" + LABEL_CATALOG_MQTTSERVER = "mqttServer" + LABEL_CATALOG_EXTERNALID = "externalId" + LABEL_CATALOG_METADATA = "metadata" + LABEL_CATALOG_SENSORTYPE = "sensorType" + LABEL_CATALOG_UNITOFMEASUREMENT = "unitOfMeasurement" + LABEL_CATALOG_FIXEDLATITUDE = "fixedLatitude" + LABEL_CATALOG_FIXEDLONGITUDE = "fixedLongitude" + + DATASTREAM_ID_QUEUEDETECTIONALERT = 13150 + DATASTREAM_ID_CROWDHEATMAPOUTPUT = 13151 + DATASTREAM_ID_UNKNOWN = 13148 + + WRONG_ANSWER = {"WrongAnswer": "None"} + + CONVERSION_DATASTREAM = { + "HLDFAD:QueueDetectionAlert": DATASTREAM_ID_QUEUEDETECTIONALERT, + "HLDFAD:PeopleHetmap": DATASTREAM_ID_CROWDHEATMAPOUTPUT + } + + @staticmethod + def get_datastream_id(external_id: str) -> int: + return UtilityDictionaries.get_dict_field_if(dictionary=WP6Catalog.CONVERSION_DATASTREAM, + label=external_id, + none_value=WP6Catalog.DATASTREAM_ID_UNKNOWN) + + @staticmethod + def get_list_fixed_fields() -> List[str]: + return [ + WP6Catalog.LABEL_CATALOG_EXTERNALID, + WP6Catalog.LABEL_CATALOG_SENSORTYPE, + WP6Catalog.LABEL_CATALOG_UNITOFMEASUREMENT, + WP6Catalog.LABEL_CATALOG_METADATA, + WP6Catalog.LABEL_CATALOG_FIXEDLATITUDE, + WP6Catalog.LABEL_CATALOG_FIXEDLONGITUDE + ] + + @staticmethod + def create_thing(dict_input: Dict[str, Any]) -> Dict[str, Any]: + try: + if not dict_input: + return WP6Catalog.WRONG_ANSWER + + dict_output: Dict[str, Any] = dict() + + logger.info('create_thing create Dictionary') + + list_fixed_fields = WP6Catalog.get_list_fixed_fields() + + if not list_fixed_fields: + return WP6Catalog.WRONG_ANSWER + + for field in list_fixed_fields: + dict_output[field] = UtilityDictionaries.get_dict_field_if(dictionary=dict_input, + label=field) + + exposed_mqtt_host = environ.get('EXPOSED_MQTT_HOST', '127.0.0.1') + exposed_mqtt_port = environ.get('EXPOSED_MQTT_PORT', '1884') + + logger.info('create_thing mqtt={0}:{1}'.format(exposed_mqtt_host, + exposed_mqtt_port)) + external_id = UtilityDictionaries.get_dict_field_if(dictionary=dict_input, + label=WP6Catalog.LABEL_CATALOG_EXTERNALID, + none_value="unknown") + dict_output[WP6Catalog.LABEL_CATALOG_DATASTREAMID] = WP6Catalog.get_datastream_id(external_id=external_id) + dict_output[WP6Catalog.LABEL_CATALOG_MQTTTOPIC] = "GOST_TIVOLI/Datastreams({})/Observations".format(external_id) + dict_output[WP6Catalog.LABEL_CATALOG_MQTTSERVER] = "{0}:{1}".format(exposed_mqtt_host, + exposed_mqtt_port) + + logger.info('DICT Output={}'.format(dict_output)) + + return dict_output + except Exception as ex: + logger.error('WP6Catalog create_thing Exception: {}'.format(ex)) + return WP6Catalog.WRONG_ANSWER + + + + + + + diff --git a/worker/general_types/.gitignore b/worker/general_types/.gitignore new file mode 100644 index 0000000..fca0a2c --- /dev/null +++ b/worker/general_types/.gitignore @@ -0,0 +1,3 @@ +logs +celerybeat-schedule.db +__pycache__ \ No newline at end of file diff --git a/worker/general_types/enumerations.py b/worker/general_types/enumerations.py new file mode 100644 index 0000000..468b4d4 --- 
/dev/null
+++ b/worker/general_types/enumerations.py
@@ -0,0 +1,7 @@
+from enum import Enum
+
+
+class ObservableGenericType(Enum):
+    UNDEFINED = 0
+    LOCALIZATION = 1
+    CROWDDENSITYLOCAL = 2
\ No newline at end of file
diff --git a/worker/general_types/general_enums.py b/worker/general_types/general_enums.py
new file mode 100644
index 0000000..81d1c4c
--- /dev/null
+++ b/worker/general_types/general_enums.py
@@ -0,0 +1,12 @@
+from enum import Enum
+
+
+class MQTTPayloadConversion(Enum):
+    TYPE_CONVERSION_STANDARDDICTIONARY = 1
+    TYPE_CONVERSION_OGCDICTIONARY = 2
+
+
+class TypeQueueDetection(Enum):
+    QUEUEDETECTION_STATIC = 1
+    QUEUEDETECTION_DYNAMIC = 2
+    QUEUEDETECTION_ALLMONITOREDAREA = 4
\ No newline at end of file
diff --git a/worker/general_types/labels.py b/worker/general_types/labels.py
new file mode 100644
index 0000000..d8c09f0
--- /dev/null
+++ b/worker/general_types/labels.py
@@ -0,0 +1,23 @@
+
+class GOST_LABELS_DICTIONARY:
+    LABEL_GOST_DATASTREAMID = "Datastream"
+    LABEL_GOST_DEVICENAME = "DeviceName"
+    LABEL_GOST_UNITOFMEASUREMENTS = "UnitOfMeasurements"
+    LABEL_GOST_THING = "Thing"
+
+
+class LabelThingsName:
+    LABEL_THING_SFN = "SFN"
+    LABEL_THING_WRISTBAND = "WRISTBAND-GW"
+
+
+class LabelDatastreamGeneric:
+    LABEL_DSGENERIC_IOTID = "@iot.id"
+    LABEL_DSGENERIC_NAME = "name"
+    LABEL_DSGENERIC_DESCR = "description"
+    LABEL_DSGENERIC_PROPERTY = "properties"
+    LABEL_DSGENERIC_UNITOFMEAS = "unitOfMeasurement"
+    LABEL_DSGENERIC_COORDINATES = "coordinates"
+    LABEL_DSGENERIC_PHENOMENONTIME = "phenomenonTime"
+    LABEL_DSGENERIC_RESULT = "result"
+    LABEL_DSGENERIC_DATASTREAM = "Datastream"
diff --git a/worker/general_types/labelsdictionaries.py b/worker/general_types/labelsdictionaries.py
new file mode 100644
index 0000000..578a10a
--- /dev/null
+++ b/worker/general_types/labelsdictionaries.py
@@ -0,0 +1,64 @@
+class MQTTLabelsConfigurations:
+    LABEL_DICTIONARY_USERNAME = 'USERNAME'
+    LABEL_DICTIONARY_PASSWORD = 'PASSWORD'
+    LABEL_DICTIONARY_URL = 'URL'
+    LABEL_DICTIONARY_TOPICS = 'TOPICS'
+    LABEL_TOPICS_CROWDHEATMAPOUTPUT = 'CROWDHEATMAPOUTPUT'
+    LABEL_TOPICS_QUEUEDETECTIONALERT = 'QUEUEDETECTIONALERT'
+    LABEL_DICTIONARY_CLIENT_ID = "CLIENT_ID"
+    # MQTT Broker Provisioning
+
+
+class LocConfLbls:
+    LABEL_MQTT_OBSERVATION_URL = "MQTT_OBSERVATION_URL"
+    LABEL_MQTT_OBSERVATION_PORT = "MQTT_OBSERVATION_PORT"
+    LABEL_CATALOG_URL = "CATALOG_URL"
+    LABEL_GOST_URL = 'GOST_URL'
+    LABEL_CATALOG_USERNAME = "CATALOG_USERNAME"
+    LABEL_CATALOG_PASSWORD = "CATALOG_PASSWORD"
+    LABEL_TYPE_GLOBAL_CROWD_DENSITY_LABEL = "TYPE_GLOBAL_CROWD_DENSITY_LABEL"
+    LABEL_TYPE_LOCAL_CROWD_DENSITY_LABEL = "TYPE_LOCAL_CROWD_DENSITY_LABEL"
+    LABEL_TYPE_MIC_LABEL = "TYPE_MIC_LABEL"
+    LABEL_TYPE_GATE_LABEL = "TYPE_GATE_LABEL"
+    LABEL_TYPE_WEAREABLES = "TYPE_WEAREABLES"
+    LABEL_TYPE_FLOW_ANALYSIS = "TYPE_FLOW_ANALYSIS"
+    LABEL_PILOT_NAME = "PILOT_NAME"
+    LABEL_URL_GET_DEVICECOUNT = "URL_GET_DEVICECOUNT"
+    LABEL_SW_RELEASE_VERSION = "SW_RELEASE_VERSION"
+    LABEL_UPDATE_DATASTREAM_LIST = "UPDATE_DATASTREAM_LIST"
+    LABEL_PREFIX_TOPIC = "PREFIX_TOPIC"
+    LABEL_INTERVAL_OBS_VALIDITY_SECS = "INTERVAL_OBS_VALIDITY_SECS"
+    LABEL_ENABLE_EMPTY_CROWD_HEATMAP = "ENABLE_EMPTY_CROWD_HEATMAP"
+    LABEL_BYPASS_BEGINNING_CATALOG_ACQUISITION = "BYPASS_BEGINNING_CATALOG_ACQUISITION"
+    LABEL_BYPASS_MQTTINPUTMESSAGEACQUISITION = "BYPASS_MQTTINPUTMESSAGEACQUISITION"
+    LABEL_ENABLE_UNIT_TESTS = "ENABLE_UNIT_TESTS"
+    LABEL_ABORT_EXECUTION_AFTERUNITTESTS = "ABORT_EXECUTION_AFTERUNITTESTS"
+    LABEL_ENABLE_RANDOM_DENSITYMATRIX =
"ENABLE_RANDOM_DENSITYMATRIX" + LABEL_ENABLE_RANDOM_QUEUEDETECTIONALERT = "ENABLE_RANDOM_QUEUEDETECTIONALERT" + LABEL_ENABLE_RANDOM_FAKEQUEUEDETECTION = "ENABLE_RANDOM_FAKEQUEUEDETECTION" + LABEL_MQTT_CLIENT_PAHO_NAME_OBSERVABLES = "MQTT_CLIENT_PAHO_NAME_OBSERVABLES" + LABEL_MQTT_CLIENT_PAHO_NAME_DATASTREAMUPDATE = "MQTT_CLIENT_PAHO_NAME_DATASTREAMUPDATE" + LABEL_WP6_CATALOG_CONNECTIONURL = "WP6_CATALOG_CONNECTIONURL" + LABEL_WP6_CATALOG_CONNECTIONPORT = "WP6_CATALOG_CONNECTIONPORT" + LABEL_WP6_CATALOG_POSTSERVICERETRIEVEOUTPUTINFO = "WP6_CATALOG_POSTSERVICERETRIEVEOUTPUTINFO" + LABEL_OBSERVATION_DEBUG_INTERVALNUMBERNOTIFICATION = "OBSERVATION_DEBUG_INTERVALNUMBERNOTIFICATION" + LABEL_WP6_SERVICECATALOG_DICTIONARYSELECTED = "WP6_SERVICECATALOG_DICTIONARYSELECTED" + LABEL_OUTPUT_MESSAGELIST_SELECTED = "OUTPUT_MESSAGELIST_SELECTED" + LABEL_OUTPUT_MQTT_LISTTYPES = "OUTPUT_MQTT_LISTTYPES" + LABEL_TYPEQUEUEDETECTIONCOMPUTATION = "TYPE_QUEUEDETECTION" + + +class LabelDictionaryQueueShapeArea: + LABEL_DICT_QSMA_ID = "qsma_id" + LABEL_DICT_LAT = "Lat" + LABEL_DICT_LONG = "Long" + LABEL_DICT_HORIZONTAL_SIZE_M = "Horizontal_Size_m" + LABEL_DICT_VERTICAL_SIZE_M = "Vertical_Size_m" + LABEL_DICT_THRESHOLD_ALERT = "Threshold_Alert" + LABEL_DICT_FORCESTATICAPPROACH = "ForceStaticApproach" + LABEL_FORCESTATICAPPR_CAMERA_ID = "CameraID" + LABEL_FORCESTATICAPPR_ARRAYELEMENT_QUEUESHAPE = "QueueShapeInMatrix" + LABEL_FORCESTATICAPPR_INDEXROW = "IndexRow" + LABEL_FORCESTATICAPPR_RANGECOLUMNS = "RangeColumns" + LABEL_DICT_DATASTREAMID="DatastreamID" + LABEL_DICT_GROUNDPLANEORIENTATION="GPPOrient" \ No newline at end of file diff --git a/worker/general_types/modelsenums.py b/worker/general_types/modelsenums.py new file mode 100644 index 0000000..1220191 --- /dev/null +++ b/worker/general_types/modelsenums.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class OutputMessageType(Enum): + OUTPUT_MESSAGE_TYPE_CROWDHEATMAPOUTPUT = 1 + OUTPUT_MESSAGE_TYPE_QUEUEDETECTIONALERT = 2 \ No newline at end of file diff --git a/worker/general_types/observables.py b/worker/general_types/observables.py new file mode 100644 index 0000000..d003d6f --- /dev/null +++ b/worker/general_types/observables.py @@ -0,0 +1,194 @@ +import datetime +from typing import Dict, List, Any +from django.contrib.gis.geos import Point +from general_types.labels import LabelDatastreamGeneric +from general_types.enumerations import ObservableGenericType +import logging + +logger = logging.getLogger('textlogger') + + +class ObservableGeneric(object): + + def __init__(self, device_id: str, iot_id: int): + self.iot_id = iot_id + self.timestamp = datetime.datetime.utcnow() + self.device_id = device_id + + def to_dictionary(self) -> Dict[str, Any]: + raise NotImplemented + + def get_device_id(self) -> str: + return self.device_id + + def get_iot_id(self) -> int: + return self.iot_id + + def get_timestamp(self) -> datetime.datetime: + return self.timestamp + + def get_observable_type(self) -> ObservableGenericType: + raise NotImplemented + + +class Localization(ObservableGeneric): + def __init__(self, tag_id: str, iot_id: int, lat: float, lon: float): + super().__init__(device_id=tag_id, iot_id=iot_id) + self.type = 868 + self.areaId = "LST" + self.motion_state = "unknown" + self.lat = lat # 55.67298336627162 + self.lon = lon # 12.56703788516.0 + self.z = 0.0 + self.bearing = 0.0 + self.height = 0.0 + self.herr = 0.0 + self.battery_level = 2.9 + + def to_dictionary(self) -> Dict[str, Any]: + return { + LabelDatastreamGeneric.LABEL_DSGENERIC_DATASTREAM: { + 
LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: self.get_iot_id() + }, + LabelDatastreamGeneric.LABEL_DSGENERIC_PHENOMENONTIME: self.timestamp, + LabelDatastreamGeneric.LABEL_DSGENERIC_RESULT: { + "tagId": self.get_device_id(), + "type": "868", + "areaId": "LST", + "motion_state": "unknown", + "lat": self.lat, + "lon": self.lon, + "z": self.z, + "bearing": 0.0, + "height": 0.0, + "herr": 0.0, + "battery_level": 2.9, + "timestamp": self.get_timestamp().isoformat() + } + } + + def get_observable_type(self) -> ObservableGenericType: + return ObservableGenericType.LOCALIZATION + + +class CrowdDensityLocalObservation(ObservableGeneric): + def __init__(self, + device_id: str, + iot_id: int, + map_size: List[int]): + super().__init__(device_id=device_id, iot_id=iot_id) + self.module_id = "435ae19f-0eab-5561-b11a-9ead485180d6_crowd_density_local" + + self.density_map = list() + + if not map_size or len(map_size) < 2: + return + + for index_row in range(0, map_size[0]): + list_row = list() + for index_col in range(0, map_size[1]): + list_row.append(0) + self.density_map.append(list_row) + + self.original_densitymap = self.density_map + + # self.original_densitymap = [[0, 0, 1, 2, 1, 1, 1, 0, 0], + # [0, 0, 1, 2, 1, 0, 0, 0, 0], + # [1, 1, 1, 1, 0, 0, 0, 0, 0], + # [0, 2, 0, 0, 0, 0, 0, 0, 0], + # [0, 0, 1, 1, 0, 0, 2, 2, 1], + # [0, 0, 1, 1, 1, 1, 2, 5, 4], + # [0, 0, 0, 0, 1, 2, 1, 3, 3], + # [0, 0, 0, 2, 3, 2, 1, 1, 1], + # [0, 0, 1, 4, 5, 2, 2, 2, 0], + # [0, 0, 1, 4, 3, 1, 5, 5, 1], + # [0, 2, 2, 3, 4, 3, 6, 11, 8], + # [0, 1, 2, 2, 3, 3, 3, 7, 7]] + # + # self.density_map = \ + # [[0, 0, 1, 2, 1, 1, 1, 0, 0], + # [0, 0, 1, 2, 1, 0, 0, 0, 0], + # [1, 1, 1, 1, 0, 0, 0, 0, 0], + # [0, 2, 0, 0, 0, 0, 0, 0, 0], + # [0, 0, 1, 1, 0, 0, 2, 2, 1], + # [0, 0, 1, 1, 1, 1, 2, 5, 4], + # [0, 0, 0, 0, 1, 2, 1, 3, 3], + # [0, 0, 0, 2, 3, 2, 1, 1, 1], + # [0, 0, 1, 4, 5, 2, 2, 2, 0], + # [0, 0, 1, 4, 3, 1, 5, 5, 1], + # [0, 2, 2, 3, 4, 3, 6, 11, 8], + # [0, 1, 2, 2, 3, 3, 3, 7, 7]] + + self.ground_plane_position = Point(x=0, + y=0, + srid=4326) + self.density_count = 0 + self.size_area_x = 0 + self.size_area_y = 0 + + def reset_density_map(self): + if len(self.density_map) != len(self.original_densitymap): + return + if len(self.density_map[0]) != len(self.original_densitymap[0]): + return + self.density_count = 0 + counter_rows = len(self.density_map) + counter_columns = len(self.density_map[0]) + + for index_row in range(0, counter_rows): + for index_col in range(0, counter_columns): + self.density_map[index_row][index_col] = self.original_densitymap[index_row][index_col] + + def consolidate_observable(self): + self.density_count = 0 + + counter_rows = len(self.density_map) + counter_columns = len(self.density_map[0]) + + for index_row in range(0, counter_rows): + for index_col in range(0, counter_columns): + self.density_count += int(self.density_map[index_row][index_col]) + + def set_density_map(self, counter_people: int): + try: + self.reset_density_map() + + self.density_count = counter_people + counter_rows = len(self.density_map) + counter_columns = len(self.density_map[0]) + + current_counter = 0 + + while current_counter < self.density_count: + for index_row in range(0, counter_rows): + for index_col in range(0, counter_columns): + self.density_map[index_row][index_col] += 1 + current_counter += 1 + except Exception as ex: + logger.error('Observable set_density_map Exception: {}'.format(ex)) + + def to_dictionary(self) -> Dict[str, Any]: + try: + return { + 
LabelDatastreamGeneric.LABEL_DSGENERIC_DATASTREAM: { + LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: self.get_iot_id() + }, + LabelDatastreamGeneric.LABEL_DSGENERIC_PHENOMENONTIME: self.get_timestamp().isoformat(), + LabelDatastreamGeneric.LABEL_DSGENERIC_RESULT: { + "module_id": self.module_id, + "camera_ids": [ + self.get_device_id() + ], + "density_map": self.density_map, + "timestamp_2": self.get_timestamp().isoformat(), + "type_module": "crowd_density_local", + "density_count": self.density_count, + "timestamp": self.get_timestamp().isoformat() + } + } + except Exception as ex: + logger.error('Observable to_dictionary Exception: {}'.format(ex)) + return None + + def get_observable_type(self) -> ObservableGenericType: + return ObservableGenericType.CROWDDENSITYLOCAL diff --git a/worker/general_types/virtual_classes.py b/worker/general_types/virtual_classes.py new file mode 100644 index 0000000..4aff929 --- /dev/null +++ b/worker/general_types/virtual_classes.py @@ -0,0 +1,306 @@ +from general_types.modelsenums import OutputMessageType +from general_types.general_enums import MQTTPayloadConversion +from general_types.labels import LabelDatastreamGeneric +from typing import Dict, Any, List +import datetime +from django.contrib.gis.geos import Point, MultiPoint, Polygon +import json +import logging + +logger = logging.getLogger('textlogger') + + +class Dictionarizable(object): + @staticmethod + def convert_geojsonpoint_todictionary(position: Point) -> Dict[str, Any]: + if not position: + return None + + return {LabelDatastreamGeneric.LABEL_DSGENERIC_COORDINATES: [position.x, + position.y], + "type": "Point"} + + @staticmethod + def convert_polygon_to_listpoints(polygon: Polygon) -> List[List[float]]: + try: + if not polygon: + return None + + list_points = list() + + for index_ring in range(0, len(polygon)): + external_ring = polygon[index_ring] + for index_point in range(0, len(external_ring)): + x = external_ring[index_point][0] + y = external_ring[index_point][1] + list_points.append([x, y]) + + return list_points + except Exception as ex: + logger.error('convert_polygon_to_listpoints Exception: {}'.format(ex)) + return None + + @staticmethod + def convert_polygon_todictionary(polygon: Polygon) -> Dict[str, Any]: + try: + if not polygon: + return None + + list_points = Dictionarizable.convert_polygon_to_listpoints(polygon=polygon) + + if not list_points: + return None + + return { + "type": "Feature", + LabelDatastreamGeneric.LABEL_DSGENERIC_PROPERTY: {}, + "geometry": { + LabelDatastreamGeneric.LABEL_DSGENERIC_COORDINATES: + [list_points], + "type": "Polygon" + } + } + + except Exception as ex: + logger.error('convert_polygon_to_listpoints Exception: {}'.format(ex)) + return None + + @staticmethod + def convert_multipoint_todictionary(multipoint: MultiPoint) -> Dict[str, Any]: + if not multipoint: + return None + + list_dict_points = list() + + for point in multipoint: + if not point: + continue + + list_dict_points.append((point.x, point.y)) + + return { + "coordinates": [ + json.dumps(list_dict_points)], + "type": "MultiPoint" + } + + def get_list_keys(self) -> List[str]: + raise NotImplemented + + def get_specific_value(self, key: str) -> Any: + raise NotImplemented + + @staticmethod + def dictionarize_element(elem_to_dict: Any) -> Any: + try: + if isinstance(elem_to_dict, datetime.datetime): + return elem_to_dict.isoformat() + + elif isinstance(elem_to_dict, Point): + return Dictionarizable.convert_geojsonpoint_todictionary(position=elem_to_dict) + + elif isinstance(elem_to_dict, 
MultiPoint): + return Dictionarizable.convert_multipoint_todictionary(multipoint=elem_to_dict) + + elif isinstance(elem_to_dict, Polygon): + return Dictionarizable.convert_polygon_todictionary(polygon=elem_to_dict) + + elif isinstance(elem_to_dict, list): + list_return = list() + + for elem in elem_to_dict: + dict_elem = Dictionarizable.dictionarize_element(elem_to_dict=elem) + + list_return.append(dict_elem) + + return list_return + return elem_to_dict + except Exception as ex: + logger.error('dictionarize_element Exception: {}'.format(ex)) + return None + + def to_dictionary(self) -> Dict[str, Any]: + try: + list_labels = self.get_list_keys() + + if not list_labels: + return None + + dictionary = dict() + + for key in list_labels: + value = self.get_specific_value(key) + + if value is None: + continue + + value = Dictionarizable.dictionarize_element(elem_to_dict=value) + + dictionary[key] = value + + return dictionary + except Exception as ex: + return None + + def to_string(self) -> str: + try: + dictionary = self.to_dictionary() + + if not dictionary: + return str() + + return json.dumps(dictionary) + except Exception as ex: + logger.error('Dictionarizable to_string Exception: {}'.format(ex)) + return str() + + def set_single_property(self, key: str, value: Any) -> bool: + raise NotImplemented + + def from_dictionary(self, dictionary: Dict[str, Any]) -> bool: + if not dictionary: + return False + try: + counter_elem_set = 0 + for key in dictionary: + value = dictionary[key] + if self.set_single_property(key=key, + value=value): + counter_elem_set += 1 + if counter_elem_set == 0: + return False + + return True + except Exception as ex: + logger.error('Dictionarizable from_dictionary Exception: {}'.format(ex)) + return False + + +class OutputMessage(Dictionarizable): + def set_timestamp(self, timestamp: datetime.datetime): + raise NotImplemented + + def get_outputmessagetype(self) -> OutputMessageType: + raise NotImplemented + + def get_timestamp(self) -> datetime.datetime: + raise NotImplemented + + def to_specific_dictionary(self, mqtt_payloadtype: MQTTPayloadConversion) -> Dict[str, Any]: + if mqtt_payloadtype == MQTTPayloadConversion.TYPE_CONVERSION_STANDARDDICTIONARY: + return self.to_dictionary() + elif mqtt_payloadtype == MQTTPayloadConversion.TYPE_CONVERSION_OGCDICTIONARY: + return self.to_ogc_dictionary() + + return None + + def to_ogc_dictionary(self) -> Dict[str, Any]: + return_dictionary = { + LabelDatastreamGeneric.LABEL_DSGENERIC_PHENOMENONTIME: self.get_timestamp().isoformat(), + LabelDatastreamGeneric.LABEL_DSGENERIC_RESULT: self.to_dictionary() + } + return return_dictionary + + +# FIXME: Add Dependency Dictionarizable +class ObservableGeneric(object): + def __init__(self): + self.observation_id = 0 + self.run_id = 0 + self.datastream_id = 0 + self.device_id = str() + self.timestamp = datetime.datetime + self.label_cache_group = str() + self.is_exploited = False + self.pilot_name = "" + + @classmethod + def from_json(cls, json_data): + return cls(json_data) + + def __eq__(self, other) -> bool: + if not isinstance(other, self.__class__): + return False + + return self.observation_id == other.observation_id and self.datastream_id == other.datastream_id + + def __ne__(self, other) -> bool: + if not isinstance(other, self.__class__): + return True + + return self.observation_id != other.observation_id or self.datastream_id != other.datastream_id + + def __hash__(self, *args, **kwargs): + return self.observation_id + + def set_datastream_id(self, datastream_id: int): + 
self.datastream_id = datastream_id
+
+    def get_datastream_id(self) -> int:
+        return self.datastream_id
+
+    def from_dictionary(self, dictionary: Dict[str, Any]):
+        raise NotImplementedError()
+
+    def to_dictionary(self) -> Dict[str, Any]:
+        return dict()
+
+    def to_trace_string(self):
+        return self.to_string()
+
+    def to_string(self) -> str:
+        return str(self.to_dictionary())
+
+    def set_pilot_name(self, pilot_name: str):
+        self.pilot_name = pilot_name
+
+    def get_pilot_name(self) -> str:
+        return self.pilot_name
+
+    def get_timestamp(self) -> datetime.datetime:
+        return self.timestamp
+
+    def set_observable_id(self, observable_id: int):
+        self.observation_id = observable_id
+
+    def get_observation_id(self) -> int:
+        return self.observation_id
+
+    def set_label_cache(self, label_cache: str):
+        self.label_cache_group = label_cache
+
+    def get_label_cache(self) -> str:
+        return self.label_cache_group
+
+    def set_output_id(self, output_id: int):
+        raise NotImplementedError
+
+    def get_output_id(self) -> int:
+        return 0
+
+    def get_run_id(self) -> int:
+        return self.run_id
+
+    def set_run_id(self, run_id: int):
+        self.run_id = run_id
+
+    def ckeck_observable_complete(self) -> bool:
+        return False
+
+    def get_type_observable(self) -> str:
+        raise NotImplementedError()
+
+    def set_device_id(self, device_id):
+        self.device_id = device_id
+
+    def get_device_id(self) -> str:
+        return self.device_id
+
+    def check_is_observation(self, observation_id: int) -> bool:
+        return observation_id == self.observation_id
+
+    def check_equals(self, observation):
+        if not observation:
+            return False
+
+        return self.check_is_observation(observation_id=observation.get_observation_id())
diff --git a/worker/jobs/.gitignore b/worker/jobs/.gitignore
new file mode 100644
index 0000000..60ac3be
--- /dev/null
+++ b/worker/jobs/.gitignore
@@ -0,0 +1,2 @@
+migrations
+__pycache__
diff --git a/worker/jobs/__init__.py b/worker/jobs/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/worker/jobs/apps.py b/worker/jobs/apps.py
new file mode 100644
index 0000000..14c323a
--- /dev/null
+++ b/worker/jobs/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class JobsConfig(AppConfig):
+    name = 'jobs'
diff --git a/worker/jobs/cache_redis.py b/worker/jobs/cache_redis.py
new file mode 100644
index 0000000..dffd724
--- /dev/null
+++ b/worker/jobs/cache_redis.py
@@ -0,0 +1,629 @@
+import redis
+import pickle
+import json
+import logging
+import redis_lock
+import datetime
+from typing import Any, Dict, List
+
+
+logger = logging.getLogger('textlogger')
+
+
+class MyClass(object):
+    def __init__(self):
+        self.attr1 = 1
+        self.attr2 = "test string"
+
+    @classmethod
+    def from_json(cls, json_data):
+        return cls(json_data)
+        # or something like:
+        # instance = cls()
+        # instance.attr1 = json_data['attr1']
+        # ...
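
MyClass above is only a scratch example; the CacheRecord helpers that follow do the real serialization, falling back to raw byte encodings for str/int and using pickle for everything else. A minimal round-trip sketch of that pickle path:

import pickle

value = {"attr1": 1, "attr2": "test string"}
raw = pickle.dumps(value)        # the byte payload CacheRecord would store in Redis
restored = pickle.loads(raw)     # and what it hands back on a cache hit
assert restored == value
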
+ + def to_json(self): + return json.dumps({ + 'attr1': self.attr1, + 'attr2': self.attr2 + }) + + +class CacheRecord: + @staticmethod + def string_to_number(byte_array_counter: bytearray) -> int: + try: + if not byte_array_counter: + return 0 + + string_converted = str(byte_array_counter, 'utf-8') + + return int(string_converted) + except Exception as ex: + logger.error('CacheRecord string_to_number Exception: {}'.format(ex)) + return 0 + + @staticmethod + def dumps(record: Any) -> bytearray: + try: + if record is None: + return None + if type(record) is str: + return str(record).encode(encoding='utf-8') + if type(record) is int: + return int(record).to_bytes(length=4, + byteorder='little') + return pickle.dumps(obj=record) + except Exception as ex: + logger.info('CacheRecord Dump Exception: {}'.format(ex)) + return None + + @staticmethod + def loads(byte_array: bytearray, type_object: Any) -> Any: + try: + if not byte_array: + return None + if type_object is str: + return byte_array.decode(encoding='utf-8') + if type_object is int: + return int.from_bytes(bytes=byte_array, + byteorder='little') + return pickle.loads(byte_array) + except Exception as ex: + logger.info('CacheRecord Dump Exception: {}'.format(ex)) + return None + + @staticmethod + def dumps_list(list_elements: list) -> List[bytearray]: + try: + if not list_elements: + return None + + list_return = list() + + for elem in list_elements: + byte_array = CacheRecord.dumps(elem) + + if not byte_array: + continue + list_return.append(object=byte_array) + + return list_return + except Exception as ex: + logger.info('CacheRecord DumpList Exception: {}'.format(ex)) + return None + + @staticmethod + def load_list(list_bytesarray: List[bytearray], type_elems: Any = object) -> list: + try: + if not list_bytesarray: + return None + + list_return = list() + + for byte_array in list_bytesarray: + elem = CacheRecord.loads(byte_array=byte_array, type_object=type_elems) + + if not elem: + continue + list_return.append(object=elem) + + return list_return + except Exception as ex: + logger.info('CacheRecord load_list Exception: {}'.format(ex)) + return None + + @staticmethod + def load_dictionary(dict_bytesarray: Dict[bytearray, bytearray], type_key: Any, type_value: Any) -> Dict[Any, Any]: + try: + if not dict_bytesarray: + return None + + dict_return = dict() + + for key_bytes in dict_bytesarray: + key = CacheRecord.loads(byte_array=key_bytes, + type_object=type_key) + + if not key: + continue + + value_bytes = dict_bytesarray[key_bytes] + + value = CacheRecord.loads(byte_array=value_bytes, + type_object=type_value) + + if not value: + continue + + dict_return[key] = value + + return dict_return + except Exception as ex: + logger.info('CacheRecord load_dictionary Exception: {}'.format(ex)) + return None + + +class CacheRedisAdapter: + is_initialized = False + client_cache = redis.Redis + dictionary_locker = dict() + + @staticmethod + def initialize(cache_redis_configuration: Dict[str, Any]) -> bool: + try: + if CacheRedisAdapter.is_initialized: + return True + + if "HOST" not in cache_redis_configuration or "PORT" not in cache_redis_configuration: + return False + + host = cache_redis_configuration["HOST"] + port = cache_redis_configuration["PORT"] + + CacheRedisAdapter.client_cache = redis.Redis(host=host, port=port) + + redis_lock.reset_all(redis_client=CacheRedisAdapter.client_cache) + + if not CacheRedisAdapter.client_cache: + logger.error('CacheRedisAdapter Initialization Failed') + return False + + 
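
For context, broker_connection in worker/jobs/tasks.py is the caller of initialize(), passing the CACHE_REDIS_CONFIGURATION mapping from the shared settings. A hypothetical local configuration, assuming a Redis instance on localhost:6379 (note that initialize() also wipes the whole database via flushall()):

from jobs.cache_redis import CacheRedisAdapter

# hypothetical stand-in for shared.settings.dockersconf.CACHE_REDIS_CONFIGURATION
LOCAL_REDIS_CONF = {"HOST": "localhost", "PORT": 6379}

if CacheRedisAdapter.initialize(cache_redis_configuration=LOCAL_REDIS_CONF):
    # set_cache_info / get_cached_info are defined further below in this module
    CacheRedisAdapter.set_cache_info(label_info="demo_key", data="demo_value")
    print(CacheRedisAdapter.get_cached_info(label_info="demo_key", type_data=str))
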
CacheRedisAdapter.client_cache.config_resetstat() + CacheRedisAdapter.client_cache.flushall() + + CacheRedisAdapter.is_initialized = True + + logger.info('CacheRedisAdapter Initialization Success') + return True + except Exception as ex: + logger.error('CacheRedisAdapter Initialization Exception: {}'.format(ex)) + return False + + @staticmethod + def release_locker(label_info: str, locker: redis_lock.Lock) -> bool: + try: + if not locker: + return False + + if label_info in CacheRedisAdapter.dictionary_locker \ + and CacheRedisAdapter.dictionary_locker[label_info]: + locker.release() + logger.warning('CacheRedisAdapter recycle_locker Already Available') + return False + + CacheRedisAdapter.dictionary_locker[label_info] = locker + locker.release() + + return True + except Exception as ex: + logger.error('CacheRedisAdapter recycle_locker Exception: {}'.format(ex)) + return None + + @staticmethod + def acquire_locker(label_info: str) -> redis_lock.Lock: + try: + if label_info not in CacheRedisAdapter.dictionary_locker \ + or not CacheRedisAdapter.dictionary_locker[label_info]: + + lock = redis_lock.Lock(redis_client=CacheRedisAdapter.client_cache, name=str(label_info)) + CacheRedisAdapter.dictionary_locker[label_info] = lock + + locker_return = CacheRedisAdapter.dictionary_locker.pop(label_info) + + if not locker_return: + logger.error('CacheRedisAdapter acquire_locker gives None Locker! Exit') + return None + + locker_return.acquire() + return locker_return + except Exception as ex: + logger.error('CacheRedisAdapter GetLocker Exception: {}'.format(ex)) + return None + + @staticmethod + def remove_cache_info(label_info: str): + try: + if not CacheRedisAdapter.is_initialized: + return False + + CacheRedisAdapter.client_cache.delete(label_info) + + return True + except Exception as ex: + logger.error('CacheRedisAdapter RemoveCachedInfo Exception: {}'.format(ex)) + return False + + @staticmethod + def set_cache_info(label_info: str, data: Any) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + value_to_set = CacheRecord.dumps(record=data) + + if not value_to_set: + logger.info('CacheRedisAdapter set_cache_info Failed (Wrong Dump Operation), label: {}' + .format(label_info)) + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + return_flag = CacheRedisAdapter.client_cache.set(name=label_info, + value=value_to_set) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + return return_flag + except Exception as ex: + logger.error('CacheRedisAdapter set_cache_info Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def get_cached_info(label_info: str, type_data: Any) -> Any: + if not CacheRedisAdapter.is_initialized: + return None + try: + if not CacheRedisAdapter.client_cache.exists(label_info): + return None + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + return_value = CacheRedisAdapter.client_cache.get(name=label_info) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + return CacheRecord.loads(byte_array=return_value, type_object=type_data) + except Exception as ex: + logger.error('CacheRedisAdapter get_cached_info Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def list_append_singleelement(label_info: str, elem_to_append: Any) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + if not elem_to_append: + return False + + value_to_append = CacheRecord.dumps(record=elem_to_append) + 
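
The branch that follows relies on the different semantics of RPUSH and RPUSHX: the former creates the list when the key is missing, the latter refuses and returns 0. A bare redis-py sketch, assuming a local server:

import redis

r = redis.Redis(host="localhost", port=6379)
r.delete("demo_list")

print(r.rpushx("demo_list", b"a"))   # 0: key absent, nothing appended
print(r.rpush("demo_list", b"a"))    # 1: rpush creates the list
print(r.rpushx("demo_list", b"b"))   # 2: key now exists, rpushx appends
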
+ lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + if not CacheRedisAdapter.client_cache.exists(label_info): + result = CacheRedisAdapter.client_cache.rpush(label_info, + value_to_append) + else: + result = CacheRedisAdapter.client_cache.rpushx(name=label_info, + value=value_to_append) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if result <= 0: + return False + + return True + except Exception as ex: + logger.error('CacheRedisAdapter list_append_singleelement Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def list_create(label_info: str, list_startup: list = None): + try: + if not CacheRedisAdapter.is_initialized: + return False + + if not list_startup: + return True + + list_to_set = CacheRecord.dumps_list(list_elements=list_startup) + + if not list_to_set: + logger.error('CacheRedisAdapter list_append Label={0} Failed (Dump Error)'.format(label_info)) + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + result = CacheRedisAdapter.client_cache.rpush(name=label_info, + *list_to_set) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if result <=0 : + return False + + return True + except Exception as ex: + logger.error('CacheRedisAdapter list_append Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def list_getcounterelements(label_info: str) -> int: + try: + if not CacheRedisAdapter.is_initialized: + return -1 + + if not CacheRedisAdapter.client_cache.exists(label_info): + return 0 + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + counter_elements = CacheRedisAdapter.client_cache.llen(name=label_info) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if not counter_elements: + return 0 + + return counter_elements + except Exception as ex: + logger.error('CacheRedisAdapter list_getcounterelements Label={0}, Exception: {1}'.format(label_info, ex)) + return 0 + + @staticmethod + def list_extractlastelement(label_info: str, type_element: Any) -> Any: + try: + if not CacheRedisAdapter.is_initialized: + return None + + if not CacheRedisAdapter.client_cache.exists(label_info): + return None + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + elem_return = CacheRedisAdapter.client_cache.rpop(name=label_info) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if not elem_return: + return None + + return CacheRecord.loads(byte_array=elem_return, + type_object=type_element) + + except Exception as ex: + logger.error('CacheRedisAdapter list_extractlastelement Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def list_extractallelements(label_info: str, type_element: Any) -> List[Any]: + try: + if not CacheRedisAdapter.is_initialized: + return None + + if not CacheRedisAdapter.client_cache.exists(label_info): + return None + + list_elements_raw = list() + + while CacheRedisAdapter.list_getcounterelements(label_info=label_info) > 0: + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + elem_return = CacheRedisAdapter.client_cache.lpop(name=label_info) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if not elem_return: + break + + list_elements_raw.append(elem_return) + + if not list_elements_raw: + return None + + list_elems_return = list() + + for raw_elem in list_elements_raw: + elem = CacheRecord.loads(byte_array=raw_elem, + type_object=type_element) 
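
The drain loop above pops from the head of the list (LPOP) until it is empty, so callers get elements back in insertion order. The equivalent pattern against bare redis-py, assuming a local server:

import redis

r = redis.Redis(host="localhost", port=6379)
r.rpush("jobs", b"first", b"second", b"third")

drained = []
while r.llen("jobs") > 0:
    item = r.lpop("jobs")
    if item is None:      # another consumer may have emptied the list meanwhile
        break
    drained.append(item)

print(drained)            # [b'first', b'second', b'third']
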
+ + if not elem: + continue + + list_elems_return.append(elem) + + list_elements_raw.clear() + + return list_elems_return + except Exception as ex: + logger.error('CacheRedisAdapter list_extractlastelement Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def list_extractfirstelement(label_info: str, type_element: Any) -> Any: + try: + if not CacheRedisAdapter.is_initialized: + return None + + if not CacheRedisAdapter.client_cache.exists(label_info): + return None + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + elem_return = CacheRedisAdapter.client_cache.lpop(name=label_info) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + return CacheRecord.loads(byte_array=elem_return, + type_object=type_element) + + except Exception as ex: + logger.error('CacheRedisAdapter list_extractlastelement Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def counter_get(label_info: str) -> int: + try: + if not CacheRedisAdapter.is_initialized: + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + if not CacheRedisAdapter.client_cache.exists(label_info): + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + return 0 + + byte_array_counter = CacheRedisAdapter.client_cache.get(name=label_info) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + return CacheRecord.string_to_number(byte_array_counter=byte_array_counter) + except Exception as ex: + logger.error('CacheRedisAdapter counter_get Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def counter_create(label_info: str, start_value: int = 0) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + if CacheRedisAdapter.client_cache.exists(label_info): + CacheRedisAdapter.client_cache.delete(label_info) + + CacheRedisAdapter.client_cache.incr(name=label_info, + amount=start_value) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + return True + except Exception as ex: + logger.error('CacheRedisAdapter counter_create Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def counter_increase(label_info: str, increase: int = 1) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + if not CacheRedisAdapter.client_cache.exists(label_info): + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + CacheRedisAdapter.client_cache.incr(name=label_info, + amount=increase) + + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + return True + except Exception as ex: + logger.error('CacheRedisAdapter counter_increase Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def counter_decrease(label_info: str, decrease: int = 1) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + if not CacheRedisAdapter.client_cache.exists(label_info): + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + + CacheRedisAdapter.client_cache.decr(name=label_info, + amount=decrease) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + return True + except Exception as ex: + logger.error('CacheRedisAdapter counter_decrease Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def dictionary_create(label_info: str, 
dict_startup: dict = dict()) -> Any: + try: + if not CacheRedisAdapter.is_initialized: + return False + + # if label_info not in CacheRedisAdapter.list_dictionaries: + # CacheRedisAdapter.list_dictionaries.append(label_info) + + return True + except Exception as ex: + logger.error('CacheRedisAdapter dictionary_get_value Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def dictionary_get_value(label_info: str, key: str, type_value: Any) -> Any: + try: + if not CacheRedisAdapter.is_initialized: + return None + + if not CacheRedisAdapter.client_cache.exists(label_info): + return False + + if not CacheRedisAdapter.client_cache.hexists(name=label_info, + key=key): + return None + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + byte_array = CacheRedisAdapter.client_cache.hget(name=label_info, + key=key) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if not byte_array: + return None + + return CacheRecord.loads(byte_array=byte_array, type_object=type_value) + except Exception as ex: + logger.error('CacheRedisAdapter dictionary_get_value Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def dictionary_remove_value(label_info: str, key: str) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + if not CacheRedisAdapter.client_cache.exists(label_info): + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + CacheRedisAdapter.client_cache.hdel(label_info, key) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + return True + except Exception as ex: + logger.error('CacheRedisAdapter dictionary_remove_value Label={0}, Exception: {1}'.format(label_info, ex)) + return False + + @staticmethod + def dictionary_get_all(label_info: str, type_value: Any) -> Dict[str, Any]: + try: + if not CacheRedisAdapter.is_initialized: + return None + + if not CacheRedisAdapter.client_cache.exists(label_info): + return None + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + return_value = CacheRedisAdapter.client_cache.hgetall(name=label_info) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + + if not return_value: + return None + + return CacheRecord.load_dictionary(dict_bytesarray=return_value, + type_key=str, + type_value=type_value) + except Exception as ex: + logger.error('CacheRedisAdapter dictionary_get_all Label={0}, Exception: {1}'.format(label_info, ex)) + return None + + @staticmethod + def dictionary_update_value(label_info: str, key: str, value: Any) -> bool: + try: + if not CacheRedisAdapter.is_initialized: + return False + + if value is None: + return False + + lock = CacheRedisAdapter.acquire_locker(label_info=label_info) + CacheRedisAdapter.client_cache.hset(name=label_info, + key=key, + value=CacheRecord.dumps(record=value)) + CacheRedisAdapter.release_locker(label_info=label_info, locker=lock) + return True + except Exception as ex: + logger.error('CacheRedisAdapter dictionary_update_value Label={0}, Exception: {1}'.format(label_info, ex)) + return False + diff --git a/worker/jobs/models.py b/worker/jobs/models.py new file mode 100644 index 0000000..426ae8d --- /dev/null +++ b/worker/jobs/models.py @@ -0,0 +1,32 @@ +# This is an auto-generated Django model module. 
+# You'll have to do the following manually to clean this up: +# * Rearrange models' order +# * Make sure each model has one field with primary_key=True +# * Make sure each ForeignKey has `on_delete` set to the desired behavior. +# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table +# Feel free to rename the models, but don't rename db_table values or field names. +from django.contrib.gis.db import models +from pygments.lexers import get_all_lexers +from pygments.styles import get_all_styles +from django.contrib.gis.geos import Point, MultiPoint, Polygon + +import logging + +logger = logging.getLogger('textlogger') + +LEXERS = [item for item in get_all_lexers() if item[1]] +LANGUAGE_CHOICES = sorted([(item[1][0], item[0]) for item in LEXERS]) +STYLE_CHOICES = sorted((item, item) for item in get_all_styles()) + + +class SWRunningInfo(models.Model): + software_version = models.TextField(primary_key=True, blank=True) + timestamp_start = models.DateTimeField(null=True, auto_created=False, auto_now=False, blank=True) + timestamp_stop = models.DateTimeField(null=True, auto_created=False, auto_now=False, blank=True) + run_id = models.IntegerField(blank=True, null=True) + counter_observables = models.IntegerField(blank=True, default=0, null=True) + counter_device_registered = models.IntegerField(blank=True, default=0, null=True) + counter_message_output = models.IntegerField(blank=True, default=0, null=True) + + class Meta: + db_table = 'sw_running_info' diff --git a/worker/jobs/tasks.py b/worker/jobs/tasks.py new file mode 100644 index 0000000..33ddc27 --- /dev/null +++ b/worker/jobs/tasks.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python + +from shared.celery_settings import app +from shared.settings.appglobalconf import LOCAL_CONFIG_THINGS +from celery.signals import celeryd_after_setup +from services.messagesender import Publisher, MessageProducer +from shared.settings.settings import Settings +from shared.settings.dockersconf import CACHE_REDIS_CONFIGURATION +import datetime +from shared.settings.dictionary_topics import get_dictionary_observables_topics +from jobs.cache_redis import CacheRedisAdapter + +import celery + +import logging + +logger = logging.getLogger('textlogger') + + +class WorkerTasks(celery.Task): + alive_counter = 0 + + def run(self, *args, **kwargs): + logging.info('WorkerTasks RUNNING METHOD CALLED') + + def on_failure(self, exc, task_id, args, kwargs, einfo): + logging.info('{0!r} failed: {1!r}'.format(task_id, exc)) + + def after_return(self, *args, **kwargs): + pass + + def on_retry(self, exc, task_id, args, kwargs, einfo): + logging.info('WorkerTasks ON RETRY METHOD') + + def on_success(self, retval, task_id, args, kwargs): + logging.info('WorkerTasks SUCCESS ACTIVATION WORKERTASK') + + def shadow_name(self, args, kwargs, options): + logging.info('WorkerTasks SHADOW NAME') + + @staticmethod + def periodic_publish() -> bool: + try: + if Settings.list_events_publish: + return + Settings.list_events_publish.append(1) + logger.info('Called periodic publish, time: {}'.format(datetime.datetime.utcnow().isoformat())) + + dictionary_topics = get_dictionary_observables_topics(things_to_analyze=Settings.THINGS_TO_ANALYSE, + local_config_things=LOCAL_CONFIG_THINGS) + + Publisher.publish_topics(dictionary_observables=dictionary_topics, + translate_map=Settings.THINGS_TO_ANALYSE) + Settings.list_events_publish.clear() + return True + except Exception as ex: + logging.error('periodic_publish Exception: {}'.format(ex)) + return False + + 
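
periodic_publish above uses Settings.list_events_publish as a crude re-entrancy guard: a non-empty list means a publish round is still in flight, so the new call bails out. The same idea in isolation, as a minimal sketch (with the guard released in a finally clause, whereas the method above only clears it on the success path):

class PublishGuard:
    _in_flight = []

    @classmethod
    def run_once(cls, publish) -> bool:
        if cls._in_flight:           # previous round still running: skip
            return False
        cls._in_flight.append(1)
        try:
            publish()
            return True
        finally:
            cls._in_flight.clear()   # release even if publish() raises

PublishGuard.run_once(lambda: print("publishing topics..."))
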
@staticmethod
+    def request_launch_task_provisioning():
+        try:
+            logger.info('WorkerTask request_launch_task_provisioning')
+        except Exception as ex:
+            logger.error('request_launch_task_provisioning Exception: {}'.format(ex))
+
+    @staticmethod
+    def request_launch_task_sw_update():
+        try:
+            logger.info('WorkerTask request_launch_task_sw_update')
+            task_sw_update_info.apply_async(args=["{'Test':1}"],
+                                            queue='queue_sw_update_info',
+                                            serializer='json')
+        except Exception as ex:
+            logger.error('request_launch_task_sw_update Exception: {}'.format(ex))
+
+
+@app.task(bind=True, typing=False, serializer='json', base=WorkerTasks)
+def first(self, data):
+    try:
+        return {"status": True}
+    except Exception as ex:
+        logger.error('First Task Exception: {}'.format(ex))
+
+
+@app.task(bind=True, typing=False, serializer='json', base=WorkerTasks)
+def check_db(self):
+    # TODO
+    return {"status": True}
+
+
+@app.task(bind=True, typing=False, serializer='json', base=WorkerTasks)
+def task_sw_update_info(self, data):
+    try:
+        return {"status": True}
+    except Exception as ex:
+        logger.error('Task Discover Devices Exception: {}'.format(ex))
+        return {"status": False}
+
+
+@app.task(bind=True, typing=False, serializer='json', base=WorkerTasks)
+def task_alive(self):
+    try:
+        logger.info('TASK ALIVE CALLED Counter: {}'.format(WorkerTasks.alive_counter))
+        WorkerTasks.alive_counter += 1
+
+        return {"status": True}
+    except Exception as ex:
+        logger.error('TASK ALIVE EXCEPTION: {}'.format(ex))
+        return {"status": False}
+
+
+@app.task(bind=True, typing=False, serializer='json', base=WorkerTasks)
+def task_provisioning(self):
+    try:
+        logger.info('TASK PROVISIONING ACTIVE')
+        WorkerTasks.periodic_publish()
+        return {"status": True}
+
+    except Exception as ex:
+        logger.error('task_elaboration Exception: {}'.format(ex))
+        return {"status": False}
+
+
+@celeryd_after_setup.connect()
+def broker_connection(sender, instance, **kwargs):
+    try:
+        logger.info('broker_connection Application Initialization Launched')
+
+        CacheRedisAdapter.initialize(cache_redis_configuration=CACHE_REDIS_CONFIGURATION)
+
+        Settings.retrieve_environment_settings()
+        Publisher.configure(client_id=Settings.client_id,
+                            hostname=Settings.hostname,
+                            port=Settings.port,
+                            username=Settings.username,
+                            pwd=Settings.password)
+        dict_obs_topics = get_dictionary_observables_topics(things_to_analyze=Settings.THINGS_TO_ANALYSE,
+                                                            local_config_things=LOCAL_CONFIG_THINGS)
+        topics = Settings.get_list_topics(dictionary_obs_topics=dict_obs_topics)
+        Publisher.set_topics(topics=topics)
+
+        MessageProducer.set_max_counter_people(Settings.max_counter_people_densitymap)
+        Publisher.set_reference_geo_area(geo_area=Settings.geographic_area)
+        Publisher.loop_start()
+        Publisher.connect()
+        logger.info('broker_connection Application Initialization Done')
+
+        return {"status": True}
+    except Exception as ex:
+        logger.error('broker_connection Launched Exception: {}'.format(ex))
+        return {"status": False}
diff --git a/worker/manage.py b/worker/manage.py
new file mode 100644
index 0000000..fa440e4
--- /dev/null
+++ b/worker/manage.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+import os
+import sys
+
+if __name__ == "__main__":
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shared.settings.appglobalconf")
+    try:
+
+        from django.core.management import execute_from_command_line
+
+        # IF --noreload is enabled
+        # import ptvsd
+
+        # ptvsd.enable_attach("my_secret", address=('0.0.0.0', 3000))
+
+    except ImportError as exc:
+        raise ImportError(
+            "Couldn't import Django. Are you sure it's installed and "
+            "available on your PYTHONPATH environment variable? Did you "
+            "forget to activate a virtual environment?"
+        ) from exc
+    execute_from_command_line(sys.argv)
diff --git a/worker/processing/.gitignore b/worker/processing/.gitignore
new file mode 100644
index 0000000..fca0a2c
--- /dev/null
+++ b/worker/processing/.gitignore
@@ -0,0 +1,3 @@
+logs
+celerybeat-schedule.db
+__pycache__
\ No newline at end of file
diff --git a/worker/processing/calculate_new_position.py b/worker/processing/calculate_new_position.py
new file mode 100644
index 0000000..13b89b2
--- /dev/null
+++ b/worker/processing/calculate_new_position.py
@@ -0,0 +1,20 @@
+from utility.geodesy import GeoPosition
+from shared.settings.settings import GeoRefArea
+import random
+
+
+class CalculatePosition:
+    @staticmethod
+    def calculate_position(ref_pos: GeoPosition, georefarea: GeoRefArea) -> GeoPosition:
+        try:
+            new_east = random.uniform(a=0, b=georefarea.geoarea_max_east)
+            new_north = random.uniform(a=0, b=georefarea.geoarea_max_north)
+
+            return_pos = ref_pos.get_copy()
+
+            return_pos = return_pos.add_enu_distance(enu_distance=[new_east,
+                                                                   new_north])
+
+            return return_pos
+        except Exception as ex:
+            print('calculate_position Exception: {}'.format(ex))
diff --git a/worker/services/.gitignore b/worker/services/.gitignore
new file mode 100644
index 0000000..fca0a2c
--- /dev/null
+++ b/worker/services/.gitignore
@@ -0,0 +1,3 @@
+logs
+celerybeat-schedule.db
+__pycache__
\ No newline at end of file
diff --git a/worker/services/messageproducer.py b/worker/services/messageproducer.py
new file mode 100644
index 0000000..97b3b46
--- /dev/null
+++ b/worker/services/messageproducer.py
@@ -0,0 +1,70 @@
+from general_types.observables import ObservableGeneric, Localization, CrowdDensityLocalObservation
+from general_types.enumerations import ObservableGenericType
+from processing.calculate_new_position import CalculatePosition
+from utility.utilitydictionaries import UtilityDictionaries
+from shared.settings.datastreams import LabelDatastramCamera
+from shared.settings.settings import GeoRefArea
+from utility.geodesy import GeoPosition
+from typing import Dict, Any
+import random
+import logging
+import math
+
+logger = logging.getLogger('textlogger')
+
+
+class MessageProducer(object):
+    ref_pos: GeoPosition = None
+    geo_area: GeoRefArea = None
+    max_counter_people: int = 2
+
+    @staticmethod
+    def set_max_counter_people(max_counter_people: int):
+        MessageProducer.max_counter_people = max_counter_people  # assign the attribute, not the method
+
+    @staticmethod
+    def get_new_observable(
+            type_obs: ObservableGenericType,
+            device_id: str,
+            iot_id: int,
+            dictionary_unitofmeasures: Dict[str, Any] = None) -> ObservableGeneric:
+        try:
+            if type_obs == ObservableGenericType.UNDEFINED:
+                logger.info('MessageProducer get_new_observable NO Type Observable Set (None)')
+                return None
+
+            if type_obs == ObservableGenericType.LOCALIZATION:
+                position = CalculatePosition.calculate_position(ref_pos=MessageProducer.ref_pos,
+                                                                georefarea=MessageProducer.geo_area)
+
+                if not position:
+                    logger.warning('MessageProducer get_new_observable NO Position Provided')
+                    return None
+
+                localization = Localization(tag_id=device_id,
+                                            iot_id=iot_id,
+                                            lat=position.latitude,
+                                            lon=position.longitude)
+
+                logger.info('MessageProducer get_new_observable Position')
+
+                return localization
+
+            elif type_obs == ObservableGenericType.CROWDDENSITYLOCAL:
+                density_map_size = UtilityDictionaries.get_dict_field_if(dictionary=dictionary_unitofmeasures,
+
+
+class MessageProducer(object):
+    ref_pos: GeoPosition = None
+    geo_area: GeoRefArea = None
+    max_counter_people: int = 2
+
+    @staticmethod
+    def set_max_counter_people(max_counter_people: int):
+        # Store the ceiling used when drawing random people counters
+        MessageProducer.max_counter_people = max_counter_people
+
+    @staticmethod
+    def get_new_observable(
+            type_obs: ObservableGenericType,
+            device_id: str,
+            iot_id: int,
+            dictionary_unitofmeasures: Dict[str, Any] = None) -> ObservableGeneric:
+        try:
+            if type_obs == ObservableGenericType.UNDEFINED:
+                logger.info('MessageProducer get_new_observable NO Type Observable Set (None)')
+                return None
+
+            if type_obs == ObservableGenericType.LOCALIZATION:
+                position = CalculatePosition.calculate_position(ref_pos=MessageProducer.ref_pos,
+                                                                georefarea=MessageProducer.geo_area)
+
+                if not position:
+                    logger.warning('MessageProducer get_new_observable NO Position Provided')
+                    return None
+
+                localization = Localization(tag_id=device_id,
+                                            iot_id=iot_id,
+                                            lat=position.latitude,
+                                            lon=position.longitude)
+
+                logger.info('MessageProducer get_new_observable Position')
+
+                return localization
+
+            elif type_obs == ObservableGenericType.CROWDDENSITYLOCAL:
+                density_map_size = UtilityDictionaries.get_dict_field_if(dictionary=dictionary_unitofmeasures,
+                                                                         label=LabelDatastramCamera.LABEL_DSCAM_GPS)
+
+                counter_people = random.uniform(a=0,
+                                                b=MessageProducer.max_counter_people)
+                crowd_density_local = CrowdDensityLocalObservation(device_id=device_id,
+                                                                   iot_id=iot_id,
+                                                                   map_size=density_map_size)
+                crowd_density_local.set_density_map(counter_people=int(math.floor(counter_people)))
+                crowd_density_local.consolidate_observable()
+
+                return crowd_density_local
+
+            return None
+        except Exception as ex:
+            logger.error('MessageProducer get_new_observable Exception: {}'.format(ex))
+            return None
diff --git a/worker/services/messagesender.py b/worker/services/messagesender.py
new file mode 100644
index 0000000..7743b15
--- /dev/null
+++ b/worker/services/messagesender.py
@@ -0,0 +1,136 @@
+from services.mqtt_publisher import ServerMQTT
+from services.messageproducer import MessageProducer
+from shared.settings.settings import GeoRefArea
+from general_types.labels import GOST_LABELS_DICTIONARY
+from general_types.enumerations import ObservableGenericType
+from utility.geodesy import GeoPosition
+from utility.utilitydictionaries import UtilityDictionaries
+from typing import List, Dict, Any
+
+import logging
+
+logger = logging.getLogger('textlogger')
+
+
+class Publisher(object):
+    @staticmethod
+    def set_topics(topics: List[str]):
+        ServerMQTT.set_topics(topics=topics)
+
+    @staticmethod
+    def configure(client_id: str,
+                  hostname: str,
+                  port: int,
+                  username: str = str(),
+                  pwd: str = str()):
+        # Log the target broker and user, but never the password itself
+        logger.info('Try Connecting MQTT Client on {0} Port: {1}, User: {2}'.format(hostname,
+                                                                                    port,
+                                                                                    username))
+        ServerMQTT.configure_client(client_id=client_id,
+                                    hostname=hostname,
+                                    port=port,
+                                    username=username,
+                                    pwd=pwd)
+
+    @staticmethod
+    def connect():
+        ServerMQTT.connect_client()
+
+    @staticmethod
+    def loop_wait():
+        ServerMQTT.loop_wait()
+
+    @staticmethod
+    def loop_start():
+        ServerMQTT.loop_start()
+
+    @staticmethod
+    def stop_client():
+        ServerMQTT.stop_client()
+
+    @staticmethod
+    def set_reference_geo_area(geo_area: GeoRefArea) -> bool:
+        try:
+            if not geo_area:
+                return False
+            MessageProducer.geo_area = geo_area
+            MessageProducer.ref_pos = GeoPosition(latitude=geo_area.reference_pos_lat,
+                                                  longitude=geo_area.reference_pos_long,
+                                                  altitude=0,
+                                                  request_ecef_conf=True)
+            return True
+        except Exception as ex:
+            logger.error('set_reference_geo_area Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def extract_device_id(topic: str) -> str:
+        if not topic:
+            return str()
+
+        if '/' not in topic:
+            return topic
+
+        list_parts = topic.split('/')
+
+        return list_parts[-1]
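+
+    # publish_topics walks the iot_id -> {datastream topic, device name, thing name} map
+    # produced by get_dictionary_observables_topics and publishes one synthetic
+    # observation per datastream via ServerMQTT.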
+    @staticmethod
+    def publish_topics(dictionary_observables: Dict[int, Dict[str, str]],
+                       translate_map: Dict[str, ObservableGenericType]) -> bool:
+        try:
+            if not dictionary_observables:
+                logger.info('publish_topics NO Dictionary to Publish')
+                return False
+
+            counter_message_sent = 0
+
+            for iot_id in dictionary_observables:
+                dict_topic_deviceid = dictionary_observables[iot_id]
+
+                if GOST_LABELS_DICTIONARY.LABEL_GOST_DATASTREAMID not in dict_topic_deviceid.keys():
+                    continue
+
+                if GOST_LABELS_DICTIONARY.LABEL_GOST_DEVICENAME not in dict_topic_deviceid.keys():
+                    continue
+
+                if GOST_LABELS_DICTIONARY.LABEL_GOST_THING not in dict_topic_deviceid.keys():
+                    continue
+
+                topic = dict_topic_deviceid[GOST_LABELS_DICTIONARY.LABEL_GOST_DATASTREAMID]
+                device_id = dict_topic_deviceid[GOST_LABELS_DICTIONARY.LABEL_GOST_DEVICENAME]
+                properties = UtilityDictionaries.get_dict_field_if(dictionary=dict_topic_deviceid,
+                                                                   label=GOST_LABELS_DICTIONARY.LABEL_GOST_UNITOFMEASUREMENTS)
+
+                thing_name = UtilityDictionaries.get_dict_field_if(dictionary=dict_topic_deviceid,
+                                                                   label=GOST_LABELS_DICTIONARY.LABEL_GOST_THING)
+
+                observable_type = UtilityDictionaries.get_dict_field_if(dictionary=translate_map,
+                                                                        label=thing_name,
+                                                                        none_value=ObservableGenericType.UNDEFINED)
+
+                logger.info('MQTT Publisher Topic: {0}, DeviceID: {1}'.format(topic,
+                                                                              device_id))
+
+                observable = MessageProducer.get_new_observable(
+                    type_obs=observable_type,
+                    device_id=device_id,
+                    iot_id=iot_id,
+                    dictionary_unitofmeasures=properties
+                )
+
+                if not observable:
+                    continue
+
+                ServerMQTT.publish(topic=topic,
+                                   dictionary=observable.to_dictionary())
+
+                counter_message_sent += 1
+
+                if (counter_message_sent % 10) == 0:
+                    logger.info('MQTT Publish Messages: {}'.format(counter_message_sent))
+
+            logger.info('MQTT Publish Messages Completed: {}'.format(counter_message_sent))
+            return True
+        except Exception as ex:
+            logger.error('Exception publish_topics: {}'.format(ex))
+            return False
diff --git a/worker/services/mqtt_client.py b/worker/services/mqtt_client.py
new file mode 100644
index 0000000..ab6fcb6
--- /dev/null
+++ b/worker/services/mqtt_client.py
@@ -0,0 +1,94 @@
+import paho.mqtt.client as mqtt
+
+
+class MQTTClient:
+    client_mqtt = None
+    flag_connected = 0
+    list_topics = None
+
+    @staticmethod
+    def test_method(print_msg: str):
+        print('MQTTClient TestMethod Print: {}'.format(print_msg))
+
+    @staticmethod
+    def on_connect(client, userdata, flags, rc):
+        try:
+            if MQTTClient.flag_connected == 1:
+                return
+
+            MQTTClient.flag_connected = 1
+
+            print('MQTT on_connect event')
+
+            if not MQTTClient.list_topics:
+                print('MQTT Registration to all topics')
+                client.subscribe(topic='#', qos=0)
+                return
+
+            client.subscribe(MQTTClient.list_topics)
+            print('MQTT Registration to specific topics: {}'.format(len(MQTTClient.list_topics)))
+        except Exception as ex:
+            print('Exception: {}'.format(ex))
+
+    @staticmethod
+    def on_disconnect(client: mqtt.Client, userdata, rc):
+        try:
+            MQTTClient.flag_connected = 0
+
+            print('Client Disconnected')
+            # client.reconnect()
+        except Exception as ex:
+            print('Exception: {}'.format(ex))
+
+    @staticmethod
+    def on_unsubscribe(client, userdata, level):
+        print('Unsubscribed Success!')
+
+    @staticmethod
+    def on_subscribe(client: mqtt.Client, userdata, mid, granted_qos):
+        print('Subscribed Success!')
+
+    @staticmethod
+    def on_message(client, userdata, message):
+        try:
+            if not message:
+                return
+            print('Message topic: ' + message.topic)
+            print('Message received: ' + str(message.payload))
+        except Exception as ex:
+            print(ex)
+
+    @staticmethod
+    def connect(hostname: str, port: int):
+        try:
+            MQTTClient.client_mqtt.connect(host=hostname, port=port)
+            print('MQTT Client Test Connected to host: {0}, port: {1}'.format(hostname, port))
+            MQTTClient.client_mqtt.loop_forever()
+        except Exception as ex:
+            print('MQTT Client connect Exception: {}'.format(ex))
+
+    @staticmethod
+    def disconnect():
+        try:
+            if not MQTTClient.client_mqtt:
+                return
+            MQTTClient.client_mqtt.disconnect()
+            print('MQTT Client Test Disconnected')
+            MQTTClient.client_mqtt.loop_stop()
+        except Exception as ex:
+            print('MQTT Client disconnect Exception: {}'.format(ex))
+
+    @staticmethod
+    def set_list_topics(list_topics: list):
+        MQTTClient.list_topics = list_topics
+
+    @staticmethod
+    def initialize(client_id: str):
+        try:
+            MQTTClient.client_mqtt = mqtt.Client(client_id)
+            MQTTClient.client_mqtt.on_connect = MQTTClient.on_connect
+            MQTTClient.client_mqtt.on_unsubscribe = MQTTClient.on_unsubscribe
+            MQTTClient.client_mqtt.on_disconnect = 
MQTTClient.on_disconnect + MQTTClient.client_mqtt.on_message = MQTTClient.on_message + MQTTClient.client_mqtt.on_subscribe = MQTTClient.on_subscribe + except Exception as ex: + print('MQTT Client initialize Exception: {}'.format(ex)) diff --git a/worker/services/mqtt_publisher.py b/worker/services/mqtt_publisher.py new file mode 100644 index 0000000..b2b6fee --- /dev/null +++ b/worker/services/mqtt_publisher.py @@ -0,0 +1,267 @@ +import paho.mqtt.client as mqtt +import paho.mqtt.publish as publish # FIXME: REMOVE (It is just for test) +import json +from typing import Dict, Any, List +import datetime +import logging +from jobs.cache_redis import CacheRedisAdapter + +logger = logging.getLogger('textlogger') + + +class DateTimeEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, datetime.datetime): + return o.isoformat() + 'Z' + + return json.JSONEncoder.default(self, o) + + +class SettingsMQTT(object): + def __init__(self, + hostname: str = str(), + port: int = 0, + client_id: str = str(), + username: str = str(), + password: str = str()): + self.hostname = hostname + self.port = port + self.client_id = client_id + self.username = username + self.password = password + self.dict_auth = dict() + self.set_dict_auth(username=username, + pwd=password) + + def set_dict_auth(self, + username: str, + pwd: str) -> bool: + if not username or not pwd: + return False + + self.dict_auth['username'] = username + self.dict_auth['password'] = pwd + + def get_auth_dictionary(self) -> Dict[str, str]: + if not self.dict_auth: + return None + + return self.dict_auth + + +class ServerMQTT(object): + LABEL_TOPICS = 'SERVERMQTT_TOPICS' + client_mqtt = None + flag_connected = 0 + counter_message_published = 0 + debug_numbernotification = 1 + settings_mqtt = SettingsMQTT() + port = 0 + reference_datetime = None + + @staticmethod + def get_client_mqtt() -> mqtt.Client: + return ServerMQTT.client_mqtt + + @staticmethod + def subscribe_topic_lists(topics: List[str]) -> bool: + try: + if not topics: + return False + + if not ServerMQTT.client_mqtt: + return False + + list_tuple = list() + + for topic in topics: + if not topic: + continue + list_tuple.append((topic, 0)) + + ServerMQTT.get_client_mqtt().subscribe(topic=list_tuple) + logger.info('ServerMQTT Subscription to topics') + except Exception as ex: + logger.error('subscribe_topic_lists Exception: {}'.format(ex)) + return False + + @staticmethod + def on_connect(client, userdata, flags, rc): + try: + if ServerMQTT.flag_connected == 1: + return + + topics = CacheRedisAdapter.get_cached_info(label_info=ServerMQTT.LABEL_TOPICS, + type_data=list) + + ServerMQTT.subscribe_topic_lists(topics=topics) + + ServerMQTT.flag_connected = 1 + + logger.info('ServerMQTT on_connect event') + except Exception as ex: + logger.error('Exception: {}'.format(ex)) + + @staticmethod + def on_log(client, userdata, level, buf): + logger.info('ServerMQTT Log raised: {}'.format(buf)) + + @staticmethod + def on_disconnect(client: mqtt.Client, userdata, rc): + try: + ServerMQTT.flag_connected = 0 + + logger.info('ServerMQTT Disconnected') + except Exception as ex: + logger.error('ServerMQTT on_disconnect Exception: {}'.format(ex)) + + @staticmethod + def on_subscribe(client: mqtt.Client, userdata, mid, granted_qos): + try: + logger.error('ServerMQTT on_subscribe Raised') + except Exception as ex: + logger.error('ServerMQTT on_subscribe Exception: {}'.format(ex)) + + @staticmethod + def on_publish(client, userdata, result): + try: + logger.info('ServerMQTT OnPublish Raised') + 
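+            # Count every broker acknowledgement; every debug_numbernotification-th publish,
+            # log the running total and the seconds elapsed since connect_client() was called.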
ServerMQTT.counter_message_published += 1 + + if (ServerMQTT.counter_message_published % ServerMQTT.debug_numbernotification) == 0: + interval_secs = (datetime.datetime.utcnow()-ServerMQTT.reference_datetime).total_seconds() + logger.info('ServerMQTT OnPublish Method raised: {0} RelativeTime: {1}'.format(ServerMQTT.counter_message_published, + interval_secs)) + except Exception as ex: + logger.error('ServerMQTT OnPublish Exception: {}'.format(ex)) + + @staticmethod + def configure_client(client_id: str, + hostname: str, + port: int, + username: str = str(), + pwd: str = str()) -> bool: + try: + ServerMQTT.client_mqtt = mqtt.Client(client_id=client_id, clean_session=True) + ServerMQTT.client_mqtt.on_connect = ServerMQTT.on_connect + ServerMQTT.client_mqtt.on_disconnect = ServerMQTT.on_disconnect + ServerMQTT.client_mqtt.on_subscribe = ServerMQTT.on_subscribe + ServerMQTT.client_mqtt.on_publish = ServerMQTT.on_publish + ServerMQTT.client_mqtt.on_log = ServerMQTT.on_log + ServerMQTT.hostname = hostname + ServerMQTT.port = port + + ServerMQTT.settings_mqtt = SettingsMQTT(hostname=hostname, + port=port, + username=username, + password=pwd) + + if not username or not pwd: + logger.info('ServerMQTT configure_client NO Credential Set') + return True + + logger.info('ServerMQTT configure_client set Username and PWD: {}'.format(username)) + + ServerMQTT.client_mqtt.username_pw_set(username=username, + password=pwd) + return True + except Exception as ex: + logger.error('ServerMQTT configure_client Exception: {}'.format(ex)) + + @staticmethod + def set_topics(topics: List[str]) -> bool: + try: + if not topics: + return False + + CacheRedisAdapter.set_cache_info(label_info=ServerMQTT.LABEL_TOPICS, + data=topics) + return True + except Exception as ex: + logger.error('ServerMQTT set_topics Exception: {}'.format(ex)) + return False + + @staticmethod + def stop_client(): + try: + ServerMQTT.get_client_mqtt().disconnect() + ServerMQTT.get_client_mqtt().loop_stop() + except Exception as ex: + logger.error('ServerMQTT stop_client Exception: {}'.format(ex)) + + @staticmethod + def connect_client(): + try: + # logger.info( + # 'ServerMQTT configure_client trying connect hostname: {0}, port: {1}'.format(ServerMQTT.hostname, ServerMQTT.port)) + # ServerMQTT.client_mqtt.connect(host=ServerMQTT.hostname, + # port=ServerMQTT.port) + logger.info('ServerMQTT configure_client hostname: {0}, port: {1}'.format(ServerMQTT.hostname, ServerMQTT.port)) + ServerMQTT.reference_datetime = datetime.datetime.utcnow() + except Exception as ex: + logger.error('ServerMQTT connect_client Exception: {}'.format(ex)) + + @staticmethod + def loop_start(): + try: + logger.info('ServerMQTT Loop Start') + # ServerMQTT.get_client_mqtt().loop_start() + except Exception as ex: + logger.error('ServerMQTT Loop Forever Exception: {}'.format(ex)) + + @staticmethod + def loop_wait(): + try: + logger.info('ServerMQTT Loop Forever') + ServerMQTT.get_client_mqtt().loop_forever() + except Exception as ex: + logger.error('ServerMQTT Loop Forever Exception: {}'.format(ex)) + + @staticmethod + def publish(topic: str, + dictionary: Dict[str, Any]) -> bool: + try: + if not ServerMQTT.client_mqtt: + logger.warning('ServerMQTT Publish NOT DONE') + return False + + if not dictionary: + logger.warning('No Datat To Transfer') + return False + + # print('Try Sending MQTT Message publish_bis....') + + string_json = json.dumps(obj=dictionary, + cls=DateTimeEncoder) + + publish.single(topic=topic, + payload=string_json, + 
hostname=ServerMQTT.settings_mqtt.hostname, + port=ServerMQTT.settings_mqtt.port, + retain=False, + auth=ServerMQTT.settings_mqtt.get_auth_dictionary(), + client_id=ServerMQTT.settings_mqtt.client_id + ) + + # return_info = ServerMQTT.client_mqtt.publish(topic=topic, + # payload=string_json, + # qos=0, + # retain=False) + # + # if not return_info: + # logger.warning('ServerMQTT Publish Failed return_info is None') + # return False + # + # if return_info.rc != mqtt.MQTT_ERR_SUCCESS: + # logger.warning('ServerMQTT Publish Error: {}'.format(str(return_info.rc))) + # return False + # + # return_info.wait_for_publish() + + logger.info('ServerMQTT Publish Success, topic: {}'.format(topic)) + + # print('Success Sending publish_bis: {}'.format(string_json)) + return True + except Exception as ex: + logger.error('Exception ServerMQTT PublishBis: {}'.format(ex)) + return False diff --git a/worker/shared/.gitignore b/worker/shared/.gitignore new file mode 100644 index 0000000..64e14ae --- /dev/null +++ b/worker/shared/.gitignore @@ -0,0 +1,4 @@ +logs +celerybeat-schedule.db +__pycache__ +db.sqlite3 \ No newline at end of file diff --git a/worker/shared/__init__.py b/worker/shared/__init__.py new file mode 100644 index 0000000..b1eadfb --- /dev/null +++ b/worker/shared/__init__.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import + +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. +from shared.celery_settings import app as celery_app + +__all__ = ['celery_app'] \ No newline at end of file diff --git a/worker/shared/celery_settings.py b/worker/shared/celery_settings.py new file mode 100644 index 0000000..26ad035 --- /dev/null +++ b/worker/shared/celery_settings.py @@ -0,0 +1,37 @@ +from __future__ import absolute_import, unicode_literals +import os +from celery import Celery +from django.apps import apps +import django +from shared.settings.appglobalconf import SCHEDULER_SETTINGS + +#from celery.task.schedules import crontab + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shared.settings.appglobalconf") + +app = Celery('jobs', fixups=[]) + + +django.setup() + +app.config_from_object('django.conf:settings') +app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()]) + + +app.conf.beat_schedule = { + 'task_elaboration': { + 'task': 'jobs.tasks.task_provisioning', + 'schedule': SCHEDULER_SETTINGS['TASK_PROVISIONING'], # Every N seconds + 'options': {'queue': 'taskqueue_provisioning'}, + }, + 'task_alive': { + 'task': 'jobs.tasks.task_alive', + 'schedule': SCHEDULER_SETTINGS['TASK_ALIVEAPP'], # Every N seconds + 'options': {'queue': 'queue_task_alive'}, + }, + # 'task_sw_update_info': { + # 'task': 'jobs.tasks.task_sw_update_info', + # 'schedule': 100.0, #E very N seconds + # 'options': {'queue': 'queue_sw_update_info'}, + # }, +} \ No newline at end of file diff --git a/worker/shared/settings/.gitignore b/worker/shared/settings/.gitignore new file mode 100644 index 0000000..3837bc2 --- /dev/null +++ b/worker/shared/settings/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +settings.py \ No newline at end of file diff --git a/worker/shared/settings/__init__.py b/worker/shared/settings/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/worker/shared/settings/appglobalconf.py b/worker/shared/settings/appglobalconf.py new file mode 100644 index 0000000..cdcfa84 --- /dev/null +++ b/worker/shared/settings/appglobalconf.py @@ -0,0 +1,76 @@ +from shared.settings.base import * +from shared.settings.dockersconf import * 
+from shared.settings.version import SW_VERSION +from shared.settings.appstableconf import * +from general_types.labelsdictionaries import LocConfLbls +from general_types.general_enums import TypeQueueDetection +from typing import Dict, List, Any +from general_types.labels import LabelThingsName, LabelDatastreamGeneric +from shared.settings.datastreams import DatastreamWristband, DatastreamCamera +from shared.settings.settings import Settings +from utility.geodesy import GeoPosition + +# Database Condocker-composeuration +# https://docs.djangoproject.com/en/2.0/ref/settings/#databases + +GLOBAL_INFO_ENVIRONMENT = "DEV" +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = True +TEMPLATE_DEBUG = True + +GOST_IOTID_THING_SFN = os.environ.get('GOST_THINGID_SFN', '5') +GOST_IOTID_THING_WRISTBAND = os.environ.get('GOST_THINGID_WRISTBAND', '18') +GOST_IOTID_THING_SFN = int(GOST_IOTID_THING_SFN) +GOST_IOTID_THING_WRISTBAND = int(GOST_IOTID_THING_WRISTBAND) + +GOST_DB_THINGS = { + "value": + [ + { + LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: GOST_IOTID_THING_SFN, + LabelDatastreamGeneric.LABEL_DSGENERIC_NAME: LabelThingsName.LABEL_THING_SFN, + LabelDatastreamGeneric.LABEL_DSGENERIC_DESCR: "Security Fusion Node", + LabelDatastreamGeneric.LABEL_DSGENERIC_PROPERTY: + { + "type": "Video Processing Framework" + }, + }, + { + LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: GOST_IOTID_THING_WRISTBAND, + LabelDatastreamGeneric.LABEL_DSGENERIC_NAME: LabelThingsName.LABEL_THING_WRISTBAND, + LabelDatastreamGeneric.LABEL_DSGENERIC_DESCR: "Wristband Gateway by DEXELS", + LabelDatastreamGeneric.LABEL_DSGENERIC_PROPERTY: { + "type": "Integration Gateway for 868 and UWB Wristbands" + }, + }, + ] +} + +GOST_DATASTREAMS_SFN = { + "value": [ + DatastreamCamera(iot_id=Settings.CAMERA_IOTID, + name=Settings.CAMERA_NAME, + gpp=GeoPosition(latitude=Settings.CAMERA_GPP_LATITUDE, + longitude=Settings.CAMERA_GPP_LONGITUDE), + ground_plane_size=[Settings.CAMERA_NUMBER_ROWS, + Settings.CAMERA_NUMBER_COLS], + gpo=Settings.CAMERA_GPO).to_dict() + ] +} + + +GOST_DATASTREAMS_WRISTBAND = { + "value": DatastreamWristband.get_datastreams_wristbands(iot_id_start=Settings.WRISTBAND_ID_START, + counter_datastreams=Settings.COUNT_WRISTBANDS) +} + + +LOCAL_CONFIG_THINGS = { + LabelThingsName.LABEL_THING_SFN: GOST_DATASTREAMS_SFN, + LabelThingsName.LABEL_THING_WRISTBAND: GOST_DATASTREAMS_WRISTBAND +} + +SCHEDULER_SETTINGS = { + "TASK_PROVISIONING": 20, + "TASK_ALIVEAPP": 60 +} diff --git a/worker/shared/settings/appstableconf.py b/worker/shared/settings/appstableconf.py new file mode 100644 index 0000000..63060f4 --- /dev/null +++ b/worker/shared/settings/appstableconf.py @@ -0,0 +1,9 @@ +from general_types.general_enums import MQTTPayloadConversion +from general_types.modelsenums import OutputMessageType +from general_types.labelsdictionaries import LabelDictionaryQueueShapeArea + + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = '*v$ykys&=(aag!7f%b$rtss$*al4-%!d6@y_qh0zd+yq@#56&l' + + diff --git a/worker/shared/settings/base.py b/worker/shared/settings/base.py new file mode 100644 index 0000000..2f658a8 --- /dev/null +++ b/worker/shared/settings/base.py @@ -0,0 +1,351 @@ +""" +Django settings for project project. + +Generated by 'django-admin startproject' using Django 2.0.2. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/2.0/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/2.0/ref/settings/ +""" + +import os +# import environ +from typing import Dict, Any, Union +from general_types.labelsdictionaries import MQTTLabelsConfigurations +from kombu import Exchange, Queue + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +#ROOT_DIR = environ.Path(__file__) - 3 #project +#APPS_DIR = ROOT_DIR.path('project') # path for django apps + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ +ALLOWED_HOSTS = ['*'] + + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.gis', + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'rest_framework', + 'rest_framework.authtoken', + 'rest_framework_gis', + 'api', + 'jobs', + 'shared', + 'users', + # 'main.apps.MainConfig' +] + +AUTH_USER_MODEL = 'users.User' + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'shared.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + 'django.template.context_processors.i18n', + 'django.template.context_processors.media', + 'django.template.context_processors.static', + 'django.template.context_processors.tz', + ], + }, + }, +] + +WSGI_APPLICATION = 'shared.wsgi.application' + +# Password validation +# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + +REST_FRAMEWORK = { + 'DEFAULT_RENDERER_CLASSES': [ + 'rest_framework.renderers.JSONRenderer', + 'rest_framework.renderers.BrowsableAPIRenderer' + ], + 'DEFAULT_PARSER_CLASSES': [ + 'rest_framework.parsers.JSONParser', + 'rest_framework.parsers.FormParser', + 'rest_framework.parsers.MultiPartParser' + ], + 'DEFAULT_AUTHENTICATION_CLASSES': [], + 'DEFAULT_PERMISSION_CLASSES': [], + # 'DEFAULT_AUTHENTICATION_CLASSES': ( + # 'rest_framework.authentication.TokenAuthentication', + # ), + # 'DEFAULT_PERMISSION_CLASSES': ( + # 'rest_framework.permissions.IsAuthenticated', + # ), +} + +HEADER_JSON = { 'Content-Type' : 'application/json' } + +# Rabbitmq configuration +# ToDO: Create your account in RABBIT container +BROKER_URL = 
'pyamqp://{user}:{password}@{hostname}/{vhost}'.format( + user=os.environ.get('RABBITMQ_DEFAULT_USER', ''), + password=os.environ.get('RABBITMQ_DEFAULT_PASS', ''), + hostname=os.environ.get('RABBITMQ_HOSTNAME', '172.18.1.2'), + vhost=os.environ.get('RABBITMQ_DEFAULT_VHOST', '')) + +BROKER_USER_URL = 'pyamqp://monica_guest:5LSeufD4P5xpu9OybT8S@rabbit/' + +# We don't want to have dead connections stored on rabbitmq, so we have to negotiate using heartbeats +BROKER_HEARTBEAT = '?heartbeat=30' +if not BROKER_URL.endswith(BROKER_HEARTBEAT): + BROKER_URL += BROKER_HEARTBEAT + +BROKER_POOL_LIMIT = 1 +BROKER_CONNECTION_TIMEOUT = 10 + +# SCHUDULES TIME VALUE +TASK_ELABORATION_TIME = 30.0 #in secs +TASK_PROVISIONING = 60.0 #in secs + +# QUEUES NAMES +CROWD_HEATMAP_NAME = 'crowd_heatmap_queue' + + +# Celery configuration +# configure queues, currently we have only one +default_exchange = Exchange('default', type='direct') +priority_exchange = Exchange('priority_queue', type='direct') +broker_exchange = Exchange('broker_queue', type='direct') +log_exchange = Exchange('log_queue', type='direct') +task_alive_exchange = Exchange('queue_task_alive', type='direct') +discover_devices_exchange = Exchange('queue_sw_update_info', type='direct') +taskqueue_provisioning_exchange = Exchange('taskqueue_provisioning', type='direct') + +CELERY_ENABLE_UTC = True +CELERY_TIMEZONE = "UTC" + +CELERY_QUEUES = ( + Queue('default', + exchange=default_exchange, + routing_key='default', + consumer_arguments={'x-priority': 2}), + Queue('taskqueue_provisioning', + exchange=taskqueue_provisioning_exchange, + routing_key='taskqueue_provisioning', + consumer_arguments={'x-priority': 2}), + Queue('queue_task_alive', + exchange=task_alive_exchange, + routing_key='queue_task_alive', + consumer_arguments={'x-priority': 1}), + Queue('priority_queue', + exchange=priority_exchange, + routing_key='priority_queue', + consumer_arguments={'x-priority': 1}), + Queue('broker_queue', + exchange=broker_exchange, + routing_key='broker_queue', + consumer_arguments={'x-priority': 5}), + Queue('log_queue', + exchange=log_exchange, + routing_key='log_queue', + consumer_arguments={'x-priority': 7}), +) + +CELERY_DEFAULT_QUEUE = 'default' +CELERY_DEFAULT_EXCHANGE = 'default' +CELERY_DEFAULT_ROUTING_KEY = 'default' + +CELERY_ROUTES = ({'app.jobs.tasks.first': { + 'queue': 'priority_queue', + 'routing_key': 'priority_queue' + }}, + {'app.jobs.tasks.broker_connection': { + 'queue': 'broker_queue', + 'routing_key': 'broker_queue' + }}, + {'app.jobs.tasks.save_logs': { + 'queue': 'log_queue', + 'routing_key': 'log_queue' + }}, + {'app.jobs.tasks.task_provisioning': { + 'queue': 'taskqueue_provisioning', + 'routing_key': 'taskqueue_provisioning' + }}, + {'app.jobs.tasks.task_alive': { + 'queue': 'queue_task_alive', + 'routing_key': 'queue_task_alive' + }} +) + +CELERY_CREATE_MISSING_QUEUES = True + +# Sensible settings for celery +# CELERY_TASK_ALWAYS_EAGER = True +# CELERY_ACKS_LATE = True +# CELERYD_PREFETCH_MULTIPLIER = 1 +# CELERY_TASK_PUBLISH_RETRY = True +# CELERY_DISABLE_RATE_LIMITS = False +# CELERY_EAGER_PROPAGATES_EXCEPTIONS = True +# CELERY_RDB_HOST = 6900 + +# By default we will ignore result +# If you want to see results and try out tasks interactively, change it to False +# Or change this setting on tasks level +# CELERY_IGNORE_RESULT = True +# CELERY_SEND_TASK_ERROR_EMAILS = False +# CELERY_TASK_RESULT_EXPIRES = 600 + +# Set redis as celery result backend +# CELERY_RESULT_BACKEND = 'redis://%s:%d/%d' % (REDIS_HOST, REDIS_PORT, REDIS_DB) +# 
CELERY_REDIS_MAX_CONNECTIONS = 1 + +# Don't use pickle as serializer, json is much safer +CELERY_TASK_SERIALIZER = 'json' +CELERY_ACCEPT_CONTENT = ['text/plain', 'json', 'application/json', 'yaml'] +CELERY_RESULT_SERIALIZER = 'json' #json pickle msgpack + +# CELERYD_HIJACK_ROOT_LOGGER = False +# CELERYD_PREFETCH_MULTIPLIER = 1 +# CELERYD_MAX_TASKS_PER_CHILD = 1000 + +CELERY_IMPORTS = ('api', 'jobs',) + + +# Internationalization +# https://docs.djangoproject.com/en/2.0/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/2.0/howto/static-files/ + +STATIC_URL = '/static/' + +LOGGING = { + 'version': 1, + 'formatters': { + 'verbose': { + 'format': '[%(asctime)s] %(levelname)s [%(pathname)s:%(lineno)s] %(message)s', + 'datefmt' : "%d/%b/%Y %H:%M:%S" + }, + 'standard': { + 'format' : "[%(asctime)s] %(levelname)s %(message)s", + 'datefmt' : "%d/%b/%Y %H:%M:%S" + }, + }, + 'handlers': { + 'null': { + 'level': 'DEBUG', + 'class': 'logging.NullHandler', + }, + 'logfile': { + 'level': 'INFO', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': "logs/tracefile.log", + 'maxBytes': 104857600, + 'backupCount': 2, + 'formatter': 'verbose', + }, + 'console': { + 'level': 'INFO', + 'class': 'logging.StreamHandler', + 'formatter': 'standard' + }, + }, + 'loggers': { + 'django': { + 'handlers': ['console'], + 'propagate': False, + 'level': 'WARN', + }, + 'django.request': { + 'handlers': ['null'], + 'propagate': False, + 'level': 'INFO', + }, + 'django.server': { + 'handlers': ['null'], + 'propagate': False, + 'level': 'INFO', + }, + 'django.template': { + 'handlers': ['null'], + 'propagate': False, + 'level': 'INFO', + }, + 'django.db.backends': { + 'handlers': ['null'], + 'level': 'INFO', + 'propagate': False, + }, + 'django.security.*': { + 'handlers': ['null'], + 'level': 'INFO', + 'propagate': False, + }, + 'textlogger': { + 'handlers': ['console', 'logfile'], + 'level': 'INFO', + 'propagate': False, + }, + } +} + + +MQTT_BROKERPROVISIONING = { + MQTTLabelsConfigurations.LABEL_DICTIONARY_USERNAME: os.environ.get('MOSQUITTO_USERNAME', 'mosquitto'), + MQTTLabelsConfigurations.LABEL_DICTIONARY_PASSWORD: os.environ.get('MOSQUITTO_PASSWORD', 'mosquitto'), + MQTTLabelsConfigurations.LABEL_DICTIONARY_URL: 'mqtt', + MQTTLabelsConfigurations.LABEL_DICTIONARY_CLIENT_ID: os.environ.get('MOSQUITTO_CLIENTID', 'MONICA_HLDFAD'), + MQTTLabelsConfigurations.LABEL_DICTIONARY_TOPICS: + { + MQTTLabelsConfigurations.LABEL_TOPICS_CROWDHEATMAPOUTPUT: 'CrowdHeatmap', + MQTTLabelsConfigurations.LABEL_TOPICS_QUEUEDETECTIONALERT: 'QueueDetectionAlert' + } +} diff --git a/worker/shared/settings/create_urls.py b/worker/shared/settings/create_urls.py new file mode 100644 index 0000000..7a63db1 --- /dev/null +++ b/worker/shared/settings/create_urls.py @@ -0,0 +1,83 @@ +from shared.settings.appglobalconf import GOST_DB_THINGS +from shared.settings.settings import Settings +from general_types.labels import LabelThingsName, LabelDatastreamGeneric +from django.urls import path +from typing import List, Optional, Any +from api.views import GostDBSFNDatastreams, \ + GostDBWristbandDatastreams, GostDBThings + +import logging + +logger = logging.getLogger('textlogger') + + +def create_single_path(iot_id: int, + thing_name: str) -> path: + try: + url_string = 'v1.0/Things({0})/Datastreams'.format(iot_id) + + if thing_name == LabelThingsName.LABEL_THING_SFN: + logger.info('Creating SFN 
Path: {}'.format(url_string)) + return path(url_string, GostDBSFNDatastreams.as_view()) + elif thing_name == LabelThingsName.LABEL_THING_WRISTBAND: + logger.info('Creating WB-GW Path: {}'.format(url_string)) + return path(url_string, GostDBWristbandDatastreams.as_view()) + + logger.info('NO PATH Created: {}'.format(url_string)) + + return None + except Exception as ex: + logger.error('create_single_path Exception: {}'.format(ex)) + return None + + +def create_paths_datastreams() -> List[path]: + try: + path_thing = path('v1.0/Things', GostDBThings.as_view()) + + if not Settings.THINGS_TO_ANALYSE or not GOST_DB_THINGS: + return [path_thing] + + if "value" not in GOST_DB_THINGS: + return [path_thing] + + LIST_THINGS = GOST_DB_THINGS["value"] + + if not LIST_THINGS: + logger.info('create_paths_datastreams Empty List_Things') + return None + + list_paths: List[path] = list() + list_paths.append(path_thing) + + for thing_name in Settings.THINGS_TO_ANALYSE: + for thing in LIST_THINGS: + if not thing or LabelDatastreamGeneric.LABEL_DSGENERIC_NAME not in thing \ + or LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID not in thing: + logger.warning('create_paths_datastreams No parameters found on thing: {}'.format(thing)) + continue + + iot_id = thing[LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID] + + if thing[LabelDatastreamGeneric.LABEL_DSGENERIC_NAME] == thing_name: + path_to_add = create_single_path(iot_id=iot_id, + thing_name=thing_name) + + if not path_to_add: + logger.warning('create_paths_datastreams PATH_TO_ADD IS None') + continue + + list_paths.append(path_to_add) + else: + logger.info('create_paths_datastreams NO Matching: {0} vs {1}'.format(thing["name"], thing_name)) + + if not list_paths: + logger.info('create_paths_datastreams NO Path to add') + return None + + logger.info('create_paths_datastreams {} path to append'.format(len(list_paths))) + + return list_paths + except Exception as ex: + logger.error('create_paths_datastreams Exception: {}'.format(ex)) + return None diff --git a/worker/shared/settings/datastreams.py b/worker/shared/settings/datastreams.py new file mode 100644 index 0000000..d382311 --- /dev/null +++ b/worker/shared/settings/datastreams.py @@ -0,0 +1,200 @@ +from typing import Dict, Any, List +from utility.geodesy import GeoPosition +from datetime import datetime +from general_types.labels import LabelThingsName, LabelDatastreamGeneric + +import logging + +logger = logging.getLogger('textlogger') + + +class Datastream(object): + def get_altfunc_param(self, + label: str, + list_labels: List[Dict[str, Any]]) -> Any: + try: + if not list_labels: + return self.get_param(label=label) + + dict_return = dict() + for dictionary in list_labels: + if not dictionary: + continue + + for sub_label in dictionary.keys(): + if not sub_label: + continue + + list_labels = dictionary[sub_label] + + dict_return[sub_label] = self.get_altfunc_param(sub_label, + list_labels) + return dict_return + except Exception as ex: + logger.error('get_altfunc_param Exception: {}'.format(ex)) + return None + + def to_dict(self) -> Dict[str, Any]: + list_labels = self.get_labels() + + if not list_labels: + return None + + dict_return = dict() + + for dict_label in list_labels: + if not dict_label: + continue + + for label in dict_label.keys(): + if not label: + continue + + sub_labels = dict_label[label] + dict_return[label] = self.get_altfunc_param(label=label, + list_labels=sub_labels) + + return dict_return + + def get_param(self, label: str) -> Any: + pass + + def get_labels(self) -> List[Dict[str, 
List[str]]]: + pass + + +class LabelDatastramCamera: + LABEL_DSCAM_GPP = "ground_plane_position" + LABEL_DSCAM_GPO = "ground_plane_orientation" + LABEL_DSCAM_GPS = "ground_plane_size" + + @staticmethod + def get_complete_list() -> List[Dict[str, List[str]]]: + return [ + {LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: []}, + {LabelDatastreamGeneric.LABEL_DSGENERIC_NAME: []}, + {LabelDatastreamGeneric.LABEL_DSGENERIC_DESCR: []}, + {LabelDatastreamGeneric.LABEL_DSGENERIC_UNITOFMEAS: + [ + {LabelDatastramCamera.LABEL_DSCAM_GPP: []}, + {LabelDatastramCamera.LABEL_DSCAM_GPO: []}, + {LabelDatastramCamera.LABEL_DSCAM_GPS: []} + ] + } + ] + + +class DatastreamCamera(Datastream): + def __init__(self, + iot_id: int, + name: str, + gpp: GeoPosition, + gpo: int, + ground_plane_size: List[int], + zone_id: str = "FAKE CAMERA"): + self.iot_id = iot_id + self.name = "{0}/Camera/CDL-Estimation/{1}".format(LabelThingsName.LABEL_THING_SFN, + name) + self.gpp = gpp + self.gpo = gpo + self.ground_plane_size = ground_plane_size + self.desc = "Datastream for Estimation of Gate-Counting events" + self.zone_id = zone_id + + def get_labels(self) -> List[Dict[str, List[str]]]: + return LabelDatastramCamera.get_complete_list() + + def get_param(self, label: str) -> Any: + if label == LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: + return self.iot_id + elif label == LabelDatastreamGeneric.LABEL_DSGENERIC_NAME: + return self.name + elif label == LabelDatastreamGeneric.LABEL_DSGENERIC_DESCR: + return self.desc + elif label == LabelDatastramCamera.LABEL_DSCAM_GPP: + return [ + self.gpp.latitude, + self.gpp.longitude + ] + elif label == LabelDatastramCamera.LABEL_DSCAM_GPO: + return self.gpo + + elif label == LabelDatastramCamera.LABEL_DSCAM_GPS: + return self.ground_plane_size + + return str() + + +class LabelDatastreamWristband: + LABEL_DSWRIST_METADATA = "metadata" + LABEL_DSWRIST_BUTTONID = "buttonId" + LABEL_DSWRIST_TIMESTAMP = "timestamp" + LABEL_DSWRIST_TAGID = "tagId" + LABEL_DSWRIST_TYPE = "type" + + @staticmethod + def get_complete_list() -> List[Dict[str, List[str]]]: + return [ + { + LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: [] + }, + { + LabelDatastreamGeneric.LABEL_DSGENERIC_NAME: [] + }, + { + LabelDatastreamGeneric.LABEL_DSGENERIC_DESCR: [] + }, + { + LabelDatastreamGeneric.LABEL_DSGENERIC_UNITOFMEAS: + [ + {LabelDatastreamWristband.LABEL_DSWRIST_METADATA: + [ + {LabelDatastreamWristband.LABEL_DSWRIST_BUTTONID: []}, + {LabelDatastreamWristband.LABEL_DSWRIST_TIMESTAMP: []}, + {LabelDatastreamWristband.LABEL_DSWRIST_TAGID: []}, + {LabelDatastreamWristband.LABEL_DSWRIST_TYPE: []}, + ]}, + ] + } + ] + + +class DatastreamWristband(Datastream): + def __init__(self, + iot_id: int, + name_id: int): + self.iot_id = iot_id + self.timestamp = datetime.utcnow() + self.name = "WRISTBAND-GW/868/Localization-Wristband/{}".format(name_id) + self.desc = "Datastream for Estimation of Gate-Counting events" + + def get_labels(self) -> List[Dict[str, List[str]]]: + return LabelDatastreamWristband.get_complete_list() + + def get_param(self, label: str) -> Any: + if label == LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID: + return self.iot_id + elif label == LabelDatastreamGeneric.LABEL_DSGENERIC_NAME: + return self.name + elif label == LabelDatastreamGeneric.LABEL_DSGENERIC_DESCR: + return self.desc + elif label == LabelDatastreamWristband.LABEL_DSWRIST_BUTTONID: + return 1 + elif label == LabelDatastreamWristband.LABEL_DSWRIST_TIMESTAMP: + return self.timestamp.isoformat() + elif label == 
LabelDatastreamWristband.LABEL_DSWRIST_TAGID: + return str(self.iot_id) + elif label == LabelDatastreamWristband.LABEL_DSWRIST_TYPE: + return "868" + + return str() + + @staticmethod + def get_datastreams_wristbands(iot_id_start: int, + counter_datastreams: int) -> List[Dict[str, Any]]: + list_ds = list() + + for index in range(0, counter_datastreams): + list_ds.append(DatastreamWristband(iot_id=iot_id_start+index, + name_id=index+1).to_dict()) + return list_ds diff --git a/worker/shared/settings/dictionary_topics.py b/worker/shared/settings/dictionary_topics.py new file mode 100644 index 0000000..3b87b62 --- /dev/null +++ b/worker/shared/settings/dictionary_topics.py @@ -0,0 +1,94 @@ +from general_types.labels import GOST_LABELS_DICTIONARY, LabelDatastreamGeneric +from utility.utilitydictionaries import UtilityDictionaries +from general_types.enumerations import ObservableGenericType +from typing import Dict, Any, List +import logging + +logger = logging.getLogger('textlogger') + +PILOT_SELECTED = 'TIVOLI' +GOST_SELECTED = 'GOST_'+PILOT_SELECTED + + +class GOSTProvider: + @staticmethod + def get_datastream(gost: str, + id: int) -> str: + return '{0}/Datastreams({1})/Observations'.format(gost, id) + + @staticmethod + def get_device_id(pilot_name: str, + id: int) -> str: + return '{0}_{1}'.format(pilot_name, + id) + + @staticmethod + def get_device_name(pilot_name: str, + id: int): + return 'SFN/Camera/CDL-Estimation/{0}'.format( + GOSTProvider.get_device_id(pilot_name=pilot_name, + id=id)) + + +def extract_device_id(device_name_complete: str) -> str: + if not device_name_complete: + return str() + + if '/' not in device_name_complete: + return device_name_complete + + list_parts = device_name_complete.split('/') + + return list_parts[-1] + + +def get_dictionary_observables_topics(things_to_analyze: Dict[str, ObservableGenericType], + local_config_things: Dict[str, Dict[str, Any]]) -> Dict[int, Dict[str, Any]]: + try: + if not things_to_analyze or not local_config_things: + return None + + dict_return = dict() + + for thing_name in things_to_analyze.keys(): + if thing_name not in local_config_things.keys(): + continue + + thing_datastreams = local_config_things[thing_name] + + if not thing_datastreams: + continue + + if "value" not in thing_datastreams: + continue + + list_datastreams = thing_datastreams["value"] + + if not list_datastreams: + continue + + for datastream in list_datastreams: + if LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID not in datastream.keys(): + continue + + iot_id = datastream[LabelDatastreamGeneric.LABEL_DSGENERIC_IOTID] + device_name = UtilityDictionaries.get_dict_field_if(dictionary=datastream, + label=LabelDatastreamGeneric.LABEL_DSGENERIC_NAME) + device_name = extract_device_id(device_name_complete=device_name) + + properties = UtilityDictionaries.get_dict_field_if(dictionary=datastream, + label=LabelDatastreamGeneric.LABEL_DSGENERIC_UNITOFMEAS) + + dict_return[iot_id] = { + GOST_LABELS_DICTIONARY.LABEL_GOST_DATASTREAMID: GOSTProvider.get_datastream(gost=GOST_SELECTED, + id=iot_id), + GOST_LABELS_DICTIONARY.LABEL_GOST_DEVICENAME: device_name, + GOST_LABELS_DICTIONARY.LABEL_GOST_UNITOFMEASUREMENTS: properties, + GOST_LABELS_DICTIONARY.LABEL_GOST_THING: thing_name + } + logger.info('get_dictionary_observables_topics iot_id: {0}, topic: {1}'.format(iot_id, + dict_return[iot_id])) + return dict_return + except Exception as ex: + logger.error('get_dictionary_observables_topics Exception: {}'.format(ex)) + return None diff --git a/worker/shared/settings/dockersconf.py 
b/worker/shared/settings/dockersconf.py new file mode 100644 index 0000000..2918303 --- /dev/null +++ b/worker/shared/settings/dockersconf.py @@ -0,0 +1,32 @@ +import os + +CACHE_REDIS_CONFIGURATION = { + "HOST": os.environ.get('CACHEREDIS_DEFAULT_HOSTNAME', 'redis'), + "PORT": os.environ.get('CACHEREDIS_DEFAULT_PORT', 6379), +} + +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://redis:6379/0", + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "SOCKET_CONNECT_TIMEOUT": 5, # in seconds + "SOCKET_TIMEOUT": 5, # in seconds + } + } +} + +SESSION_ENGINE = "django.contrib.sessions.backends.cache" +SESSION_CACHE_ALIAS = "default" +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': '/code/images/dbsqlite/db.sqlite3' # os.path.join(BASE_DIR, 'db.sqlite3'), + }, +} + +ALLOWED_HOSTS = ['localhost', os.environ.get('LOCALHOST_MACHINE', '192.168.1.1'), '127.0.0.1'] +INTERNAL_IPS = ('40.115.100.253', '130.192.85.198') diff --git a/worker/shared/settings/settings.py.dev b/worker/shared/settings/settings.py.dev new file mode 100644 index 0000000..747c8f1 --- /dev/null +++ b/worker/shared/settings/settings.py.dev @@ -0,0 +1,86 @@ +from general_types.enumerations import ObservableGenericType +from general_types.labels import GOST_LABELS_DICTIONARY, LabelThingsName +import os +from typing import List, Dict, Any +import logging + +logger = logging.getLogger('textlogger') + + +class GeoRefArea(object): + def __init__(self): + self.reference_pos_lat = 45.796845 + self.reference_pos_long = 4.950293 + self.geoarea_max_east = 300.0 + self.geoarea_max_north = 200.0 + + +# TODO: Such application start from hypothesis of having just one camera +# for Crowd Density Local messages +class Settings: + list_events_publish: list = list() + hostname: str = str() + port: int = 0 + username: str = str() + password: str = str() + client_id: str = str() + geographic_area: GeoRefArea = GeoRefArea() + max_counter_people_densitymap: int = 200 + COUNT_WRISTBANDS: int = 100 + WRISTBAND_ID_START: int = 1 + CAMERA_IOTID: int = 6000 + CAMERA_NAME = "TIVOLI_26" + CAMERA_GPP_LATITUDE = 55.67384 + CAMERA_GPP_LONGITUDE = 12.56717 + CAMERA_GPO = 30 + CAMERA_NUMBER_ROWS = 10 + CAMERA_NUMBER_COLS = 18 + + # NOTE: In order to retrieve Thing MQTT messages, it is necessary to add tuple label: type in the following + # THINGS_TO_ANALYSE dictionary. For instance, if the lines are both inserted in such list, + # both MQTT messages are enabled + THINGS_TO_ANALYSE = { + LabelThingsName.LABEL_THING_SFN: ObservableGenericType.CROWDDENSITYLOCAL, + LabelThingsName.LABEL_THING_WRISTBAND: ObservableGenericType.LOCALIZATION + } + + @staticmethod + def retrieve_environment_settings() -> bool: + try: + Settings.hostname = os.environ.get('MQTT_BROKER_IP_ADDRESS', 'mqtt') # '192.168.229.101' + Settings.port = int(os.environ.get('MQTT_BROKER_TCP_PORT', '1883')) + Settings.username = os.environ.get('MQTT_BROKER_USERNAME', '') + Settings.password = os.environ.get('MQTT_BROKER_PASSWORD', '') + Settings.client_id = os.environ.get('MQTT_BROKER_CLIENTID', 'gostmqttemulator') + + print('Settings retrieve_environment_settings Done. 
hostname: {0}, port: {1}'.format(Settings.hostname, + Settings.port)) + return True + except Exception as ex: + print('Settings retrieve_environment_settings Exception: {}'.format(ex)) + return False + + @staticmethod + def get_list_topics(dictionary_obs_topics: Dict[str, Dict[str, Any]]) -> List[str]: + try: + if not dictionary_obs_topics: + logger.warning('get_list_topics NO TOPIC Found') + return None + + list_topics = list() + + for key in dictionary_obs_topics.keys(): + value = dictionary_obs_topics[key] + if not value: + continue + + topic = value[GOST_LABELS_DICTIONARY.LABEL_GOST_DATASTREAMID] + + logger.info('get_list_topics New TOPIC Created: {}'.format(topic)) + + list_topics.append(topic) + return list_topics + except Exception as ex: + logger.error('get_list_topics Exception: {}'.format(ex)) + return None + diff --git a/worker/shared/settings/settings.py.local b/worker/shared/settings/settings.py.local new file mode 100644 index 0000000..747c8f1 --- /dev/null +++ b/worker/shared/settings/settings.py.local @@ -0,0 +1,86 @@ +from general_types.enumerations import ObservableGenericType +from general_types.labels import GOST_LABELS_DICTIONARY, LabelThingsName +import os +from typing import List, Dict, Any +import logging + +logger = logging.getLogger('textlogger') + + +class GeoRefArea(object): + def __init__(self): + self.reference_pos_lat = 45.796845 + self.reference_pos_long = 4.950293 + self.geoarea_max_east = 300.0 + self.geoarea_max_north = 200.0 + + +# TODO: Such application start from hypothesis of having just one camera +# for Crowd Density Local messages +class Settings: + list_events_publish: list = list() + hostname: str = str() + port: int = 0 + username: str = str() + password: str = str() + client_id: str = str() + geographic_area: GeoRefArea = GeoRefArea() + max_counter_people_densitymap: int = 200 + COUNT_WRISTBANDS: int = 100 + WRISTBAND_ID_START: int = 1 + CAMERA_IOTID: int = 6000 + CAMERA_NAME = "TIVOLI_26" + CAMERA_GPP_LATITUDE = 55.67384 + CAMERA_GPP_LONGITUDE = 12.56717 + CAMERA_GPO = 30 + CAMERA_NUMBER_ROWS = 10 + CAMERA_NUMBER_COLS = 18 + + # NOTE: In order to retrieve Thing MQTT messages, it is necessary to add tuple label: type in the following + # THINGS_TO_ANALYSE dictionary. For instance, if the lines are both inserted in such list, + # both MQTT messages are enabled + THINGS_TO_ANALYSE = { + LabelThingsName.LABEL_THING_SFN: ObservableGenericType.CROWDDENSITYLOCAL, + LabelThingsName.LABEL_THING_WRISTBAND: ObservableGenericType.LOCALIZATION + } + + @staticmethod + def retrieve_environment_settings() -> bool: + try: + Settings.hostname = os.environ.get('MQTT_BROKER_IP_ADDRESS', 'mqtt') # '192.168.229.101' + Settings.port = int(os.environ.get('MQTT_BROKER_TCP_PORT', '1883')) + Settings.username = os.environ.get('MQTT_BROKER_USERNAME', '') + Settings.password = os.environ.get('MQTT_BROKER_PASSWORD', '') + Settings.client_id = os.environ.get('MQTT_BROKER_CLIENTID', 'gostmqttemulator') + + print('Settings retrieve_environment_settings Done. 
hostname: {0}, port: {1}'.format(Settings.hostname, + Settings.port)) + return True + except Exception as ex: + print('Settings retrieve_environment_settings Exception: {}'.format(ex)) + return False + + @staticmethod + def get_list_topics(dictionary_obs_topics: Dict[str, Dict[str, Any]]) -> List[str]: + try: + if not dictionary_obs_topics: + logger.warning('get_list_topics NO TOPIC Found') + return None + + list_topics = list() + + for key in dictionary_obs_topics.keys(): + value = dictionary_obs_topics[key] + if not value: + continue + + topic = value[GOST_LABELS_DICTIONARY.LABEL_GOST_DATASTREAMID] + + logger.info('get_list_topics New TOPIC Created: {}'.format(topic)) + + list_topics.append(topic) + return list_topics + except Exception as ex: + logger.error('get_list_topics Exception: {}'.format(ex)) + return None + diff --git a/worker/shared/settings/settings.py.prod b/worker/shared/settings/settings.py.prod new file mode 100644 index 0000000..747c8f1 --- /dev/null +++ b/worker/shared/settings/settings.py.prod @@ -0,0 +1,86 @@ +from general_types.enumerations import ObservableGenericType +from general_types.labels import GOST_LABELS_DICTIONARY, LabelThingsName +import os +from typing import List, Dict, Any +import logging + +logger = logging.getLogger('textlogger') + + +class GeoRefArea(object): + def __init__(self): + self.reference_pos_lat = 45.796845 + self.reference_pos_long = 4.950293 + self.geoarea_max_east = 300.0 + self.geoarea_max_north = 200.0 + + +# TODO: Such application start from hypothesis of having just one camera +# for Crowd Density Local messages +class Settings: + list_events_publish: list = list() + hostname: str = str() + port: int = 0 + username: str = str() + password: str = str() + client_id: str = str() + geographic_area: GeoRefArea = GeoRefArea() + max_counter_people_densitymap: int = 200 + COUNT_WRISTBANDS: int = 100 + WRISTBAND_ID_START: int = 1 + CAMERA_IOTID: int = 6000 + CAMERA_NAME = "TIVOLI_26" + CAMERA_GPP_LATITUDE = 55.67384 + CAMERA_GPP_LONGITUDE = 12.56717 + CAMERA_GPO = 30 + CAMERA_NUMBER_ROWS = 10 + CAMERA_NUMBER_COLS = 18 + + # NOTE: In order to retrieve Thing MQTT messages, it is necessary to add tuple label: type in the following + # THINGS_TO_ANALYSE dictionary. For instance, if the lines are both inserted in such list, + # both MQTT messages are enabled + THINGS_TO_ANALYSE = { + LabelThingsName.LABEL_THING_SFN: ObservableGenericType.CROWDDENSITYLOCAL, + LabelThingsName.LABEL_THING_WRISTBAND: ObservableGenericType.LOCALIZATION + } + + @staticmethod + def retrieve_environment_settings() -> bool: + try: + Settings.hostname = os.environ.get('MQTT_BROKER_IP_ADDRESS', 'mqtt') # '192.168.229.101' + Settings.port = int(os.environ.get('MQTT_BROKER_TCP_PORT', '1883')) + Settings.username = os.environ.get('MQTT_BROKER_USERNAME', '') + Settings.password = os.environ.get('MQTT_BROKER_PASSWORD', '') + Settings.client_id = os.environ.get('MQTT_BROKER_CLIENTID', 'gostmqttemulator') + + print('Settings retrieve_environment_settings Done. 
hostname: {0}, port: {1}'.format(Settings.hostname, + Settings.port)) + return True + except Exception as ex: + print('Settings retrieve_environment_settings Exception: {}'.format(ex)) + return False + + @staticmethod + def get_list_topics(dictionary_obs_topics: Dict[str, Dict[str, Any]]) -> List[str]: + try: + if not dictionary_obs_topics: + logger.warning('get_list_topics NO TOPIC Found') + return None + + list_topics = list() + + for key in dictionary_obs_topics.keys(): + value = dictionary_obs_topics[key] + if not value: + continue + + topic = value[GOST_LABELS_DICTIONARY.LABEL_GOST_DATASTREAMID] + + logger.info('get_list_topics New TOPIC Created: {}'.format(topic)) + + list_topics.append(topic) + return list_topics + except Exception as ex: + logger.error('get_list_topics Exception: {}'.format(ex)) + return None + diff --git a/worker/shared/settings/version.py b/worker/shared/settings/version.py new file mode 100644 index 0000000..dab13e4 --- /dev/null +++ b/worker/shared/settings/version.py @@ -0,0 +1 @@ +SW_VERSION = "02.05.00.01" diff --git a/worker/shared/urls.py b/worker/shared/urls.py new file mode 100644 index 0000000..24d60fa --- /dev/null +++ b/worker/shared/urls.py @@ -0,0 +1,55 @@ +"""project URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/2.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.contrib import admin +from django.urls import path +from django.conf.urls import url, include +import os + +from rest_framework import routers + +from api.views import GostDBThings, GostDBSFNDatastreams, GostDBWristbandDatastreams, GOSTWP6CreateNewDatastream +from shared.settings.create_urls import create_paths_datastreams +import logging + +router = routers.DefaultRouter() +# register job endpoint in the router +# router.register(r'jobs', jviews.JobViewSet) + +logger = logging.getLogger('textlogger') + +GOST_IOTID_THING_SFN = os.environ.get('GOST_THINGID_SFN', '5') +GOST_IOTID_THING_WRISTBAND = os.environ.get('GOST_THINGID_WRISTBAND', '18') + +urlpatterns = [ + path('', include(router.urls)), + path('admin', admin.site.urls), + url(r'^api-auth/', include('rest_framework.urls')), + path('v1.0/Things', GostDBThings.as_view()), # , name="Emulated GOST Things" + path('v1.0/Things/', GostDBThings.as_view()), + path('v1.0/Things({0})/Datastreams'.format(GOST_IOTID_THING_SFN), GostDBSFNDatastreams.as_view()), # , name="Emulated GOST Things" + path('v1.0/Things({0})/Datastreams'.format(GOST_IOTID_THING_WRISTBAND), GostDBWristbandDatastreams.as_view()), # , name="Emulated GOST Things" + path('SearchOrCreateOGCDataStreamId', GOSTWP6CreateNewDatastream.as_view()) +] + +# logger.info('Executing List New Path') +# +# list_new_paths = create_paths_datastreams() +# +# if list_new_paths: +# urlpatterns.extend(list_new_paths) +# logger.info('Append List New Paths, New List Size: {}'.format(len(urlpatterns))) +# else: +# logger.info('NOTHING TO APPEND') \ No newline at end of file diff --git a/worker/shared/wsgi.py b/worker/shared/wsgi.py new file mode 100644 index 0000000..3e7ea4d --- 
/dev/null +++ b/worker/shared/wsgi.py @@ -0,0 +1,17 @@ +""" +WSGI config for project project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shared.settings.appglobalconf") + +application = get_wsgi_application() + diff --git a/worker/unittests/unittests_main.py b/worker/unittests/unittests_main.py new file mode 100644 index 0000000..2e16a58 --- /dev/null +++ b/worker/unittests/unittests_main.py @@ -0,0 +1,31 @@ +import logging +from typing import List, Dict + +logger = logging.getLogger('textlogger') + + +class UnitTestMain: + @staticmethod + def print_report(dictionary_test_results: Dict[str, bool] = dict()): + if not dictionary_test_results: + return + + for key in dictionary_test_results: + logger.info('UnitTestName: {0}, Result: {1}'.format(key, dictionary_test_results[key])) + + @staticmethod + def launch_all_tests(enable_tests: bool = False, list_enabled_tests: List[str] = list()) \ + -> Dict[str, bool]: + try: + if not enable_tests or not list_enabled_tests: + return None + + dict_results = dict() + + for test_name in list_enabled_tests: + print('Launching Test: {}'.format(test_name)) + + return dict_results + except Exception as ex: + logger.error('UnitTestMain Execution Exception: {}'.format(ex)) + return None diff --git a/worker/users/.gitignore b/worker/users/.gitignore new file mode 100644 index 0000000..e0d1ea3 --- /dev/null +++ b/worker/users/.gitignore @@ -0,0 +1 @@ +migrations \ No newline at end of file diff --git a/worker/users/__init__.py b/worker/users/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/worker/users/admin.py b/worker/users/admin.py new file mode 100644 index 0000000..8c38f3f --- /dev/null +++ b/worker/users/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. 
diff --git a/worker/users/apps.py b/worker/users/apps.py
new file mode 100644
index 0000000..4ce1fab
--- /dev/null
+++ b/worker/users/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class UsersConfig(AppConfig):
+    name = 'users'
diff --git a/worker/users/models.py b/worker/users/models.py
new file mode 100644
index 0000000..b69a7be
--- /dev/null
+++ b/worker/users/models.py
@@ -0,0 +1,14 @@
+from django.db import models
+from django.contrib.auth.models import AbstractUser
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+from rest_framework.authtoken.models import Token
+from django.conf import settings
+
+
+class User(AbstractUser):
+    pass
+
+
+@receiver(post_save, sender=settings.AUTH_USER_MODEL)
+def create_auth_token(sender, instance=None, created=False, **kwargs):
+    if created:
+        Token.objects.create(user=instance)
\ No newline at end of file
diff --git a/worker/users/serializers.py b/worker/users/serializers.py
new file mode 100644
index 0000000..626d8e7
--- /dev/null
+++ b/worker/users/serializers.py
@@ -0,0 +1,35 @@
+from rest_framework import serializers
+from users.models import User
+
+
+class UserSerializer(serializers.HyperlinkedModelSerializer):
+    password = serializers.CharField(write_only=True)
+
+    def create(self, validated_data):
+        user = User(
+            username=validated_data.get('username', None)
+        )
+        user.set_password(validated_data.get('password', None))
+        user.save()
+        return user
+
+    def update(self, instance, validated_data):
+        for field in validated_data:
+            if field == 'password':
+                instance.set_password(validated_data.get(field))
+            else:
+                setattr(instance, field, validated_data.get(field))
+        instance.save()
+        return instance
+
+    class Meta:
+        model = User
+        fields = ('url', 'id', 'username',
+                  'password', 'first_name', 'last_name',
+                  'email'
+                  )
+        extra_kwargs = {
+            'url': {
+                'view_name': 'users:user-detail',
+            }
+        }
\ No newline at end of file
diff --git a/worker/users/urls.py b/worker/users/urls.py
new file mode 100644
index 0000000..e20ae52
--- /dev/null
+++ b/worker/users/urls.py
@@ -0,0 +1,9 @@
+from django.conf.urls import url
+from rest_framework.urlpatterns import format_suffix_patterns
+
+app_name = "users_app"
+
+# urlpatterns = [
+#     url(r'^users/$', views.UserList.as_view(), name='user-list'),
+#     url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view(), name='user-detail'),
+# ]
\ No newline at end of file
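As a usage note, the serializer above hashes the password on both create and update via set_password(); a minimal sketch (payload values are illustrative):

    from users.serializers import UserSerializer

    # Illustrative payload; 'password' is write_only and stored hashed
    serializer = UserSerializer(data={'username': 'alice', 'password': 's3cr3t'})
    if serializer.is_valid():
        user = serializer.save()  # dispatches to create() above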
diff --git a/worker/utility/geodesy.py b/worker/utility/geodesy.py
new file mode 100644
index 0000000..7bd9113
--- /dev/null
+++ b/worker/utility/geodesy.py
@@ -0,0 +1,566 @@
+#!/usr/bin/env python3
+
+"""
+Geoscience Australia - Python Geodesy Package
+Geodesy Module
+"""
+
+from math import (pi, degrees, radians, sqrt, sin,
+                  cos, tan, asin, acos, atan, atan2, fabs)
+import numpy as np
+# from geodepy.constants import grs80
+from enum import Enum
+from django.contrib.gis.geos import Point
+from typing import List
+
+
+# Ellipsoid Constants
+class Ellipsoid(object):
+    def __init__(self, semimaj, inversef):
+        self.semimaj = semimaj
+        self.inversef = inversef
+        self.f = 1 / self.inversef
+        self.semimin = float(self.semimaj * (1 - self.f))
+        self.ecc1sq = float(self.f * (2 - self.f))
+        self.ecc2sq = float(self.ecc1sq / (1 - self.ecc1sq))
+        self.ecc1 = sqrt(self.ecc1sq)
+        self.n = float(self.f / (2 - self.f))
+        self.n2 = self.n ** 2
+
+
+# Geodetic Reference System 1980 (http://www.epsg-registry.org/export.htm?gml=urn:ogc:def:ellipsoid:EPSG::7019)
+grs80 = Ellipsoid(6378137, 298.257222101)
+
+
+class SurfaceVector:
+    def __init__(self, x, y):
+        self.x = x
+        self.y = y
+
+    def set_properties(self, x, y):
+        self.x = x
+        self.y = y
+
+
+class PolarCoordinates(object):
+    def __init__(self, magnitude: float = 0, angle: float = 0):
+        self.magnitude = magnitude
+        self.angle = angle
+
+    def set_surface_coordinates(self, surface_vector: SurfaceVector) -> "PolarCoordinates":
+        self.magnitude = sqrt((surface_vector.x**2)+(surface_vector.y**2))
+        self.angle = atan2(surface_vector.y, surface_vector.x)
+        return self
+
+    def add_rotation_angle(self, rotation: float):
+        self.angle += radians(rotation)
+
+    def convert_surface_coordinate(self) -> SurfaceVector:
+        surface_x = self.magnitude * cos(self.angle)
+        surface_y = self.magnitude * sin(self.angle)
+        return SurfaceVector(x=surface_x, y=surface_y)
+
+
+class TypePosition(Enum):
+    # NOTE: no trailing commas here; they would turn the member values into tuples
+    ONLY_ECEF = 1
+    ONLY_LLH = 2
+    ECEF_LLH = 3
+
+
+class ENUCoordinates(object):
+    def __init__(self, east: float = 0,
+                 north: float = 0,
+                 up: float = 0):
+        self.east = east
+        self.north = north
+        self.up = up
+
+
+class GeoLocalConstants:
+    eps_difference_coords = 1e-10
+
+
+class GeoPosition(object):
+    def __init__(self,
+                 latitude: float,
+                 longitude: float,
+                 altitude: float = 0.0,
+                 request_ecef_conf: bool = False):
+        self.latitude = latitude
+        self.longitude = longitude
+        self.altitude = altitude
+        self.ecef_x = 0
+        self.ecef_y = 0
+        self.ecef_z = 0
+        self.type_position = TypePosition.ONLY_LLH
+        if request_ecef_conf:
+            self.ecef_x, self.ecef_y, self.ecef_z = GeoTranslations.llh2xyz(lat=self.latitude,
+                                                                            long=self.longitude,
+                                                                            ellht=0)
+            self.type_position = TypePosition.ECEF_LLH
+
+    @staticmethod
+    def remove_duplicates(list_points: List["GeoPosition"]) -> List["GeoPosition"]:
+        if not GeoPosition.check_if_duplicates(list_points=list_points):
+            return list_points
+
+        list_new = list()
+
+        for geo_point in list_points:
+            if not geo_point:
+                continue
+
+            if not list_new or geo_point not in list_new:
+                list_new.append(geo_point)
+
+        return list_new
+
+    @staticmethod
+    def check_if_duplicates(list_points: List["GeoPosition"]) -> bool:
+        if not list_points:
+            return False
+
+        if len(list_points) == 1:
+            return False
+
+        for index_point_start in range(0, len(list_points)-1):
+            point_master = list_points[index_point_start]
+            for index_point_end in range(index_point_start+1, len(list_points)):
+                point_check = list_points[index_point_end]
+
+                if point_master == point_check:
+                    return True
+
+        return False
+
+    def get_copy(self) -> "GeoPosition":
+        geo_position = GeoPosition(latitude=self.latitude,
+                                   longitude=self.longitude,
+                                   altitude=self.altitude,
+                                   request_ecef_conf=False)
+        geo_position.ecef_x = self.ecef_x
+        geo_position.ecef_y = self.ecef_y
+        geo_position.ecef_z = self.ecef_z
+        geo_position.type_position = self.type_position
+
+        return geo_position
+
+    def __eq__(self, other: "GeoPosition"):
+        if not other:
+            return False
+        if fabs(self.latitude-other.latitude) > GeoLocalConstants.eps_difference_coords:
+            return False
+        if fabs(self.longitude-other.longitude) > GeoLocalConstants.eps_difference_coords:
+            return False
+        if fabs(self.altitude-other.altitude) > GeoLocalConstants.eps_difference_coords:
+            return False
+
+        return True
+
+    def consolidate_calculation(self) -> bool:
+        if self.type_position == TypePosition.ECEF_LLH:
+            return True
+
+        if self.type_position == TypePosition.ONLY_ECEF:
+            self.latitude, self.longitude, self.altitude = GeoTranslations.xyz2llh(x=self.ecef_x,
+                                                                                   y=self.ecef_y,
+                                                                                   z=self.ecef_z)
+        elif self.type_position == TypePosition.ONLY_LLH:
+            self.ecef_x, self.ecef_y, self.ecef_z = GeoTranslations.llh2xyz(lat=self.latitude,
+                                                                            long=self.longitude,
+                                                                            ellht=0)
+        self.type_position = TypePosition.ECEF_LLH
+        return True
+
+    def to_point(self) -> Point:
+
+        if self.type_position == TypePosition.ONLY_ECEF:
+            self.latitude, self.longitude, self.altitude = GeoTranslations.xyz2llh(x=self.ecef_x,
+                                                                                   y=self.ecef_y,
+                                                                                   z=self.ecef_z)
+            self.type_position = TypePosition.ECEF_LLH
+
+        return Point(x=self.longitude,
+                     y=self.latitude,
+                     srid=4326)
+
+    def add_enu_distance(self,
+                         enu_distance: List[float]) -> "GeoPosition":
+        try:
+            if not enu_distance or len(enu_distance) < 2:
+                # NOTE: return self unchanged (the original returned a Point here,
+                # contradicting the declared return type)
+                return self
+
+            self.ecef_x, self.ecef_y, self.ecef_z = \
+                GeoTranslations.enu2xyz(lat=self.latitude,
+                                        long=self.longitude,
+                                        east=enu_distance[0],
+                                        north=enu_distance[1],
+                                        up=0)
+
+            self.latitude, self.longitude, self.altitude = GeoTranslations.xyz2llh(x=self.ecef_x,
+                                                                                   y=self.ecef_y,
+                                                                                   z=self.ecef_z)
+            return self
+        except Exception:
+            return self
+
+    def calculate_vector_distance(self, ref_pos: "GeoPosition") -> SurfaceVector:
+        self.consolidate_calculation()
+        ref_pos.consolidate_calculation()
+
+        (east, north, up) = GeoTranslations.xyz2enu_bis(pos_to_translate=self,
+                                                        ref_pos=ref_pos)
+
+        return SurfaceVector(x=east,
+                            y=north)
+
+
+class RotationMatrixType(Enum):
+    TYPE_ECEF_LLH = 1
+    TYPE_LLH_ECEF = 2
+
+
+class GeoTranslations:
+    @staticmethod
+    def calculate_rotation_matrix(latitude: float,
+                                  longitude: float,
+                                  type_matrix: RotationMatrixType) -> np.array:
+        """
+        function to calculate rotation matrix from position latitude and longitude
+        :param latitude: latitude in decimal degrees
+        :param longitude: longitude in decimal degrees
+        :param type_matrix: RotationMatrixType.TYPE_ECEF_LLH or RotationMatrixType.TYPE_LLH_ECEF
+        :return: rotation_matrix np.array
+        """
+
+        lat_rad = radians(latitude)
+        long_rad = radians(longitude)
+        sin_lat = sin(lat_rad)
+        cos_lat = cos(lat_rad)
+        sin_long = sin(long_rad)
+        cos_long = cos(long_rad)
+
+        if type_matrix == RotationMatrixType.TYPE_LLH_ECEF:
+            rotate = np.array([[-sin_long, -sin_lat * cos_long, cos_lat * cos_long],
+                               [cos_long, -sin_lat * sin_long, cos_lat * sin_long],
+                               [0, cos_lat, sin_lat]])
+
+        elif type_matrix == RotationMatrixType.TYPE_ECEF_LLH:
+            rotate = np.array([[-sin_long, cos_long, 0],
+                               [-sin_lat * cos_long, -sin_lat * sin_long, cos_lat],
+                               [cos_lat * cos_long, cos_lat * sin_long, sin_lat]])
+
+        else:
+            # guard against an unhandled enum value ('rotate' would otherwise be unbound)
+            raise ValueError('Unknown RotationMatrixType: {}'.format(type_matrix))
+
+        return rotate
+
+    @staticmethod
+    def enu2xyz(lat, long, east, north, up) -> (float, float, float):
+        """
+        function to convert a vector in a local east, north, up reference frame to
+        a vector in a cartesian x, y, z reference frame
+        :param lat: latitude in decimal degrees
+        :param long: longitude in decimal degrees
+        :param east: in metres
+        :param north: in metres
+        :param up: in metres
+        :return: x, y, z in metres
+        """
+        # Create ENU Vector
+        enu = np.array([[east],
+                        [north],
+                        [up]])
+        # Create Rotation Matrix
+
+        rotate = GeoTranslations.calculate_rotation_matrix(latitude=lat,
+                                                           longitude=long,
+                                                           type_matrix=RotationMatrixType.TYPE_ECEF_LLH)
+
+        inv_rotate = np.linalg.inv(rotate)
+
+        delta_xyz = np.dot(inv_rotate, enu)
+        # Assign to separate variables
+
+        org_x, org_y, org_z = GeoTranslations.llh2xyz(lat=lat,
+                                                      long=long,
+                                                      ellht=0)
+
+        x = org_x + float(delta_xyz[0])
+        y = org_y + float(delta_xyz[1])
+        z = org_z + float(delta_xyz[2])
+
+        return x, y, z
+
+    @staticmethod
+    def xyz2llh(x, y, z, ellipsoid=grs80) -> (float, float, float):
+        # Add input for ellipsoid (default: grs80)
+        """
+        Input: Cartesian XYZ coordinate in metres
+        Output: Latitude and Longitude in Decimal
+        Degrees and Ellipsoidal Height in Metres
+        """
+        # Calculate Longitude
+        long = atan2(y, x)
+        # Calculate Latitude
+        p = sqrt(x ** 2 + y ** 2)
+        latinit = atan((z * (1 + ellipsoid.ecc2sq)) / p)
+        lat = latinit
+        itercheck = 1
+        while abs(itercheck) > 1e-10:
+            nu = ellipsoid.semimaj / (sqrt(1 - ellipsoid.ecc1sq * (sin(lat)) ** 2))
+            itercheck = lat - atan((z + nu * ellipsoid.ecc1sq * sin(lat)) / p)
+            lat = atan((z + nu * ellipsoid.ecc1sq * sin(lat)) / p)
+        nu = ellipsoid.semimaj / (sqrt(1 - ellipsoid.ecc1sq * (sin(lat)) ** 2))
+        ellht = p / (cos(lat)) - nu
+        # Convert Latitude and Longitude to Degrees
+        lat = degrees(lat)
+        long = degrees(long)
+        return lat, long, ellht
+
+    @staticmethod
+    def xyz2enu(lat, long, x, y, z):
+        """
+        function to convert a vector in a cartesian x, y, z reference frame to a
+        vector in a local east, north, up reference frame
+        :param lat: latitude in decimal degrees
+        :param long: longitude in decimal degrees
+        :param x: in metres
+        :param y: in metres
+        :param z: in metres
+        :return: east, north, up in metres
+        """
+        # Create XYZ Vector
+
+        (xref, yref, zref) = GeoTranslations.llh2xyz(lat=lat,
+                                                     long=long,
+                                                     ellht=0)
+
+        xyz = np.array([[x - xref],
+                        [y - yref],
+                        [z - zref]])
+        # Create Rotation Matrix
+        rotate = GeoTranslations.calculate_rotation_matrix(latitude=lat,
+                                                           longitude=long,
+                                                           type_matrix=RotationMatrixType.TYPE_ECEF_LLH)
+        enu = np.dot(rotate, xyz)
+        # Assign to separate variables
+        east = float(enu[0])
+        north = float(enu[1])
+        up = float(enu[2])
+        return east, north, up
+
+    @staticmethod
+    def xyz2enu_bis(pos_to_translate: GeoPosition,
+                    ref_pos: GeoPosition) -> (float, float, float):
+        """
+        function to convert a vector in a cartesian x, y, z reference frame to a
+        vector in a local east, north, up reference frame
+        :param pos_to_translate: GeoPosition to be converted in ENU Coordinates
+        :param ref_pos: GeoPosition reference position
+        :return: east, north, up in metres
+        """
+        # Create Delta XYZ Vector
+        difxyz = np.array([[pos_to_translate.ecef_x-ref_pos.ecef_x],
+                           [pos_to_translate.ecef_y-ref_pos.ecef_y],
+                           [pos_to_translate.ecef_z-ref_pos.ecef_z]])
+        # Create Rotation Matrix
+        rotate = GeoTranslations.calculate_rotation_matrix(latitude=ref_pos.latitude,
+                                                           longitude=ref_pos.longitude,
+                                                           type_matrix=RotationMatrixType.TYPE_ECEF_LLH)
+        enu = np.dot(rotate, difxyz)
+        # Assign to separate variables
+        east = float(enu[0])
+        north = float(enu[1])
+        up = float(enu[2])
+        return east, north, up
+
+    @staticmethod
+    def surfacedistanceheading2llh(surface_distance_m: float,
+                                   surface_heading_deg: float,
+                                   org_latitude: float,
+                                   org_longitude: float) -> (float, float, float):
+        """
+        function to convert a distance and heading on the surface from an origin
+        into a latitude, longitude position
+        :param surface_distance_m: in metres
+        :param surface_heading_deg: in degrees
+        :param org_latitude: latitude origin in degrees
+        :param org_longitude: longitude origin in degrees
+        :return: lat_deg, long_deg, altitude in metres
+        """
+
+        # Heading is taken as a compass bearing, clockwise from north: the east
+        # component uses sin, the north component uses cos (the original used
+        # cos for both, which made east and north always equal)
+        east = surface_distance_m * sin(radians(surface_heading_deg))
+        north = surface_distance_m * cos(radians(surface_heading_deg))
+        up = 0
+
+        out_lat, out_long, out_altit = GeoTranslations.enu2llh(east=east,
+                                                               north=north,
+                                                               up=up,
+                                                               org_lat=org_latitude,
+                                                               org_long=org_longitude)
+
+        return out_lat, out_long, out_altit
+
+    @staticmethod
+    def enu2llh(east: float,
+                north: float,
+                up: float,
+                org_lat: float,
+                org_long: float) -> (float, float, float):
+        """
+        function to convert a local east, north, up offset from an origin
+        latitude/longitude into a geodetic latitude, longitude, altitude position
+        :param east: in metres
+        :param north: in metres
+        :param up: in metres
+        :param org_lat: latitude in decimal degrees
+        :param org_long: longitude in decimal degrees
+        :return: lat_deg, long_deg, altitude in metres
+        """
+        try:
+            enu = np.array([[east],
+                            [north],
+                            [up]])
+
+            rotate = GeoTranslations.calculate_rotation_matrix(latitude=org_lat,
+                                                               longitude=org_long,
+                                                               type_matrix=RotationMatrixType.TYPE_ECEF_LLH)
+
+            inv_rotate = np.linalg.inv(rotate)
+            diff_xyz = np.dot(inv_rotate, enu)
+            (tmp_x, tmp_y, tmp_z) = GeoTranslations.llh2xyz(lat=org_lat,
+                                                            long=org_long,
+                                                            ellht=0)
+            x = tmp_x + float(diff_xyz[0])
+            y = tmp_y + float(diff_xyz[1])
+            z = tmp_z + float(diff_xyz[2])
+
+            (out_lat, out_long, out_alt) = GeoTranslations.xyz2llh(x=x,
+                                                                   y=y,
+                                                                   z=z)
+
+            return out_lat, out_long, out_alt
+        except Exception:
+            return 0, 0, 0
+
+    @staticmethod
+    def llh2xyz(lat, long, ellht, ellipsoid=grs80):
+        # Add input for ellipsoid (default: grs80)
+        """
+        Input: Latitude and Longitude in Decimal Degrees, Ellipsoidal Height in metres
+        Output: Cartesian X, Y, Z Coordinates in metres
+        """
+        # Convert lat & long to radians
+        lat = radians(lat)
+        long = radians(long)
+        # Calculate Ellipsoid Radius of Curvature in the Prime Vertical - nu
+        if lat == 0:
+            nu = ellipsoid.semimaj  # was grs80.semimaj; use the ellipsoid argument consistently
+        else:
+            nu = ellipsoid.semimaj / (sqrt(1 - ellipsoid.ecc1sq * (sin(lat) ** 2)))
+        # Calculate x, y, z
+        x = (nu + ellht) * cos(lat) * cos(long)
+        y = (nu + ellht) * cos(lat) * sin(long)
+        z = ((ellipsoid.semimin ** 2 / ellipsoid.semimaj ** 2) * nu + ellht) * sin(lat)
+        return x, y, z
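As a quick sanity check on these conversions, a minimal sketch (the coordinates are arbitrary; llh2xyz and xyz2llh should round-trip to within numerical tolerance):

    from utility.geodesy import GeoTranslations

    lat, lon = 45.062222, 7.654426  # arbitrary test point (same area as test_method elsewhere in this diff)

    x, y, z = GeoTranslations.llh2xyz(lat=lat, long=lon, ellht=0)
    lat2, lon2, h2 = GeoTranslations.xyz2llh(x=x, y=y, z=z)
    assert abs(lat - lat2) < 1e-9 and abs(lon - lon2) < 1e-9  # round trip

    # Position 100 m east and 50 m north of the origin, as geodetic coordinates
    new_lat, new_lon, new_h = GeoTranslations.enu2llh(east=100.0, north=50.0, up=0.0,
                                                      org_lat=lat, org_long=lon)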
+
+    @staticmethod
+    def vincdir(lat1, lon1, azimuth1to2, ell_dist, ellipsoid=grs80):
+        """
+        Vincenty's Direct Formula
+        :param lat1: Latitude of Point 1 (Decimal Degrees)
+        :param lon1: Longitude of Point 1 (Decimal Degrees)
+        :param azimuth1to2: Azimuth from Point 1 to 2 (Decimal Degrees)
+        :param ell_dist: Ellipsoidal Distance between Points 1 and 2 (m)
+        :param ellipsoid: Ellipsoid Object
+        :return: lat2: Latitude of Point 2 (Decimal Degrees),
+                 lon2: Longitude of Point 2 (Decimal Degrees),
+                 azimuth2to1: Azimuth from Point 2 to 1 (Decimal Degrees)
+
+        Code review: 14-08-2018 Craig Harrison
+        """
+
+        azimuth1to2 = radians(azimuth1to2)
+
+        # Equation numbering is from the GDA2020 Tech Manual v1.0
+
+        # Eq. 88
+        u1 = atan((1 - ellipsoid.f) * tan(radians(lat1)))
+
+        # Eq. 89
+        sigma1 = atan2(tan(u1), cos(azimuth1to2))
+
+        # Eq. 90
+        alpha = asin(cos(u1) * sin(azimuth1to2))
+
+        # Eq. 91
+        u_squared = cos(alpha)**2 \
+            * (ellipsoid.semimaj**2 - ellipsoid.semimin**2) \
+            / ellipsoid.semimin**2
+
+        # Eq. 92
+        a = 1 + (u_squared / 16384) \
+            * (4096 + u_squared * (-768 + u_squared * (320 - 175 * u_squared)))
+
+        # Eq. 93
+        b = (u_squared / 1024) \
+            * (256 + u_squared * (-128 + u_squared * (74 - 47 * u_squared)))
+
+        # Eq. 94
+        sigma = ell_dist / (ellipsoid.semimin * a)
+
+        # Iterate until the change in sigma, delta_sigma, is insignificant (< 1e-12,
+        # matching the break condition below) or after 1000 iterations have been completed
+        two_sigma_m = 0
+        for i in range(1000):
+
+            # Eq. 95
+            two_sigma_m = 2*sigma1 + sigma
+
+            # Eq. 
96 + delta_sigma = b * sin(sigma) * (cos(two_sigma_m) + (b/4) + * (cos(sigma) + * (-1 + 2 * cos(two_sigma_m)**2) + - (b/6) * cos(two_sigma_m) + * (-3 + 4 * sin(sigma)**2) + * (-3 + 4 * cos(two_sigma_m)**2))) + new_sigma = (ell_dist / (ellipsoid.semimin * a)) + delta_sigma + sigma_change = new_sigma - sigma + sigma = new_sigma + + if abs(sigma_change) < 1e-12: + break + + # Calculate the Latitude of Point 2 + # Eq. 98 + lat2 = atan2(sin(u1)*cos(sigma) + cos(u1)*sin(sigma)*cos(azimuth1to2), + (1 - ellipsoid.f) + * sqrt(sin(alpha)**2 + (sin(u1)*sin(sigma) + - cos(u1)*cos(sigma)*cos(azimuth1to2))**2)) + lat2 = degrees(lat2) + + # Calculate the Longitude of Point 2 + # Eq. 99 + lon = atan2(sin(sigma)*sin(azimuth1to2), + cos(u1)*cos(sigma) - sin(u1)*sin(sigma)*cos(azimuth1to2)) + + # Eq. 100 + c = (ellipsoid.f/16)*cos(alpha)**2 \ + * (4 + ellipsoid.f*(4 - 3*cos(alpha)**2)) + + # Eq. 101 + omega = lon - (1-c)*ellipsoid.f*sin(alpha) \ + * (sigma + c*sin(sigma)*(cos(two_sigma_m) + c*cos(sigma) + * (-1 + 2*cos(two_sigma_m)**2))) + + # Eq. 102 + lon2 = float(lon1) + degrees(omega) + + # Calculate the Reverse Azimuth + azimuth2to1 = degrees(atan2(sin(alpha), -sin(u1)*sin(sigma) + + cos(u1)*cos(sigma)*cos(azimuth1to2))) + 180 + + return round(lat2, 11), round(lon2, 11), round(azimuth2to1, 9) + + + diff --git a/worker/utility/utility_catalog_cached.py b/worker/utility/utility_catalog_cached.py new file mode 100644 index 0000000..e1e7e05 --- /dev/null +++ b/worker/utility/utility_catalog_cached.py @@ -0,0 +1,254 @@ +from jobs.cache_redis import CacheRedisAdapter +from general_types.virtual_classes import ObservableGeneric +# FIXME: Localization to be reviewed +from typing import List, Dict +import logging +import datetime + +logger = logging.getLogger('textlogger') + + +class UtilityCatalogCached: + LABEL_DICTIONARY_OBS_ALREADYUSED = 'LABEL_DICTIONARY_OBS_BLACKLIST_MAIN' + LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP = 'DICTIONARY_OBS_TO_BACKUP' + LABEL_DICTIONARY_OBSERVABLE_NEW = 'DICTIONARY_OBSERVABLE_NEW' + LABEL_DICTIONARY_TOPICS = 'SERVICEOBSERVATION_DICTIONARY_TOPICS' + LABEL_DICTIONARY_DEVICE_REGISTRATION = 'DICTIONARY_DEVICEREGISTRATION' + + @staticmethod + def initialize_catalog() -> bool: + try: + CacheRedisAdapter.initialize() + + CacheRedisAdapter.dictionary_create(label_info=UtilityCatalogCached.LABEL_DICTIONARY_TOPICS) + CacheRedisAdapter.dictionary_create(label_info=UtilityCatalogCached.LABEL_DICTIONARY_DEVICE_REGISTRATION) + + UtilityCatalogCached.configure_catalog_observable_backup(['Localization']) + + return True + + except Exception as ex: + logger.error('initialize_catalog Exception: {}'.format(ex)) + return False + + @staticmethod + def configure_catalog_observable_backup(label_list_types: List[str]): + try: + CacheRedisAdapter.dictionary_create(label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP) + + for type_observable in label_list_types: + label_store = UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP+type_observable + CacheRedisAdapter.dictionary_update_value(label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP, + key=type_observable, + value=label_store) + + except Exception as ex: + logger.error('configure_catalog_observable_backup Exception: {}'.format(ex)) + + @staticmethod + def set_catalog_observable_backup(catalog_observable: Dict[str, List[ObservableGeneric]]) -> bool: + try: + if not catalog_observable: + return False + + for type_observable in catalog_observable: + 
+                UtilityCatalogCached.set_list_obstobackup(type_observable=type_observable,
+                                                          list_obs_to_backup=catalog_observable[type_observable])
+
+            return True
+        except Exception as ex:
+            logger.error('set_catalog_observable_backup Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def set_list_obstobackup(type_observable: str, list_obs_to_backup: List[ObservableGeneric]) -> bool:
+        try:
+            label_list = UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP + type_observable
+
+            return CacheRedisAdapter.set_cache_info(label_info=label_list,
+                                                    data=list_obs_to_backup)
+        except Exception as ex:
+            logger.error('set_list_obstobackup Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def get_list_obstobackup() -> List[ObservableGeneric]:
+        try:
+            dictionary_type_observable = \
+                CacheRedisAdapter.dictionary_get_all(label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP,
+                                                     type_value=str)
+
+            if not dictionary_type_observable:
+                return None
+
+            list_return = list()
+
+            for type_observable in dictionary_type_observable:
+
+                if dictionary_type_observable[type_observable] is None:
+                    continue
+
+                list_partial = CacheRedisAdapter.get_cached_info(label_info=dictionary_type_observable[type_observable],
+                                                                 type_data=list)
+
+                if not list_partial:
+                    continue
+
+                list_return.extend(list_partial)
+            return list_return
+
+        except Exception as ex:
+            logger.error('get_list_obstobackup Exception: {}'.format(ex))
+            return None
+
+    @staticmethod
+    def confirm_obs_backup() -> bool:
+        try:
+            dictionary_type_observable = CacheRedisAdapter.\
+                dictionary_get_all(label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP,
+                                   type_value=str)
+
+            if not dictionary_type_observable:
+                return False
+
+            for type_observable in dictionary_type_observable:
+
+                if dictionary_type_observable[type_observable] is None:
+                    continue
+
+                CacheRedisAdapter.remove_cache_info(label_info=dictionary_type_observable[type_observable])
+
+            return True
+        except Exception as ex:
+            logger.error('confirm_obs_backup Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def get_dictionary_name(label_type_observable: str):
+        return '{0}_{1}'.format(UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_NEW,
+                                label_type_observable)
+
+    @staticmethod
+    def append_new_observable(label_type_observable: str, observable: ObservableGeneric) -> bool:
+        try:
+            if not observable:
+                return False
+
+            return CacheRedisAdapter.dictionary_update_value(label_info=UtilityCatalogCached.get_dictionary_name(label_type_observable),
+                                                             key=observable.get_label_cache(),
+                                                             value=observable)
+        except Exception as ex:
+            logger.error('UtilityCatalogCache append_new_observable Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def get_complete_dictionary_observables(list_type_observables: List[str]) -> Dict[str, Dict[str, ObservableGeneric]]:
+        try:
+            if not list_type_observables:
+                logger.warning('UtilityCatalogCache get_complete_dictionary_observables list_type_observables is None')
+                return None
+
+            dict_return = dict()
+
+            for type_observable in list_type_observables:
+                dict_observable_type = \
+                    CacheRedisAdapter.dictionary_get_all(label_info=UtilityCatalogCached.get_dictionary_name(type_observable),
+                                                         type_value=ObservableGeneric)
+                if not dict_observable_type:
+                    logger.info('UtilityCatalogCache get_complete_dictionary_observables not available for type_obs: {}'.format(type_observable))
+                    continue
+
+                logger.info('UtilityCatalogCache get_complete_dictionary_observables available for type_obs: {0}, counter_elements: {1}'.format(type_observable,
+                                                                                                                                                len(dict_observable_type)))
+
+                dict_return[type_observable] = dict_observable_type
+
+            return dict_return
+        except Exception as ex:
+            logger.error('UtilityCatalogCache: get_complete_dictionary_observables Exception: {}'.format(ex))
+            return None
+
+    @staticmethod
+    def get_last_observable(label_observable: str) -> ObservableGeneric:
+        try:
+            return CacheRedisAdapter.get_cached_info(label_info=label_observable,
+                                                     type_data=ObservableGeneric)
+        except Exception as ex:
+            logger.error('get_last_observable Exception: {}'.format(ex))
+            return None
+
+    @staticmethod
+    def check_observable_new(dictionary_obs_time: Dict[str, datetime.datetime], observable: ObservableGeneric) -> bool:
+        try:
+            if not observable:
+                return False
+
+            if not dictionary_obs_time:
+                return True
+
+            if observable.get_label_cache() not in dictionary_obs_time \
+                    or not dictionary_obs_time[observable.get_label_cache()]:
+                return True
+
+            timestamp_prev_obs = dictionary_obs_time[observable.get_label_cache()]
+            timestamp_curr_obs = observable.get_timestamp()
+
+            if (timestamp_curr_obs - timestamp_prev_obs).total_seconds() <= 0:
+                return False
+
+            return True
+        except Exception as ex:
+            logger.error('check_observable_new Exception: {}'.format(ex))
+            return True
+
+    @staticmethod
+    def get_observationlist_specifictype(type_observable: str,
+                                         list_mqtttopics_admitted: List[str],
+                                         dictionary_singletype_observables: Dict[str, ObservableGeneric],
+                                         dictionary_obs_time: Dict[str, datetime.datetime]) -> List[ObservableGeneric]:
+
+        list_observable_singletype = list()
+
+        for mqtt_topic in dictionary_singletype_observables:
+            single_observation = dictionary_singletype_observables[mqtt_topic]
+
+            if not single_observation:
+                logger.info('UtilityCatalogCache get_observationlist_specifictype not observable')
+                continue
+
+            if not UtilityCatalogCached.check_observable_new(dictionary_obs_time=dictionary_obs_time,
+                                                             observable=single_observation):
+                continue
+
+            list_observable_singletype.append(single_observation)
+
+        return list_observable_singletype
+
+    @staticmethod
+    def append_topic(single_topic: str) -> bool:
+        try:
+            CacheRedisAdapter.dictionary_update_value(label_info=UtilityCatalogCached.LABEL_DICTIONARY_TOPICS,
+                                                      key=single_topic,
+                                                      value=1)
+            return True
+        except Exception as ex:
+            logger.error('UtilityCatalogCached append_topic Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def get_list_topics() -> List[str]:
+        try:
+            dictionary_topics = CacheRedisAdapter.\
+                dictionary_get_all(label_info=UtilityCatalogCached.LABEL_DICTIONARY_TOPICS,
+                                   type_value=str)
+
+            list_topics = list()
+            for key in dictionary_topics:
+                if not key:
+                    continue
+                list_topics.append(key)
+
+            return list_topics
+        except Exception as ex:
+            logger.error('UtilityCatalogCached get_list_topics Exception: {}'.format(ex))
+            return None
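The catalog above is essentially a write-then-confirm backup cycle over Redis; a minimal sketch of the intended flow, assuming an ObservableGeneric instance is available (some_observable is hypothetical):

    from utility.utility_catalog_cached import UtilityCatalogCached

    UtilityCatalogCached.initialize_catalog()

    # Map an observable type label to the observables pending backup;
    # 'Localization' is the type pre-registered by initialize_catalog()
    UtilityCatalogCached.set_catalog_observable_backup(
        catalog_observable={'Localization': [some_observable]})

    pending = UtilityCatalogCached.get_list_obstobackup()  # read back what is queued
    if pending:
        # ... persist `pending` somewhere durable, then drop the cached copies
        UtilityCatalogCached.confirm_obs_backup()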
diff --git a/worker/utility/utility_conversion.py b/worker/utility/utility_conversion.py
new file mode 100644
index 0000000..1c33be8
--- /dev/null
+++ b/worker/utility/utility_conversion.py
@@ -0,0 +1,55 @@
+import numpy as np
+import logging
+
+logger = logging.getLogger('textlogger')
+
+
+class UtilityConversion:
+    @staticmethod
+    def conversion_matrix(content):
+        from scipy.sparse import csr_matrix
+        density_map = csr_matrix((content['data'], content['inds'], content['indptr']),
+                                 shape=content['shape']).todense()
+        mask = content['mask']
+        mask = csr_matrix((mask['data'], mask['inds'], mask['indptr']), shape=mask['shape']).todense() - 1
+        final_density_map = mask 
+ density_map + return final_density_map + + + @staticmethod + def convert_matrix_to_string(mat): + np.set_printoptions(threshold=np.inf) + A = np.squeeze(np.asarray(mat)) + + string_matrix = np.array2string(A) + return string_matrix + + @staticmethod + def convert_queryset_string_matrix(text_matrix_to_convert): + + try: + text_matrix_to_convert = text_matrix_to_convert.replace('\n', '') + text_matrix_to_convert = text_matrix_to_convert.replace(' ', ' ') + text_matrix_to_convert = text_matrix_to_convert.replace(' ', ' ') + text_matrix_to_convert = text_matrix_to_convert.replace('[ ', '[') + text_matrix_to_convert = text_matrix_to_convert.replace(' ', ',') + text_matrix_to_convert = text_matrix_to_convert.replace('],[', ';') + text_matrix_to_convert = text_matrix_to_convert.replace('[', '') + text_matrix_to_convert = text_matrix_to_convert.replace(']', '') + + #logger.debug(text_matrix_to_convert) + + matrix_imported = np.matrix(text_matrix_to_convert) + except Exception as ex: + logger.error("Exception convert_queryset_string_matrix: {0}".format(ex)) + return None + + return matrix_imported + + @staticmethod + def convert_datetime_to_isoformat(datetime): + + datetime_str = str(datetime) + + datetime_str = datetime_str.replace(' ', 'T') + + return datetime_str \ No newline at end of file diff --git a/worker/utility/utility_database.py b/worker/utility/utility_database.py new file mode 100644 index 0000000..c60b992 --- /dev/null +++ b/worker/utility/utility_database.py @@ -0,0 +1,155 @@ +from django import db +from general_types.virtual_classes import OutputMessage +from general_types.modelsenums import OutputMessageType +from jobs.models import SWRunningInfo +from django.contrib.gis.geos import MultiPoint +from django.contrib.gis.geos import GEOSGeometry +import logging +import datetime +import pytz +from typing import List + +logger = logging.getLogger('textlogger') + + +class UtilityDatabase: + @staticmethod + def purge_db_connections(): + db.connections.close_all() + + @staticmethod + def connection_db_close(): + db.connection.close() + + @staticmethod + def create_sw_running_info(sw_version: str): + try: + sw_running_info = SWRunningInfo(software_version=sw_version) + sw_running_info.run_id = 1 + sw_running_info.timestamp_start = datetime.datetime.now(tz=pytz.utc) + sw_running_info.save() + + return 1 + except Exception as ex: + logger.error('UtilityDatabase create_sw_running Exception: {}'.format(ex)) + return 0 + + @staticmethod + def update_sw_running_timestop(sw_version: str, + timestamp_stop: datetime, + counter_message_output: int, + counter_observables: int, + counter_device_registered: int): + try: + sw_running_info = SWRunningInfo.objects.get(software_version=sw_version) + + if not sw_running_info: + return False + + sw_running_info.timestamp_stop = timestamp_stop # datetime.datetime.now(tz=pytz.utc) + sw_running_info.counter_observables = counter_observables + sw_running_info.counter_message_output = counter_message_output + sw_running_info.counter_device_registered = counter_device_registered + sw_running_info.save() + + logger.info('UtilityDatabase update_sw_running_timestop now') + + return True + except Exception as ex: + logger.error('UtilityDatabase update_sw_running_timestop exception; {}'.format(ex)) + return False + + @staticmethod + def get_specific_sw_running_info(sw_version: str) -> SWRunningInfo: + try: + return SWRunningInfo.objects.get(software_version=sw_version) + except Exception as ex: + logger.info('UtilityDatabase get_specific_sw_running_info: 
{}'.format(ex)) + return None + + @staticmethod + def update_get_sw_running_info(sw_version: str) -> int: + try: + sw_running_info = UtilityDatabase.get_specific_sw_running_info(sw_version=sw_version) + + if not sw_running_info: + return UtilityDatabase.create_sw_running_info(sw_version=sw_version) + + sw_running_info.run_id = sw_running_info.run_id + 1 + sw_running_info.timestamp_start = datetime.datetime.now(tz=pytz.utc) + sw_running_info.save() + + running_id = sw_running_info.run_id + + return running_id + except Exception as ex: + logger.error('UtilityDatabase update_sw_running_info exception; {}'.format(ex)) + return UtilityDatabase.create_sw_running_info(sw_version=sw_version) + + @staticmethod + def extract_localization_ids(localization_list: list): + try: + if not localization_list: + return list() + + list_ids = list() + + for localization in localization_list: + list_ids.append(localization.key) + + return list_ids + except Exception as ex: + return None + + @staticmethod + def convert_string_to_list_multipoint(string_list_multipoint: str) -> List[MultiPoint]: + try: + if not string_list_multipoint: + return None + + string_list_multipoint = string_list_multipoint.replace('{', '') + string_list_multipoint = string_list_multipoint.replace('}', '') + + list_string = string_list_multipoint.split(':') + + list_multipoints = list() + + if not list_string: + return None + + for string_multipoint in list_string: + if not string_multipoint: + continue + + multipoint = GEOSGeometry(geo_input=string_multipoint) + + if not multipoint: + continue + + list_multipoints.append(multipoint) + + return list_multipoints + except Exception as ex: + logger.error('UtilityDatabase convert_string_to_list_multipoint Exception: {}'.format(ex)) + return None + + @staticmethod + def get_list_messageoutput_byids(list_ids: List[int], outputmessagetype: OutputMessageType) -> List[OutputMessage]: + try: + if not list_ids: + return None + + list_output_messages = list() + + for id in list_ids: + output_message = UtilityDatabase.get_outputmessage_byid(id=id, + outputmessagetype=outputmessagetype) + + if not output_message: + continue + + list_output_messages.append(output_message) + + return list_output_messages + except Exception as ex: + return None diff --git a/worker/utility/utility_geographic_conversion.py b/worker/utility/utility_geographic_conversion.py new file mode 100644 index 0000000..15495ae --- /dev/null +++ b/worker/utility/utility_geographic_conversion.py @@ -0,0 +1,186 @@ +from django.contrib.gis.gdal import SpatialReference, CoordTransform +# from django.contrib.gis.geos import GEOSGeometry +from django.contrib.gis.geos import Point +from utility.geodesy import GeoPosition, SurfaceVector +# from django.contrib.gis.geos import LineString +# from geopy.distance import distance +# from shapely.geometry import Polygon +import math +import logging + +logger = logging.getLogger('textlogger') + + +class ECEFPosition: + def __init__(self, x=0, y=0, z=0): + self.x = x + self.y = y + self.z = z + + def set_pos_diff(self, pos_a, pos_b): + self.x = pos_a.x-pos_b.x + self.y = pos_a.y-pos_b.y + self.z = pos_a.z-pos_b.z + + def get_point_ecef_conversion(self): + return Point(x=self.x,y=self.y,z=self.z,srid=4978) + + +class GeographicRectangleArea: + def __init__(self, ground_plane_position: GeoPosition, size_x_m, size_y_m, cell_size_m=1): + self.ground_plane_position = None + self.size_x_m = 0 + self.size_y_m = 0 + self.cell_size_m = 1 + self.set_properties(ground_plane_position, size_x_m, size_y_m, 
cell_size_m) + + def set_properties(self, ground_plane_position: GeoPosition, size_x_m, size_y_m, cell_size_m): + self.ground_plane_position = ground_plane_position + self.size_x_m = size_x_m + self.size_y_m = size_y_m + self.cell_size_m = cell_size_m + + def get_groundplaneposition(self) -> GeoPosition: + if not self.ground_plane_position: + raise Exception("GeographicRectangleArea ground_plane_position not set") + + return self.ground_plane_position + + def check_surfacevector_inside_area(self, surface_vector: SurfaceVector) -> bool: + if not surface_vector: + return False + + if surface_vector.x < 0 or surface_vector.x >= self.size_x_m: + return False + + if surface_vector.y < 0 or surface_vector.y >= self.size_y_m: + return False + + return True + + def check_position_inside_area(self, position): + return True + + def get_vector_distance(self, position) -> SurfaceVector: + try: + referencepos_ecef = GeopgraphicConversion.convert_llhtoecef(self.ground_plane_position) + position_ecef = GeopgraphicConversion.convert_llhtoecef(position) + + return GeopgraphicConversion.convert_eceftoenu(pos1=position_ecef, + ref_position_ecef=referencepos_ecef, + ref_position_llh=self.ground_plane_position) + except Exception as ex: + return None + # gcoord = SpatialReference(4326) + # mycoord = SpatialReference(4978) + # trans = CoordTransform(gcoord, mycoord) + # + # pos1 = self.ground_plane_position + # pos2 = position + # + # pos1.transform(trans) + # pos2.transform(trans) + # + # delta_x = pos1.x-pos2.x + # delta_y = pos1.y-pos2.y + # + # return SurfaceVector(delta_y,delta_y) + + +class GeopgraphicConversion: + + @staticmethod + def create_copy_point(start_point): + try: + if not start_point.z: + return Point(x=start_point.x,y=start_point.y,z=0,srid=start_point.srid) + + return Point(x=start_point.x,y=start_point.y,z=start_point.z,srid=start_point.srid) + except Exception as ex: + return None + + @staticmethod + def test_method(): + posA = Point(x=45.062222, y=7.654426, z=0, srid=4326) + posB = Point(x=45.062432, y=7.654415, z=0, srid=4326) + + posA_ecef = GeopgraphicConversion.convert_llhtoecef(posA) + posB_ecef = GeopgraphicConversion.convert_llhtoecef(posB) + + enu_result =GeopgraphicConversion.convert_eceftoenu(posA_ecef, posB_ecef) + + distance_m = posA.distance(posB) + + print('ENU Conversion: East={0}, North={1}, Distance_m: {2}'.format(str(enu_result.x), str(enu_result.y), str(distance_m))) + + @staticmethod + def check_point_inside_geographic_area(point,geographic_area): + return True + + @staticmethod + def convert_eceftollh(position): + try: + gcoord = SpatialReference(4978) + mycoord = SpatialReference(4326) + trans = CoordTransform(gcoord, mycoord) + + postoconv=GeopgraphicConversion.create_copy_point(position) + postoconv.transform(trans) + + return postoconv + except Exception as ex: + return None + + @staticmethod + def calculate_enu_distance(position: Point, ref_position: Point) -> SurfaceVector: + try: + position_ecef = GeopgraphicConversion.convert_llhtoecef(position) + ref_position_ecef = GeopgraphicConversion.convert_llhtoecef(ref_position) + + enu_result = GeopgraphicConversion.convert_eceftoenu(position_ecef, ref_position_ecef, ref_position) + + return enu_result + + except Exception as ex: + return None + + @staticmethod + def convert_eceftoenu(pos1: Point, ref_position_ecef: Point, ref_position_llh: Point) -> SurfaceVector: + try: + diff_pos = ECEFPosition() + diff_pos.set_pos_diff(pos1, ref_position_ecef) + # ref_position_llh = 
GeopgraphicConversion.convert_eceftollh(ref_position) + + latitude_rads = math.radians(ref_position_llh.y) + longitude_rads = math.radians(ref_position_llh.x) + + sin_phi = math.sin(latitude_rads) + cos_phi = math.cos(latitude_rads) + sin_lam = math.sin(longitude_rads) + cos_lam = math.cos(longitude_rads) + + x = ((-1*sin_lam)*diff_pos.x)+(cos_lam*diff_pos.y) + y = ((-1*sin_phi*cos_lam)*diff_pos.x) - (sin_phi*sin_lam*diff_pos.y)+(cos_phi*diff_pos.z) + + return SurfaceVector(x=x, + y=y) + + except Exception as ex: + logger.error('convert_eceftoenu Exception: {}'.format(ex)) + return None + + + @staticmethod + def convert_llhtoecef(position:Point): + try: + gcoord = SpatialReference(4326) + mycoord = SpatialReference(4978) + trans = CoordTransform(gcoord, mycoord) + + postoconv=GeopgraphicConversion.create_copy_point(position) + postoconv.transform(trans) + + return postoconv + except Exception as ex: + logger.error('convert_llhtoecef exception: {}'.format(ex)) + return None diff --git a/worker/utility/utility_startup_application.py b/worker/utility/utility_startup_application.py new file mode 100644 index 0000000..1adfa2c --- /dev/null +++ b/worker/utility/utility_startup_application.py @@ -0,0 +1,47 @@ +import logging +from jobs.cache_redis import CacheRedisAdapter +from utility.utility_database import UtilityDatabase +from utility.utility_catalog_cached import UtilityCatalogCached +from shared.settings.appglobalconf import LOCAL_CONFIG, LocConfLbls + +logger = logging.getLogger('textlogger') + + +class UtilityStartupApplication: + @staticmethod + def startup(): + UtilityCatalogCached.initialize_catalog() + + @staticmethod + def trace_startup_info(): + try: + logger.info("HLDFAD MODULE STARTED, VERSION: {0}".format(LOCAL_CONFIG[LocConfLbls.LABEL_SW_RELEASE_VERSION])) + + for key in LOCAL_CONFIG: + if "SW_RELEASE_VERSION" == key: + continue + + logger.info(" - {0}: {1}" + .format(key, LOCAL_CONFIG[key])) + + except Exception as ex: + logger.error('UtilityStartupApplication trace_startup_info Exception: {}'.format(ex)) + + @staticmethod + def adjust_startup_data(): + try: + + UtilityDatabase.purge_db_connections() + UtilityDatabase.update_database_startup() + + # CacheRedisAdapter.test_example() + + # crowd_heatmap_outputs = UtilityDatabase.get_list_crowdheatmap_not_transferred(LOCAL_CONFIG[LocConfLbls.LABEL_PILOT_NAME]) + # crowd_heatmap_ids = UtilityDatabase.extract_crowd_heatmap_ids(crowd_heatmap_outputs) + running_id = UtilityDatabase.update_get_sw_running_info(LOCAL_CONFIG[LocConfLbls.LABEL_SW_RELEASE_VERSION]) + + logger.info('UtilityStartupApplication Running ID: {0} SW_VERSION: {1}' + .format(str(running_id), + LOCAL_CONFIG[LocConfLbls.LABEL_SW_RELEASE_VERSION])) + except Exception as ex: + logger.error('UtilityStartupApplication adjust_startup_data Exception: {}'.format(ex)) \ No newline at end of file diff --git a/worker/utility/utility_sw_update_info.py b/worker/utility/utility_sw_update_info.py new file mode 100644 index 0000000..93130ea --- /dev/null +++ b/worker/utility/utility_sw_update_info.py @@ -0,0 +1,40 @@ +from utility.utility_database import UtilityDatabase +from jobs.cache_redis import CachedComponents +from shared.settings.appglobalconf import LOCAL_CONFIG, LocConfLbls +import datetime +import pytz +import logging + +logger = logging.getLogger('textlogger') + + +class UtilitySWUpdateInfo: + @staticmethod + def update_sw_info_realtime(): + try: + UtilityDatabase.purge_db_connections() + + sw_version = LOCAL_CONFIG[LocConfLbls.LABEL_SW_RELEASE_VERSION] + 
counter_observables = CachedComponents.get_last_observable_id() + counter_output = CachedComponents.get_counter_crowd_heatmap_output() + counter_wristband_registered = CachedComponents.get_counter_datastreams_registered(datastream_feature='Localization') + current_timestamp = datetime.datetime.now(tz=pytz.utc) + + UtilityDatabase.update_sw_running_timestop(sw_version=sw_version, + timestamp_stop=current_timestamp, + counter_message_output=counter_output, + counter_observables=counter_observables, + counter_device_registered=counter_wristband_registered) + + logger.info('UtilitySWUpdateInfo Updated Info now, ' + 'counterObs: {0}, CounterOutput: {1}, ' + 'CounterDevRegister: {2}' + .format(str(counter_observables), + str(counter_output), + str(counter_wristband_registered)) + ) + except Exception as ex: + logger.error('UtilitySWUpdateInfo Exception: {}'.format(ex)) + + + diff --git a/worker/utility/utilitydictionaries.py b/worker/utility/utilitydictionaries.py new file mode 100644 index 0000000..4d90fd8 --- /dev/null +++ b/worker/utility/utilitydictionaries.py @@ -0,0 +1,22 @@ +from typing import Dict, Any +import logging + +logger = logging.getLogger('textlogger') + + +class UtilityDictionaries: + @staticmethod + def get_dict_field_if(dictionary: Dict[str, Any], + label: str, + none_value: Any = None) -> Any: + try: + if not dictionary: + return None + + if label not in dictionary.keys(): + return none_value + + return dictionary[label] + except Exception as ex: + logger.error('UtilityDictionaries get_dict_field_if Exception: {}'.format(ex)) + return None \ No newline at end of file diff --git a/worker/utility/utilitymongodb.py b/worker/utility/utilitymongodb.py new file mode 100644 index 0000000..ad4c1ac --- /dev/null +++ b/worker/utility/utilitymongodb.py @@ -0,0 +1,78 @@ +from pymongo import MongoClient +from shared.settings.appglobalconf import MONGO_DB_CONFIGURATION +import logging +from typing import Iterable + +logger = logging.getLogger(name='textlogger') + + +class UtilityMongoDB: + mongo_db_client = None + handle_database = None + handle_collection = None + host_address = str() + + @staticmethod + def set_host_address(): + UtilityMongoDB.host_address = '{0}:{1}'.format(MONGO_DB_CONFIGURATION['MONGODB_HOSTNAME'], + MONGO_DB_CONFIGURATION['MONGODB_TCP_PORT']) + + @staticmethod + def initialize(): + try: + UtilityMongoDB.set_host_address() + + UtilityMongoDB.mongo_db_client = MongoClient(host=UtilityMongoDB.host_address, + username=MONGO_DB_CONFIGURATION['MONGODB_USERNAME'], + password=MONGO_DB_CONFIGURATION['MONGODB_PASSWORD']) + + UtilityMongoDB.handle_database = UtilityMongoDB.mongo_db_client[ + MONGO_DB_CONFIGURATION['MONGODB_DATABASE_NAME'] + ] + + UtilityMongoDB.handle_collection = UtilityMongoDB.handle_database[MONGO_DB_CONFIGURATION['MONGODB_DATABASE_COLLECTION']] + + # dictionary_element = {"TESTSW": 2} + # + # result = collection_new.insert_one(dictionary_element) + + except Exception as ex: + logger.error('UtilityMongoDB initialize Exception: {}'.format(ex)) + + @staticmethod + def close() -> bool: + try: + if not UtilityMongoDB.mongo_db_client: + return False + + UtilityMongoDB.mongo_db_client.close() + UtilityMongoDB.mongo_db_client = None + except Exception as ex: + logger.error('UtilityMongoDB Close Exception: {}'.format(ex)) + + @staticmethod + def get_collections_global_count() -> int: + try: + if not UtilityMongoDB.mongo_db_client: + return -1 + + return UtilityMongoDB.handle_collection.count() + except Exception as ex: + logger.error('UtilityMongoDB global_count 
Exception: {}'.format(ex))
+            return 0
+
+    @staticmethod
+    def save_observable(json_message: dict):
+        try:
+            UtilityMongoDB.handle_collection.insert_one(json_message)
+            logger.debug('UtilityMongoDB save_observable done')
+        except Exception as ex:
+            logger.error('UtilityMongoDB save_observable Exception: {}'.format(ex))
+
+    @staticmethod
+    def save_many_observable(collection_json_messages: Iterable):
+        try:
+            UtilityMongoDB.handle_collection.insert_many(documents=collection_json_messages)
+            logger.debug('UtilityMongoDB save_many_observable done')
+        except Exception as ex:
+            logger.error('UtilityMongoDB save_many_observable Exception: {}'.format(ex))
diff --git a/worker/utility/utilitytimer.py b/worker/utility/utilitytimer.py
new file mode 100644
index 0000000..7e8fbc8
--- /dev/null
+++ b/worker/utility/utilitytimer.py
@@ -0,0 +1,42 @@
+import threading
+
+
+class TimerRequest:
+    timer_set = None
+    request_start = False
+
+    @staticmethod
+    def get_timer() -> threading.Timer:
+        if not TimerRequest.timer_set:
+            raise Exception('NO TIMER SET')
+        return TimerRequest.timer_set
+
+    @staticmethod
+    def configure_timer(func, timeout):
+        try:
+            TimerRequest.timer_set = threading.Timer(interval=timeout, function=func)
+        except Exception as ex:
+            print('TimerRequest configure_timer Exception: {}'.format(ex))
+
+    @staticmethod
+    def action_timer() -> bool:
+        try:
+            TimerRequest.get_timer().start()
+            TimerRequest.request_start = True
+            print('TimerRequest start Called')
+            return True
+        except Exception as ex:
+            print('TimerRequest action_timer Exception: {}'.format(ex))
+            return False
+
+    @staticmethod
+    def clear_timer() -> bool:
+        try:
+            if not TimerRequest.request_start:
+                print('TimerRequest clear_timer Not necessary')
+                return True
+
+            TimerRequest.get_timer().cancel()
+            print('TimerRequest clear_timer Called')
+            return True
+        except Exception as ex:
+            print('TimerRequest clear_timer Exception: {}'.format(ex))
+            return False
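Finally, a minimal usage sketch for the one-shot timer above (the callback and timeout are illustrative; threading.Timer fires the function once, timeout seconds after start):

    from utility.utilitytimer import TimerRequest

    def on_timeout():
        print('timer fired')

    TimerRequest.configure_timer(func=on_timeout, timeout=5.0)
    TimerRequest.action_timer()   # starts the one-shot timer
    # ... if the awaited event arrives before the deadline:
    TimerRequest.clear_timer()    # cancels the pending callback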