Engineering-Research-and-Development / iotagent-opcua

IoT Agent for OPC UA protocol
https://iotagent-opcua.rtfd.io/
GNU Affero General Public License v3.0

"Executed expiring daemon" from IoTAgentNGSI.ContextServer #61

Closed. chumvan closed this issue 1 year ago.

chumvan commented 3 years ago

I am working with the IoT Agent for OPC UA with the following setup. The OPC UA server is the Prosys OPC UA Simulation Server.

AGECONF/config.json:

{
  "logLevel" : "DEBUG",
  "contextBroker" : {
    "host" : "orion",
    "port" : 1026
  },
  "server" : {
    "port" : 4001,
    "baseRoot" : "/"
  },
  "deviceRegistry" : {
    "type" : "memory"
  },
  "mongodb" : {
    "host" : "iotmongo",
    "port" : "27017",
    "db" : "iotagent",
    "retries" : 5,
    "retryTime" : 5
  },
  "types" : {
    "7:Simulation" : {
      "service" : "opcua_car",
      "subservice" : "/demo",
      "active" : [ {
        "name" : "7:Counter",
        "type" : "Number"
      }, {
        "name" : "7:Random",
        "type" : "Number"
      }, {
        "name" : "7:Sawtooth",
        "type" : "Number"
      }, {
        "name" : "7:Sinusoid",
        "type" : "Number"
      }, {
        "name" : "7:Square",
        "type" : "Number"
      }, {
        "name" : "7:Triangle",
        "type" : "Number"
      } ],
      "lazy" : [ ],
      "commands" : [ ]
    }
  },
  "browseServerOptions" : null,
  "service" : "opcua_car",
  "subservice" : "/demo",
  "providerUrl" : "http://iotage:4001",
  "pollingExpiration" : "200000",
  "pollingDaemonFrequency" : "20000",
  "deviceRegistrationDuration" : "P1M",
  "defaultType" : null,
  "contexts" : [ {
    "id" : "age01_7:Simulation",
    "type" : "7:Simulation",
    "service" : "opcua_car",
    "subservice" : "/demo",
    "polling" : false,
    "mappings" : [ {
      "ocb_id" : "7:Counter",
      "opcua_id" : "ns=7;i=1001",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Random",
      "opcua_id" : "ns=7;i=1002",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Sawtooth",
      "opcua_id" : "ns=7;i=1003",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Sinusoid",
      "opcua_id" : "ns=7;i=1004",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Square",
      "opcua_id" : "ns=7;i=1005",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Triangle",
      "opcua_id" : "ns=7;i=1006",
      "object_id" : null,
      "inputArguments" : [ ]
    } ]
  } ],
  "contextSubscriptions" : [ ]
}

opcua-postgres.yml, used as the docker-compose file:

version: "3"
#secrets:
#   age_idm_auth:
#      file: age_idm_auth.txt

services:
  iotage:
    hostname: iotage
    image: iotagent4fiware/iotagent-opcua:1.3.4
    networks:
      - hostnet
      - iotnet
    ports:
      - "${AGENT_SERVER_PORT}:${AGENT_SERVER_PORT}"
      - "4081:8080"
    extra_hosts:
      - "iotcarsrv:192.168.50.167"
      - "HP:192.168.50.167"
    depends_on:
      - iotmongo
      - orion
    volumes:
      - ./AGECONF:/opt/iotagent-opcua/conf
      - ./certificates:/opt/iotagent-opcua/certificates
    command: /usr/bin/tail -f /var/log/lastlog

  iotmongo:
    hostname: iotmongo
    image: mongo:3.4
    networks:
      - iotnet
    volumes:
      - iotmongo_data:/data/db
      - iotmongo_conf:/data/configdb

  ################ OCB ################

  orion:
    hostname: orion
    image: fiware/orion:latest
    networks:
      - hostnet
      - ocbnet
    ports:
      - "${ORION_PORT}:${ORION_PORT}"
    depends_on:
      - orion_mongo
    #command: -dbhost mongo
    entrypoint: /usr/bin/contextBroker -fg -multiservice -statCounters -dbhost mongo -logForHumans -logLevel DEBUG -t 255

  orion_mongo:
    hostname: orion_mongo
    image: mongo:3.4
    networks:
      ocbnet:
        aliases:
          - mongo
    volumes:
      - orion_mongo_data:/data/db
      - orion_mongo_conf:/data/configdb
    command: --nojournal

  ############### CYGNUS ###############

  cygnus:
    image: fiware/cygnus-ngsi:${CYGNUS_VERSION}
    hostname: cygnus
    container_name: fiware-cygnus
    networks:
      - hostnet
    depends_on:
      - postgres-db
    expose:
      - "${CYGNUS_POSTGRESQL_SERVICE_PORT}" # 5055
      - "${CYGNUS_API_PORT}" # 5080
    ports:
      - "${CYGNUS_POSTGRESQL_SERVICE_PORT}:${CYGNUS_POSTGRESQL_SERVICE_PORT}"
      - "${CYGNUS_API_PORT}:${CYGNUS_API_PORT}"
    environment:
      - "CYGNUS_POSTGRESQL_SERVICE_PORT=${CYGNUS_POSTGRESQL_SERVICE_PORT}"      
      - "CYGNUS_POSTGRESQL_HOST=postgres-db" # Hostname of the PostgreSQL server used to persist historical contex
      - "CYGNUS_POSTGRESQL_PORT=${POSTGRES_DB_PORT}" # Port that the PostgreSQL server uses to listen to commands
      - "CYGNUS_POSTGRESQL_DATABASE=${POSTGRES_DB}"
      - "CYGNUS_POSTGRESQL_USER=postgres" # Username for the PostgreSQL database user
      - "CYGNUS_POSTGRESQL_PASS=password" # Password for the PostgreSQL database user
      - "CYGNUS_POSTGRESQL_ENABLE_CACHE=true" # Switch to enable caching within the PostgreSQL configuration
      - "CYGNUS_SERVICE_PORT=${CYGNUS_POSTGRESQL_SERVICE_PORT}" # Notification Port that Cygnus listens when subcr
      - "CYGNUS_API_PORT=${CYGNUS_API_PORT}" # Port that Cygnus listens on for operational reasons
      - "CYGNUS_LOG_LEVEL=DEBUG" # The logging level for Cygnus

  postgres-db:
    image: postgres
    hostname: postgres-db
    expose:
      - "${POSTGRES_DB_PORT}"
    ports:
      - "${POSTGRES_DB_PORT}:${POSTGRES_DB_PORT}"
    networks:
      - hostnet
    environment:
      - "POSTGRES_PASSWORD=password"
      - "POSTGRES_USER=postgres"
      - "POSTGRES_DB=${POSTGRES_DB}"
    volumes:
      - postgres-db:/var/lib/postgresql/data

volumes:
  iotmongo_data:
  iotmongo_conf:
  orion_mongo_data:
  orion_mongo_conf:
  postgres-db:

networks:
  hostnet:
  iotnet:
  ocbnet:
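
(For reference, a minimal sketch of how a stack like this is typically brought up and inspected, assuming an .env file or exported shell variables provide AGENT_SERVER_PORT, ORION_PORT, and the other variables referenced above:)

# start the stack defined in opcua-postgres.yml in the background
docker-compose -f opcua-postgres.yml up -d

# follow the agent logs to watch the OPC UA connection attempts
docker-compose -f opcua-postgres.yml logs -f iotage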

The certificates were created and placed on both the OPC UA server and the client (the FIWARE OPC UA agent). I got this error, which never appeared during my previous implementation.

[screenshot: error log]

The error message is ambiguous to me, as I don't know which daemon it is referring to. I checked dockerd with sudo systemctl status dockerd, which showed it as active: [screenshot]

After about 5 minutes the agent showed this:

[screenshot]

manolait commented 3 years ago

Hello,

we have been talking on Stack Overflow. I'm sorry I can't help you with your question, but I'm in the same situation as you.

I am also using the Prosys OPC UA Simulation Server on my local computer for testing, but I have some doubts.

Are these two different servers?

  - "iotcarsrv:192.168.50.167" Is the server of the example running on node ?
  - "HP:192.168.50.167" this is the server that provides the sw OPC UA Prosys Server Simulation?

I have tried running the iotcarsrv server both on my host and in Docker, but the agent in my docker-compose is not able to connect when I assign my machine's hostname and IP.

Any advice? I'm already desperate after so many tests... I also can't get the mapping tool to work.

I don't understand the need to have two servers, one with iotcarsrv and the other with HP.

Best regards

chumvan commented 3 years ago

@manolait As far as I understand: (1) Yes, it was inherited from the example; the car server is only a test program that acts as an OPC UA server. (2) In my case I don't need iotcarsrv, as I set HP as the hostname in all the other files.

You should check the security section and run a Linux command to see which ports are listening. I used sudo netstat -tulpn | grep LISTEN. From there you can see the IP addresses and open ports of the Prosys server.
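
For example (assuming the Prosys endpoint listens on port 5001, as in the config shown later in this thread):

# list listening sockets and look for the OPC UA endpoint port
sudo netstat -tulpn | grep LISTEN | grep 5001
# or, on distributions that ship ss instead of netstat:
sudo ss -tlnp | grep 5001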

manolait commented 3 years ago

Hello,

But in your docker-compose you are mapping the two hosts to the same IP. Is this necessary?

extra_hosts:

chumvan commented 3 years ago

Hello,

But in your docker-compose you are mapping the two hosts to the same IP. Is this necessary?

extra_hosts:
  - "iotcarsrv:192.168.50.167"
  - "HP:192.168.50.167"

No, it is not. I was testing something that I cannot recall right now. However, I no longer communicate with iotcarsrv in my tests.

MattiaMarzano-Eng commented 3 years ago

Hello @VANHUUTHANHTRUNG, can you show me your AGECONF/config.properties file?

chumvan commented 3 years ago

@MattiaMarzano-Eng thanks for your reply.

About the config.properties file: it exists in both ./AGECONF and ./conf. Do I have to modify both files? What is the difference?

Here is the AGECONF/config.properties :

## SOUTHBOUND CONFIGURATION (OPC UA)
namespace-ignore=0,1,2,3,4
endpoint=opc.tcp://HP:5001/UA/CarServer

## NORTHBOUND CONFIGURATION (ORION CONTEXT BROKER)
context-broker-host=orion
context-broker-port=1026
fiware-service=opcua_car
fiware-service-path=/demo

## AGENT CONFIGURATION
server-base-root=/
server-port=4001
provider-url=http://iotage:4001

device-registration-duration=P1M
device-registry-type=memory

log-level=DEBUG

namespaceIndex=3
namespaceNumericIdentifier=1000

# MONGO-DB CONFIGURATION (required if device-registry-type=mongodb)
mongodb-host=iotmongo
mongodb-port=27017
mongodb-db=iotagent
mongodb-retries=5
mongodb-retry-time=5

## DATATYPE MAPPING OPCUA --> NGSI
OPC-datatype-Number=Number
OPC-datatype-Decimal128=Number
OPC-datatype-Double=Number
OPC-datatype-Float=Number
OPC-datatype-Integer=Integer
OPC-datatype-UInteger=Integer
OPC-datatype-String=Text
OPC-datatype-ByteString=Text
#END DATATYPE MAPPING OPCUA --> NGSI

## SESSION PARAMETERS
requestedPublishingInterval=10
requestedLifetimeCount=1000
requestedMaxKeepAliveCount=10
maxNotificationsPerPublish=100
publishingEnabled=true
priority=10

#SubscriptionsStrategy
uniqueSubscription=true

## MONITORING PARAMETERS
samplingInterval=1
queueSize=10000
discardOldest=false

## SERVER CERT AND AUTH
securityMode=SignAndEncrypt
securityPolicy=Basic256Sha256
userName=charm
password=TAU2021

#securityMode=SIGNANDENCRYPT
#securityPolicy=1Basic256
#password=password1
#userName=user1

#api-ip=192.168.13.153

## ADMINISTRATION SERVICES
api-port=8080

## POLL COMMANDS SETTINGS
polling=false
polling-commands-timer=30000
pollingDaemonFrequency=20000
pollingExpiration=200000

## AGENT ID
#agent-id=age01_
#entity-id=age01_Car # used only during tests

## CONFIGURATION
#configuration=api

## CHECK TIMER POLLING DEVICES
checkTimer=2000

Here are the settings in my server, the Prosys Simulation Server: [screenshot]

I generated the certificates using your tutorial, OPC UA Agent Secure Connection Configuration, and put them on both client and server: [screenshot]
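
(A minimal sketch of the kind of OpenSSL commands such a tutorial typically uses; this is an assumption, and the actual subject and extensions would come from the tutorial's user-key.conf:)

# generate a private key and a self-signed client certificate for the agent
# (the exact subject/extensions are assumed to be defined in user-key.conf)
openssl genrsa -out client_key.pem 2048
openssl req -new -x509 -key client_key.pem -out client_cert.pem -days 365 -config user-key.conf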

I checked with the UA Expert client to make sure the server is running and reachable. The server is still working: [screenshot]

MattiaMarzano-Eng commented 3 years ago

It seems that the client certificate you created is not trusted by the OPC UA server; can you double-check that as well, please?

chumvan commented 3 years ago

@MattiaMarzano-Eng Hi, I think I have the certificates in the right place: [screenshot]

On the left is the OPC UA IoT Agent's certificates folder, and on the right is the server's. Please also clarify my question above about the two directories with config files: is it enough to modify only the AGECONF folder?

MattiaMarzano-Eng commented 3 years ago

You only need to edit the AGECONF folder if you intend to use the Docker version of the agent; the conf folder is used to run the agent outside Docker. By the way, regarding your issue, I'm not yet able to replicate your error. Could you also share the certificates tab screenshot from the OPC UA Simulation Server? Thanks.

chumvan commented 3 years ago

Certificates tab: [screenshot] The certificate was trusted. I don't know if .der files are required, so I also tried converting the certificate to a .der file, but that did not work either. With the same setup, UA Expert can connect to the OPC UA server. I generated the certificate with the same user-key.conf file as in your tutorial. Do the details in user-key.conf, such as [req_text] or [subject], affect our setup, or are they just descriptive?
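
(For reference, a PEM-to-DER conversion of the kind described here typically looks like this; the file names are placeholders:)

# convert the PEM client certificate to DER encoding
openssl x509 -in client_cert.pem -outform der -out client_cert.der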

The version of the OPC UA Simulation Server is 5.0.8.330.

MattiaMarzano-Eng commented 3 years ago

I replicated exactly the same setup you have, adding the user charm in the OPC UA Simulation Server and trusting the ENG certificate for the client (you can also find it here); what is inside user-key.conf is not a big deal.

The only difference I have is related to the docker-compose file; I'm currently using the version updated yesterday.

[screenshot]

In this scenario the error above no longer occurs.

chumvan commented 3 years ago

I copied the certificate from the agent's folder to the server's: [screenshot]

I used your template for the docker-compose.yml file, changed the IP for my server, and since I don't use NGSI-LD, I commented out IOTA_JSON_LD_CONTEXT. The file can be seen here: docker-compose.yml

version: "3"

services:

  iotage:
    hostname: iotage
    image: iotagent4fiware/iotagent-opcua:1.3.4
    networks:
      - hostnet
      - iotnet
    ports:
      - "4001:4001"
      - "4081:8080"
    extra_hosts:
      - "HP:192.168.50.167" # Changed for OPCUA Server IP
    depends_on:
      - iotmongo
      - orion
    volumes:
      - ./AGECONF:/opt/iotagent-opcua/conf
      - ./certificates:/opt/iotagent-opcua/certificates
    environment:
      - IOTA_REGISTRY_TYPE=memory #Whether to hold IoT device info in memory or in a database
      - IOTA_LOG_LEVEL=DEBUG # The log level of the IoT Agent
      - IOTA_MONGO_HOST=iot_mongo # The host name of MongoDB
      - IOTA_MONGO_DB=iotagent_opcua # The name of the database used in mongoDB
      # uncomment the next two lines if you intend to use NGSI-LD
      #- IOTA_CB_NGSI_VERSION=ld
      #- IOTA_JSON_LD_CONTEXT=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld # leave commented if using NGSIv2
      - IOTA_FALLBACK_TENANT=opcua_car
      - IOTA_RELAX_TEMPLATE_VALIDATION=true

  iotmongo:
    hostname: iot_mongo
    image: mongo:4.2
    networks:
      - iotnet
    volumes:
      - iot_mongo_data:/data/db
      - iot_mongo_conf:/data/configdb

  ################ OCB ################

  orion:
    hostname: orion
    #replace fiware/orion:latest with fiware/orion-ld:0.7.0 if you intend to use NGSI-LD
    image: fiware/orion:latest
    #image: fiware/orion-ld:0.7.0
    networks:
      - hostnet
      - ocbnet
    ports:
      - "1026:1026"
    depends_on:
      - orion_mongo
    # add -forwarding if using NGSI-ld
    #command: -statCounters -dbhost orion_mongo -logLevel INFO -forwarding
    command: -statCounters -dbhost orion_mongo -logLevel INFO

  orion_mongo:
    hostname: orion_mongo
    image: mongo:4.2
    networks:
      - ocbnet
    ports:
      - "27017:27017"
    volumes:
      - orion_mongo_data:/data/db
      - orion_mongo_conf:/data/configdb
    command: --nojournal

volumes:
  iot_mongo_data:
  iot_mongo_conf:
  orion_mongo_data:
  orion_mongo_conf:

networks:
  hostnet:
  iotnet:
  ocbnet:

The IP was verified with nslookup: [screenshot] There are several IPs listening on port 5001: [screenshot] I tried all of them, but with no result.

Then I ran docker-compose -f docker-compose.yml up --build.
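
(For completeness, name resolution and TCP reachability can also be checked from inside the agent container; this assumes the image ships getent and a netcat binary:)

# confirm that the extra_hosts entry resolves inside the container
docker-compose -f docker-compose.yml exec iotage getent hosts HP
# test whether the OPC UA endpoint port is reachable from the container
docker-compose -f docker-compose.yml exec iotage nc -vz HP 5001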

Still the same error.

MattiaMarzano-Eng commented 3 years ago

Does it again fail to connect to the server?

chumvan commented 3 years ago

Yes, the error was the same as before: "Executed expiration daemon" [screenshot]

MattiaMarzano-Eng commented 3 years ago

Please try deleting your Docker volumes and verify that you properly added your user in the OPC UA Simulation Server. Clean the folder where you put the certificate on the server side and reboot it.

After that, make a first connection attempt by running the agent; it will be rejected. Then go to the certificates tab and trust the ENG cert by right-clicking on it, without copying the files.

Then stop and run the agent again.
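
(A minimal sketch of the volume cleanup step, assuming the compose file shared above; docker-compose down -v removes the named volumes declared in it:)

# stop the stack and remove its named volumes
docker-compose -f docker-compose.yml down -v
# verify no stale volumes remain
docker volume ls
# bring the stack up again
docker-compose -f docker-compose.yml up -d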

The expiration daemon is not up to us; it comes from the OPC UA client library. Basically it interrupts the session, but it is not a big deal: the agent will establish the connection again automatically.

chumvan commented 3 years ago

@MattiaMarzano-Eng I am still not able to connect to the server after those steps. I will try to reinstall and start from scratch in a VM for now; let's see.

Can you share your AGECONF/config.json file? Did you generate it with the mapping tool? I had problems with the mapping tool, so I had to modify the config.json file manually. This is my current AGECONF/config.json file:

{
  "logLevel" : "DEBUG",
  "contextBroker" : {
    "host" : "orion",
    "port" : 1026
  },
  "server" : {
    "port" : 4001,
    "baseRoot" : "/"
  },
  "deviceRegistry" : {
    "type" : "memory"
  },
  "mongodb" : {
    "host" : "iotmongo",
    "port" : "27017",
    "db" : "iotagent",
    "retries" : 5,
    "retryTime" : 5
  },
  "types" : {
    "7:Simulation" : {
      "service" : "opcua_car",
      "subservice" : "/demo",
      "active" : [ {
        "name" : "7:Counter",
        "type" : "Number"
      }, {
        "name" : "7:Random",
        "type" : "Number"
      }, {
        "name" : "7:Sawtooth",
        "type" : "Number"
      }, {
        "name" : "7:Sinusoid",
        "type" : "Number"
      }, {
        "name" : "7:Square",
        "type" : "Number"
      }, {
        "name" : "7:Triangle",
        "type" : "Number"
      } ],
      "lazy" : [ ],
      "commands" : [ ]
    }
  },
  "browseServerOptions" : null,
  "service" : "opcua_car",
  "subservice" : "/demo",
  "providerUrl" : "http://iotage:4001",
  "pollingExpiration" : "200000",
  "pollingDaemonFrequency" : "20000",
  "deviceRegistrationDuration" : "P1M",
  "defaultType" : null,
  "contexts" : [ {
    "id" : "age01_7:Simulation",
    "type" : "7:Simulation",
    "service" : "opcua_car",
    "subservice" : "/demo",
    "polling" : false,
    "mappings" : [ {
      "ocb_id" : "7:Counter",
      "opcua_id" : "ns=7;i=1001",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Random",
      "opcua_id" : "ns=7;i=1002",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Sawtooth",
      "opcua_id" : "ns=7;i=1003",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Sinusoid",
      "opcua_id" : "ns=7;i=1004",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Square",
      "opcua_id" : "ns=7;i=1005",
      "object_id" : null,
      "inputArguments" : [ ]
    }, {
      "ocb_id" : "7:Triangle",
      "opcua_id" : "ns=7;i=1006",
      "object_id" : null,
      "inputArguments" : [ ]
    } ]
  } ],
  "contextSubscriptions" : [ ]
}

Could this be the problem ?

MattiaMarzano-Eng commented 3 years ago

I'm using the following config.json:

{
  "logLevel" : "INFO",
  "contextBroker" : {
    "host" : "orion",
    "port" : 1026
  },
  "server" : {
    "port" : 4001,
    "baseRoot" : "/"
  },
  "deviceRegistry" : {
    "type" : "memory"
  },
  "mongodb" : {
    "host" : "iotmongo",
    "port" : "27017",
    "db" : "iotagent",
    "retries" : 5,
    "retryTime" : 5
  },
  "types" : {
    "Device" : {
      "service" : "opcua_car",
      "subservice" : "/demo",
      "active" : [ ],
      "lazy" : [ ],
      "commands" : [ ]
    }
  },
  "browseServerOptions" : null,
  "service" : "opcua_car",
  "subservice" : "/demo",
  "providerUrl" : "http://iotage:4001",
  "pollingExpiration" : "200000",
  "pollingDaemonFrequency" : "20000",
  "deviceRegistrationDuration" : "P1M",
  "defaultType" : null,
  "contexts" : [ ],
  "contextSubscriptions" : [ ]
}

If you cannot connect the agent to the server, it is not a config.json issue. As I said in another thread, a new mapping tool is under development and will be released in the coming months. In the meantime, if the mapping tool isn't able to connect to the server, you can try to write config.json manually, or you can put the one I shared above into AGECONF just to skip the mapping tool run.
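
(Once the agent does connect, a quick way to verify that the mapped attributes reach Orion is a plain NGSIv2 query using the service and subservice from the configs above; this is a sketch, assuming Orion is published on port 1026 as in the compose files:)

# list the entities created by the agent for the opcua_car service
curl -s "http://localhost:1026/v2/entities" \
  -H "fiware-service: opcua_car" \
  -H "fiware-servicepath: /demo"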