diff --git a/README.md b/README.md index deb2186..20e6c43 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,30 @@ -# gseps-image-acquisition +# This code template publishes MQTT messages in an SDT Cloud environment. -Your repository description \ No newline at end of file +# Installing the package +- The code uses the sdtcloudpubsub package. Download it with the command below: +```bash +$ pip install sdtcloudpubsub +``` + +# Writing code +- Implement the behavior you want inside the runAction function. +- The msg variable holds the message to be published. +- The pubMessage function of sdtcloudpubsub takes a dictionary containing the message body and publishes it (see the sketch at the end of this README). + +# Publishing a message +- If you write code that publishes a message with the following variable... +```python +msg = {
    "message": "Hello World"
}
+``` +- ...the message that is actually published looks like this: +```python +msg = {
    "data": {
        "message": "Hello World"
    },
    "timestamp": 12312311...
}
+```
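+
+# Example
+- A minimal sketch of how the pieces above fit together, assuming the package exposes a client object with a pubMessage method; the client setup shown here is illustrative, so check the sdtcloudpubsub documentation for the exact API:
+```python
import sdtcloudpubsub

# Hypothetical client setup; the real constructor/initialization may differ.
sdtcloud = sdtcloudpubsub.sdtcloudpubsub()

def runAction():
    # msg is the payload to publish; pubMessage wraps it in the
    # {"data": ..., "timestamp": ...} envelope shown above before sending.
    msg = {
        "message": "Hello World"
    }
    sdtcloud.pubMessage(msg)

if __name__ == "__main__":
    runAction()
+```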
diff --git a/acquisition_config.json b/acquisition_config.json new file mode 100644 index 0000000..52b80cf --- /dev/null +++ b/acquisition_config.json @@ -0,0 +1,23 @@ +{ +    "amqp_url": "127.0.0.1", +    "amqp_port": 5672, +    "amqp_vhost": "/", +    "amqp_id": "username", +    "amqp_pw": "password", +    "amqp_queue": "gseps-mq", +    "amqp_ReadyQ": "gseps-ready", +    "AccessKey": "AKIAQB3AM4WP76ND6Z6C", +    "SecretKey": "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", +    "Boto3SignatureVersion": "s3v4", +    "Boto3RegionName": "ap-northeast-2", +    "gseps_stage_bucket":"stage-gs-eps-frontend", +    "BucketKey": "sdt/mv_bottom_ash_particle_size", +    "S3BucketName": "gseps-data", +    "image_save_path": "/home/sdt/Workspace/gseps-edge/image_bucket", +    "laser_save_path": "/home/sdt/Workspace/gseps_edge/laser/laser_value", +    "fail_log_file": "/home/sdt/Workspace/gseps_edge/image_acquisition/logs/fail_message.log", +    "success_log_file": "/home/sdt/Workspace/gseps_edge/image_acquisition/logs/acquisition.log", +    "capture_single_time_interval": 1, +    "capture_batch_time_interval": 120, +    "laser_device_path": "/dev/ttyS0" +} diff --git a/acquisition_config_install.json b/acquisition_config_install.json new file mode 100644 index 0000000..00a9b2b --- /dev/null +++ b/acquisition_config_install.json @@ -0,0 +1,23 @@ +{ +    "amqp_url": "25.10.82.120", +    "amqp_port": 5672, +    "amqp_vhost": "/", +    "amqp_id": "sdt", +    "amqp_pw": "251327", +    "amqp_queue": "gseps-mq", +    "amqp_ReadyQ": "gseps-ready", +    "AccessKey": "AKIAQB3AM4WP76ND6Z6C", +    "SecretKey": "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", +    "Boto3SignatureVersion": "s3v4", +    "Boto3RegionName": "ap-northeast-2", +    "gseps_stage_bucket":"stage-gs-eps-frontend", +    "BucketKey": "sdt/mv_bottom_ash_particle_size", +    "S3BucketName": "gseps-data", +    "image_save_path": "/home/sdt/Workspace/gseps_edge/image_acquisition/capture/", +    "laser_save_path": "/home/sdt/Workspace/gseps_edge/laser/laser_value", +    "fail_log_file": "/home/sdt/Workspace/gseps_edge/image_acquisition/logs/fail_message.log", +    "success_log_file": "/home/sdt/Workspace/gseps_edge/image_acquisition/logs/acquisition.log", +    "capture_single_time_interval": 1, +    "capture_batch_time_interval": 120, +    "laser_device_path": "/dev/ttyS0" +} diff --git a/config.json b/config.json new file mode 100644 index 0000000..13eb577 --- /dev/null +++ b/config.json @@ -0,0 +1,4 @@ +{ +    "KEY1": "VALUE1", +    "KEY2": "VALUE2" +} \ No newline at end of file diff --git a/framework.yaml b/framework.yaml new file mode 100644 index 0000000..4163b13 --- /dev/null +++ b/framework.yaml @@ -0,0 +1,12 @@ +version: bwc/v2 # bwc version. +spec: +  appName: gseps-img-acq # Name of the app. +  runFile: main.py # Entry-point file of the app. +  env: +    bin: python3 # Binary used to run the app (varies by device, so verify before setting). +    virtualEnv: gseps-env # Name of the virtual environment to use. +    package: requirements.txt # File listing the Python packages to install (default: requirements.txt). +  runtime: python3.8 +stackbase: +  tagName: v0.0.1 # Release tag name in Stackbase (gitea). +  repoName: gseps-image-acquisition # Repository name in Stackbase (gitea). diff --git a/laser_test.py b/laser_test.py new file mode 100644 index 0000000..85f111d --- /dev/null +++ b/laser_test.py @@ -0,0 +1,30 @@ +import time +import serial + + +def hex_to_int(data): +    # Byte 2 of the response carries the payload length; the bytes after it +    # hold the distance as a big-endian integer. +    data_length = data[2] +    hex_distance = data[3:3+data_length] + +    return int.from_bytes(hex_distance, byteorder='big') + +for i in range(1000): +    # The port is re-opened on every iteration so one bad read cannot wedge the test. +    ser = serial.Serial( +        port="/dev/ttyS0", +        baudrate=19200, +        timeout=1, +        bytesize=serial.EIGHTBITS, +        parity=serial.PARITY_NONE, +        stopbits=serial.STOPBITS_ONE +    ) + +    # Modbus-RTU-style "read one input register" request; the trailing two +    # bytes appear to be the frame's CRC-16. +    req = b'\x01\x04\x00\x00\x00\x01\x31\xca' +    time.sleep(0.3) +    ser.write(req) +    result = ser.read(6) + +    distance = hex_to_int(result) +    print(f'Laser Distance: {distance}mm') + +    ser.close() + diff --git a/light_test.py b/light_test.py new file mode 100644 index 0000000..80fa9f7 --- /dev/null +++ b/light_test.py @@ -0,0 +1,91 @@ +import time +import serial +import argparse + +#======Serial Variable======= +# SERIAL_PATH = "/dev/serial/by-path/pci-0000:00:14.0-usb-0:2:1.0-port0" +SERIAL_PATH = "/dev/ttyUSB1" +BAUD_RATE = 19200 +PARITY='N' +STOP_BITS=1 +BYTE_SIZE=8 +TIMEOUT=1 +#============================ + +ser = serial.Serial(port=SERIAL_PATH, baudrate=BAUD_RATE, parity=PARITY, stopbits=STOP_BITS, bytesize=BYTE_SIZE, timeout=TIMEOUT) + + +def light_control(args, ch_num:int): +    global ser + +    ch_list=[0, 16, 17, 18, 19] +    bright = int(args.brightness) + +    if not 1 <= ch_num <= 4: +        print("Channel range : 1~4 // not 0 ") +        return False +    try: +        # Frame layout: start byte (0x05), channel id, brightness, then a +        # one-byte checksum of (channel id + brightness). +        START=(5 & 0xFF).to_bytes(1, byteorder='big') +        CH_NUM=(ch_list[ch_num] & 0xFF).to_bytes(1, byteorder='big') +        BRIGHT=(bright & 0xFF).to_bytes(1, byteorder='big') +        CHK_SUM=(ch_list[ch_num] + bright & 0xFF).to_bytes(1, byteorder='big') + +        ser.write(START) +        ser.write(CH_NUM) +        ser.write(BRIGHT) +        ser.write(CHK_SUM) + +    except Exception as e: +        print(e) + +    return True + + +if __name__ == '__main__': +    parser = argparse.ArgumentParser() +    parser.add_argument('--brightness', help='adjustment led brightness', required=True) + +    args = parser.parse_args() + +    light_control(args, 1) + + +# Scratch code from earlier tests (kept for reference; uses the old +# light_control(ser, channel, brightness) signature): +# print(light_control(ser,1,30)) +# time.sleep(1) +# print(light_control(ser,1,0)) +# time.sleep(1) +# print(light_control(ser,1,100)) +# time.sleep(1) +# print(light_control(ser,1,0)) +# time.sleep(1) +# print(light_control(ser,1,240)) +# +# +# #for i in range(500): +# while True: +#     for i in range(0,50): +#         light_control(ser,1,i*5) +#         time.sleep(0.02) +# +#     for i in range(0,50): +#         light_control(ser,1,255-i*5) +#         time.sleep(0.02) +#     print(i) + + + +# turn_on = [b"\x05", b"\x10", b'\xF0', b'\x00'] +# turn_off = [b'\x05', b'\x10', b'\x00', b'\x10'] +# +# rqt_status = [b'\x05', b'\x14', b'\x00', b'\x14'] +# remote_on = [b'\x05', b'\x29', b'\x01', b'\x2A'] +# +# try: +#     result = ser.write(b''.join(turn_off)) +#     print(result) +#     time.sleep(2) +#     result = ser.write(b''.join(turn_on)) +#     print(result) +# except Exception as e: +#     print(f"Error {e}") +#     port.close() + diff --git a/logs/gseps.log b/logs/gseps.log new file mode 100644 index 0000000..8f9a383 --- /dev/null +++ b/logs/gseps.log @@ -0,0 +1,248 @@ +2024-05-29 19:42:53,451 - root - INFO - Cam Initialize.
+2024-05-29 19:42:53,930 - root - ERROR - Traceback (most recent call last): + File "main.py", line 130, in open_camera + if device_manager.Devices()[0].IsOpenable() and device_manager.Devices()[1].IsOpenable(): + File "/usr/local/lib/python3.8/dist-packages/ids_peak/ids_peak.py", line 7401, in __getitem__ + return _ids_peak_python_interface.VectorDeviceDescriptor___getitem__(self, *args) +IndexError: index out of range + +2024-05-29 21:01:54,987 - root - INFO - Cam Initialize. +2024-05-29 21:01:55,995 - root - INFO - CAM Load. +2024-05-29 21:01:57,108 - root - INFO - laser_value : 258mm +2024-05-29 21:01:57,108 - root - INFO - Capture Start at 258 +2024-05-29 21:01:57,228 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:57,228 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:57,229 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). +2024-05-29 21:01:57,231 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:57,231 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:57,231 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:57,231 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:57,232 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:57,232 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:57,232 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:57,232 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:57,232 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:57,233 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:57,233 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:57,233 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:57,233 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:57,233 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:57,233 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:01:57,586 - root - INFO - laser_value : 258mm +2024-05-29 21:01:57,587 - root - INFO - Capture Start at 258 +2024-05-29 21:01:57,691 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:57,691 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:57,692 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:01:57,693 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:57,693 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:57,693 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:57,693 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:57,694 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:57,694 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:57,694 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:57,694 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:57,694 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:57,694 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:57,694 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:57,694 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:57,694 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:57,694 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:57,694 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:01:58,049 - root - INFO - laser_value : 258mm +2024-05-29 21:01:58,050 - root - INFO - Capture Start at 258 +2024-05-29 21:01:58,148 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:58,148 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:58,148 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:01:58,150 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:58,150 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:58,150 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:58,150 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:58,150 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:58,151 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:58,151 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:58,151 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:58,151 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:58,151 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:58,151 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:58,151 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:58,151 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:58,151 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:58,151 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:01:58,505 - root - INFO - laser_value : 258mm +2024-05-29 21:01:58,505 - root - INFO - Capture Start at 258 +2024-05-29 21:01:58,591 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:58,591 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:58,591 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:01:58,593 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:58,593 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:58,593 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:58,593 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:58,594 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:58,594 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:58,594 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:58,594 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:58,594 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:58,594 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:58,594 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:58,594 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:58,594 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:58,594 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:58,594 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:01:58,949 - root - INFO - laser_value : 258mm +2024-05-29 21:01:58,949 - root - INFO - Capture Start at 258 +2024-05-29 21:01:59,037 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:59,037 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:59,037 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:01:59,039 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:59,039 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:59,039 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:59,039 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:59,040 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:59,040 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:59,040 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:59,040 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:59,040 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:59,040 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:59,040 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:59,040 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:59,040 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:59,040 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:59,040 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:01:59,393 - root - INFO - laser_value : 258mm +2024-05-29 21:01:59,393 - root - INFO - Capture Start at 258 +2024-05-29 21:01:59,483 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:59,483 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:59,483 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:01:59,484 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:59,484 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:59,485 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:59,485 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:59,485 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:59,485 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:59,485 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:59,485 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:59,486 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:59,486 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:59,486 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:59,486 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:59,486 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:59,486 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:59,486 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:01:59,841 - root - INFO - laser_value : 258mm +2024-05-29 21:01:59,841 - root - INFO - Capture Start at 258 +2024-05-29 21:01:59,940 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:01:59,940 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:01:59,941 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:01:59,942 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:01:59,942 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:01:59,942 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:01:59,942 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:01:59,943 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:01:59,943 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:01:59,943 - pika.channel - INFO - Received on params=>> +2024-05-29 21:01:59,943 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:01:59,943 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:01:59,943 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:01:59,943 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:01:59,943 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:59,943 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:01:59,943 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:01:59,943 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:02:00,296 - root - INFO - laser_value : 258mm +2024-05-29 21:02:00,296 - root - INFO - Capture Start at 258 +2024-05-29 21:02:00,390 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:02:00,390 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:02:00,391 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:02:00,392 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:02:00,392 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:02:00,392 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:02:00,392 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:02:00,393 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:02:00,393 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:02:00,393 - pika.channel - INFO - Received on params=>> +2024-05-29 21:02:00,393 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:02:00,394 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:02:00,394 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:02:00,394 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:02:00,394 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:00,394 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:00,394 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:02:00,394 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:02:00,747 - root - INFO - laser_value : 258mm +2024-05-29 21:02:00,748 - root - INFO - Capture Start at 258 +2024-05-29 21:02:00,849 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:02:00,849 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:02:00,850 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:02:00,851 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:02:00,851 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:02:00,851 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:02:00,851 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:02:00,852 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:02:00,852 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:02:00,852 - pika.channel - INFO - Received on params=>> +2024-05-29 21:02:00,852 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:02:00,852 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:02:00,852 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:02:00,852 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:02:00,852 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:00,852 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:00,852 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:02:00,853 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:02:01,207 - root - INFO - laser_value : 258mm +2024-05-29 21:02:01,207 - root - INFO - Capture Start at 258 +2024-05-29 21:02:01,301 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:02:01,301 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:02:01,301 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:02:01,303 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:02:01,303 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:02:01,303 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:02:01,303 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:02:01,304 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:02:01,304 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:02:01,304 - pika.channel - INFO - Received on params=>> +2024-05-29 21:02:01,304 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:02:01,304 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:02:01,304 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:02:01,304 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:02:01,304 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:01,304 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:01,305 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:02:01,305 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:02:01,658 - root - INFO - laser_value : 258mm +2024-05-29 21:02:01,659 - root - INFO - Capture Start at 258 +2024-05-29 21:02:01,762 - pika.adapters.utils.connection_workflow - INFO - Pika version 1.3.2 connecting to ('127.0.0.1', 5672) +2024-05-29 21:02:01,762 - pika.adapters.utils.io_services_utils - INFO - Socket connected: +2024-05-29 21:02:01,762 - pika.adapters.utils.connection_workflow - INFO - Streaming transport linked up: (, _StreamingProtocolShim: params=>). 
+2024-05-29 21:02:01,764 - pika.adapters.utils.connection_workflow - INFO - AMQPConnector - reporting success: params=> +2024-05-29 21:02:01,764 - pika.adapters.utils.connection_workflow - INFO - AMQPConnectionWorkflow - reporting success: params=> +2024-05-29 21:02:01,764 - pika.adapters.blocking_connection - INFO - Connection workflow succeeded: params=> +2024-05-29 21:02:01,764 - pika.adapters.blocking_connection - INFO - Created channel=1 +2024-05-29 21:02:01,765 - pika.adapters.blocking_connection - INFO - Closing connection (200): Normal shutdown +2024-05-29 21:02:01,765 - pika.channel - INFO - Closing channel (200): 'Normal shutdown' on params=>> +2024-05-29 21:02:01,765 - pika.channel - INFO - Received on params=>> +2024-05-29 21:02:01,765 - pika.connection - INFO - Closing connection (200): 'Normal shutdown' +2024-05-29 21:02:01,765 - pika.adapters.utils.io_services_utils - INFO - Aborting transport connection: state=1; +2024-05-29 21:02:01,765 - pika.adapters.utils.io_services_utils - INFO - _AsyncTransportBase._initate_abort(): Initiating abrupt asynchronous transport shutdown: state=1; error=None; +2024-05-29 21:02:01,765 - pika.adapters.utils.io_services_utils - INFO - Deactivating transport: state=1; +2024-05-29 21:02:01,765 - pika.connection - INFO - AMQP stack terminated, failed to connect, or aborted: opened=True, error-arg=None; pending-error=ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:01,765 - pika.connection - INFO - Stack terminated due to ConnectionClosedByClient: (200) 'Normal shutdown' +2024-05-29 21:02:01,765 - pika.adapters.utils.io_services_utils - INFO - Closing transport socket and unlinking: state=3; +2024-05-29 21:02:01,765 - pika.adapters.blocking_connection - INFO - User-initiated close: result=BlockingConnection__OnClosedArgs(connection=>, error=ConnectionClosedByClient: (200) 'Normal shutdown') +2024-05-29 21:02:02,118 - root - INFO - laser_value : 258mm +2024-05-29 21:02:02,118 - root - INFO - Capture Start at 258 +2024-05-30 13:58:59,073 - botocore.credentials - INFO - Found credentials in shared credentials file: ~/.aws/credentials +2024-05-30 13:58:59,125 - root - INFO - Cam Initialize. +2024-05-30 13:58:59,624 - root - INFO - CAM Load. +2024-05-30 13:58:59,625 - root - ERROR - Traceback (most recent call last): + File "main.py", line 149, in prepare_acquisition + data_streams = m_device.DataStreams() +AttributeError: 'NoneType' object has no attribute 'DataStreams' + +2024-05-30 14:02:22,479 - botocore.credentials - INFO - Found credentials in shared credentials file: ~/.aws/credentials +2024-05-30 14:02:22,525 - root - INFO - Cam Initialize. +2024-05-30 14:02:23,048 - root - INFO - CAM Load. 
+2024-05-30 14:02:23,049 - root - ERROR - Traceback (most recent call last): + File "main.py", line 149, in prepare_acquisition + data_streams = m_device.DataStreams() +AttributeError: 'NoneType' object has no attribute 'DataStreams' + diff --git a/main.py b/main.py new file mode 100644 index 0000000..771e294 --- /dev/null +++ b/main.py @@ -0,0 +1,503 @@ +#!/usr/bin/python3 +import os +import sys +import json +import time +import datetime +from datetime import timedelta +import traceback +import logging.handlers + +import cv2 +import glob +import pandas as pd +import pika +import boto3 +import serial + +os.environ['GENICAM_GENTL64_PATH'] = '/opt/ids-peak_2.9.0.0-48_amd64/lib/ids/cti/' + +from ids_peak import ids_peak as peak +from ids_peak_ipl import ids_peak_ipl + +############################################### +# Logger Setting # +############################################### +logger = logging.getLogger() +logger.setLevel(logging.INFO) +formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + +log_fileHandler = logging.handlers.RotatingFileHandler( + filename=f"./logs/gseps.log", + maxBytes=1024000, + backupCount=3, + mode='a') + +log_fileHandler.setFormatter(formatter) +logger.addHandler(log_fileHandler) + +############################################### +# Config # +############################################### +with open('./acquisition_config.json', 'r') as f: + info = json.load(f) + +LASER_SAVE_PATH = info['laser_save_path'] +LASER_DEVICE_PATH = info['laser_device_path'] + +S3_BUCKET = info['S3BucketName'] +GSEPS_STAGE_BUCKET = info['gseps_stage_bucket'] + +S3_UPLOAD_KEY = info['BucketKey'] + +s3 = boto3.resource('s3') +sdt_s3 = boto3.resource('s3', + aws_access_key_id=info['AccessKey'], + aws_secret_access_key=info['SecretKey'], + region_name=info['Boto3RegionName']) + +############################################### +# Camera Variable # +############################################### +m_device = None +m_dataStream = None +m_node_map_remote_device = None + +m_device_2 = None +m_dataStream_2 = None +m_node_map_remote_device_2 = None + + +class Publisher: + def __init__(self): + self.__url = info['amqp_url'] + self.__port = info['amqp_port'] + self.__vhost = info['amqp_vhost'] + self.__cred = pika.PlainCredentials(info['amqp_id'], info['amqp_pw']) + self.__queue = info['amqp_queue'] + self.__ReadyQ = info['amqp_ReadyQ'] + + def check_server_state(self): + conn = pika.BlockingConnection(pika.ConnectionParameters(self.__url, + self.__port, + self.__vhost, + self.__cred)) + chan = conn.channel() + method, properties, body = chan.basic_get(queue=self.__ReadyQ, + auto_ack=True) + + if method: + chan.queue_purge(queue=self.__ReadyQ) + conn.close() + return True + + conn.close() + return False + + def pub(self, body: dict): + try: + conn = pika.BlockingConnection(pika.ConnectionParameters(self.__url, + self.__port, + self.__vhost, + self.__cred)) + chan = conn.channel() + chan.basic_publish(exchange='', + routing_key=self.__queue, + body=json.dumps(body)) + conn.close() + return + except Exception as e: + # add error alarm + logger.error(traceback.format_exc()) + + +def open_camera(): + global m_device, m_node_map_remote_device, m_device_2, m_node_map_remote_device_2 + + try: + # Create instance of the device manager + device_manager = peak.DeviceManager.Instance() + + # Update the device manager + device_manager.Update() + + # Return if no device was found + if device_manager.Devices().empty(): + return False + + # open the first openable device in the 
device manager's device list +        device_count = device_manager.Devices().size() +        # Two cameras are required; bail out early instead of letting the +        # Devices()[1] access below raise IndexError (see logs/gseps.log). +        if device_count < 2: +            return False + +        if device_manager.Devices()[0].IsOpenable() and device_manager.Devices()[1].IsOpenable(): +            m_device = device_manager.Devices()[1].OpenDevice(peak.DeviceAccessType_Control) +            # Get NodeMap of the RemoteDevice for all accesses to the GenICam NodeMap tree +            m_node_map_remote_device = m_device.RemoteDevice().NodeMaps()[0] + +            m_device_2 = device_manager.Devices()[0].OpenDevice(peak.DeviceAccessType_Control) +            m_node_map_remote_device_2 = m_device_2.RemoteDevice().NodeMaps()[0] +            return True + +    except Exception as e: +        print(e) +        logger.error(traceback.format_exc()) + +    return False + + +def prepare_acquisition(): +    global m_dataStream, m_dataStream_2 +    try: +        data_streams = m_device.DataStreams() +        data_streams_2 = m_device_2.DataStreams() +        if data_streams.empty() or data_streams_2.empty(): +            # no data streams available +            return False +        m_dataStream = m_device.DataStreams()[0].OpenDataStream() +        m_dataStream_2 = m_device_2.DataStreams()[0].OpenDataStream() +        return True + +    except Exception as e: +        print(e) +        logger.error(traceback.format_exc()) + +    return False + + +def set_roi(width, height): +    # Note: this configures the ROI on the first camera's node map only. +    try: +        # Center the ROI on the 4512x4512 sensor. +        offset_x = int((4512 - width)/2) +        offset_y = int((4512 - height)/2) +        # Get the minimum ROI and set it. After that there are no size restrictions anymore +        x_min = m_node_map_remote_device.FindNode("OffsetX").Minimum() +        y_min = m_node_map_remote_device.FindNode("OffsetY").Minimum() +        w_min = m_node_map_remote_device.FindNode("Width").Minimum() +        h_min = m_node_map_remote_device.FindNode("Height").Minimum() + +        m_node_map_remote_device.FindNode("OffsetX").SetValue(x_min) +        m_node_map_remote_device.FindNode("OffsetY").SetValue(y_min) +        m_node_map_remote_device.FindNode("Width").SetValue(w_min) +        m_node_map_remote_device.FindNode("Height").SetValue(h_min) + +        # Get the maximum ROI values +        x_max = m_node_map_remote_device.FindNode("OffsetX").Maximum() +        y_max = m_node_map_remote_device.FindNode("OffsetY").Maximum() +        w_max = m_node_map_remote_device.FindNode("Width").Maximum() +        h_max = m_node_map_remote_device.FindNode("Height").Maximum() + +        if (offset_x < x_min) or (offset_y < y_min) or (offset_x > x_max) or (offset_y > y_max): +            return False +        elif (width < w_min) or (height < h_min) or ((offset_x + width) > w_max) or ((offset_y + height) > h_max): +            return False +        else: +            # Now, set final AOI +            m_node_map_remote_device.FindNode("OffsetX").SetValue(offset_x) +            m_node_map_remote_device.FindNode("OffsetY").SetValue(offset_y) +            m_node_map_remote_device.FindNode("Width").SetValue(width) +            m_node_map_remote_device.FindNode("Height").SetValue(height) + +            return True +    except Exception as e: +        logger.error(traceback.format_exc()) + +    return False + + +def alloc_and_announce_buffers(): +    try: +        if m_dataStream and m_dataStream_2: +            # cam1 +            # Flush queue and prepare all buffers for revoking +            m_dataStream.Flush(peak.DataStreamFlushMode_DiscardAll) + +            # Clear all old buffers +            for buffer in m_dataStream.AnnouncedBuffers(): +                m_dataStream.RevokeBuffer(buffer) + +            payload_size = m_node_map_remote_device.FindNode("PayloadSize").Value() + +            # Get number of minimum required buffers +            num_buffers_min_required = m_dataStream.NumBuffersAnnouncedMinRequired() + +            # Alloc buffers +            for count in range(num_buffers_min_required): +                buffer = m_dataStream.AllocAndAnnounceBuffer(payload_size) +                m_dataStream.QueueBuffer(buffer) + +            # cam2 +            m_dataStream_2.Flush(peak.DataStreamFlushMode_DiscardAll) + +            for buffer in m_dataStream_2.AnnouncedBuffers():
+                m_dataStream_2.RevokeBuffer(buffer) + +            payload_size_2 = m_node_map_remote_device_2.FindNode("PayloadSize").Value() +            num_buffers_min_required_2 = m_dataStream_2.NumBuffersAnnouncedMinRequired() + +            for count in range(num_buffers_min_required_2): +                buffer = m_dataStream_2.AllocAndAnnounceBuffer(payload_size_2) +                m_dataStream_2.QueueBuffer(buffer) +        return True +    except Exception as e: +        logger.error(traceback.format_exc()) + +    return False + + +def start_acquisition(): +    try: +        m_dataStream.StartAcquisition(peak.AcquisitionStartMode_Default, peak.DataStream.INFINITE_NUMBER) +        m_node_map_remote_device.FindNode("TLParamsLocked").SetValue(1) +        m_node_map_remote_device.FindNode("AcquisitionStart").Execute() +        # m_node_map_remote_device.FindNode("DeviceRegistersStreamingStart").Execute() + +        m_dataStream_2.StartAcquisition(peak.AcquisitionStartMode_Default, peak.DataStream.INFINITE_NUMBER) +        m_node_map_remote_device_2.FindNode("TLParamsLocked").SetValue(1) +        m_node_map_remote_device_2.FindNode("AcquisitionStart").Execute() +        # m_node_map_remote_device.FindNode("DeviceRegistersStreamingStart").Execute() + +        return True +    except Exception as e: +        logger.error(traceback.format_exc()) + +    return False + + +# check alarm +def check_distance(): +    while True: +        ser = None +        try: +            ser = serial.Serial(LASER_DEVICE_PATH, baudrate=19200, timeout=1) +            # Same Modbus-RTU-style request as laser_test.py; the raw reading +            # is divided by 10 to get millimetres. +            req = b'\x01\x04\x00\x00\x00\x01\x31\xca' +            ser.write(req) + +            result = ser.read(6) +            data_length = result[2] +            hex_distance = result[3:3+data_length] +            distance = int((int.from_bytes(hex_distance, byteorder='big'))/10) +            logger.info(f"laser_value : {distance}mm") + +            ser.close() + +            return distance + +        except KeyboardInterrupt: +            if ser is not None: +                ser.close() +            exit() + +        except Exception as e: +            # ser may still be None here if opening the port itself failed. +            if ser is not None: +                ser.close() +            logger.error(f"Error : {e}") +            continue + +def upload_to_s3(company, image_list=None): +    today = datetime.date.today() + +    if company == 'sdt': +        for path in image_list: +            image_name = path.split('/')[-1] + +            date = image_name.split('-')[0] +            folder = f'{date[:4]}-{date[4:6]}-{date[6:]}' + +            sdt_s3.Bucket(S3_BUCKET).upload_file(path, f'{folder}/{image_name}') + +            os.remove(path) + +    elif company == 'gseps': +        yesterday = today - timedelta(days=1) +        file_name = f'{yesterday.year:04d}{yesterday.month:02d}{yesterday.day:02d}' + +        laser_csv = sorted(glob.glob(f'{LASER_SAVE_PATH}/{file_name}*.csv')) +        total_laser_info = pd.DataFrame() +        for path in laser_csv: +            try: +                csv = pd.read_csv(path) +                total_laser_info = pd.concat([total_laser_info, csv], ignore_index=True) +                #os.remove(path) + +            except pd.errors.EmptyDataError as e: +                logger.error(f"Error: {path} pandas empty data error, {e}") + +        laser_save_path = f'{LASER_SAVE_PATH}/{yesterday}-mv_laser_height.csv' +        total_laser_info.to_csv(laser_save_path, index=False) +        s3.Bucket(GSEPS_STAGE_BUCKET).upload_file(laser_save_path, f'{S3_UPLOAD_KEY}/year={yesterday.year}/month={yesterday.month:02d}/day={yesterday.day:02d}/mv_laser_height.csv') +        sdt_s3.Bucket('gseps-daily').upload_file(laser_save_path, f'{yesterday}/mv_laser_height.csv') + +        #os.remove(laser_save_path) + + +def check_hour_for_laser(current_time): +    if current_time.minute == 0 and current_time.second == 0: +        return True + +    return False + + +def main(): +    logger.info(f'Cam Initialize.') + +    # initialize library +    peak.Library.Initialize() + +    # add logger error +    if not open_camera(): +        # error +        print(1) +        sys.exit(-1) + + +    logger.info(f'CAM Load.') + +    if not
prepare_acquisition(): + # error + print(2) + sys.exit(-2) + + + if not set_roi(1208, 1024): + # error + print(3) + sys.exit(-3) + + if not alloc_and_announce_buffers(): + # error + print(4) + sys.exit(-4) + + + if not start_acquisition(): + # error + print(5) + sys.exit(-5) + + + image_info = {} + publisher = Publisher() + + laser_history = pd.DataFrame() + laser_count = 0 + while True: + try: + now_datetime = datetime.datetime.now() + now_unix = int(now_datetime.timestamp()) + now = now_datetime.strftime("%Y%m%d-%H%M%S%f")[:-3] + + laser_value = check_distance() + event_flag = 0 + if 50 <= laser_value < 500: + laser_count = 0 + event_flag = 1 + logger.info(f"Capture Start at {laser_value}") + + # Get buffer from device's DataStream. Wait 5000 ms. The buffer is automatically locked until it is queued again. + ################# CHANGE #################### + + #cam1 + buffer = m_dataStream.WaitForFinishedBuffer(1000) + image = ids_peak_ipl.Image.CreateFromSizeAndBuffer( + buffer.PixelFormat(), + buffer.BasePtr(), + buffer.Size(), + buffer.Width(), + buffer.Height() + ) + image_processed = image.ConvertTo(ids_peak_ipl.PixelFormatName_BGRa8, ids_peak_ipl.ConversionMode_Fast) + # Queue buffer again + m_dataStream.QueueBuffer(buffer) + ############################################### + img_numpy = image_processed.get_numpy() + + # cam2 + buffer = m_dataStream_2.WaitForFinishedBuffer(1000) + image = ids_peak_ipl.Image.CreateFromSizeAndBuffer( + buffer.PixelFormat(), + buffer.BasePtr(), + buffer.Size(), + buffer.Width(), + buffer.Height() + ) + image_processed_2 = image.ConvertTo(ids_peak_ipl.PixelFormatName_BGRa8, ids_peak_ipl.ConversionMode_Fast) + # Queue buffer again + m_dataStream_2.QueueBuffer(buffer) + ############################################### + img_numpy_2 = image_processed_2.get_numpy() + + # save image + cam1_image_name = f'{now}_cam1.jpg' + cam2_image_name = f'{now}_cam2.jpg' + image_save_path = info['image_save_path'] + + if not os.path.exists(image_save_path): + os.makedirs(image_save_path) + + cam1_image_path = os.path.join(image_save_path, cam1_image_name) + cam2_image_path = os.path.join(image_save_path, cam2_image_name) + cv2.imwrite(cam1_image_path, img_numpy) + cv2.imwrite(cam2_image_path, img_numpy_2) + print('cam1 path: ',cam1_image_path) + print('cam2 path: ',cam2_image_path) + + # Upload image to MinIO(inference server) + image_list = [cam1_image_path, cam2_image_path] + upload_to_s3('sdt', image_list) + + # publish + image_info = {'to': {"cam1_image_name": cam1_image_name, + "cam2_image_name": cam2_image_name, + "laser": laser_value, + "bucket": "gseps-dataset"}} + + if publisher.check_server_state(): + publisher.pub(image_info) + print(image_info) + + ############## CHANGE ####################### + # Logger Contents Change // MinIO => S3 # + ############################################# + logger.info(f'Successfully Uploaded To S3!. cam1: {cam1_image_name}, cam2: {cam2_image_name}') + # print(f'Successfully Uploaded To Minio!. 
cam1: {cam1_image_name}, cam2: {cam2_image_name}') + + +            if 50 <= laser_value < 500: +                current_laser_info = pd.DataFrame([{"timestamp": now_unix, +                                                    "laser_value": laser_value, +                                                    "event": event_flag}]) + +                laser_history = pd.concat([laser_history, current_laser_info], ignore_index=True) + + +            current_time = datetime.datetime.now() +            # if check_hour_for_laser(current_time): +            if current_time.minute == 0: +                today = datetime.date.today() +                laser_save_hour = (current_time - timedelta(hours=1)).strftime('%Y%m%d-%H') +                hour = current_time.hour + +                date = laser_save_hour.split('-')[0] +                folder = f'{date[:4]}-{date[4:6]}-{date[6:]}' + +                # Write the past hour's laser history once per hour; the +                # file-exists check keeps this from re-running on every loop +                # iteration during minute 0. +                if not os.path.exists(f'{LASER_SAVE_PATH}/{laser_save_hour}.csv'): +                    laser_history.to_csv(f'{LASER_SAVE_PATH}/{laser_save_hour}.csv', index=False) +                    sdt_s3.Bucket('gseps-daily').upload_file(f'{LASER_SAVE_PATH}/{laser_save_hour}.csv', f'{folder}/laser/{laser_save_hour}.csv') +                    laser_history = pd.DataFrame() + +                #if hour == 0: +                #    upload_to_s3('gseps') + +            laser_count += 1 +            time.sleep(0.3) + +        except Exception as e: +            logger.error(traceback.format_exc()) +            # Break out of the loop (rather than exiting here) so the library +            # cleanup below actually runs. +            break + +    peak.Library.Close() +    sys.exit(0) + + +if __name__ == '__main__': +    main() + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..d1d5bf9 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,12 @@ +# List the packages that your app needs. +#sdtcloudpubsub + + +pandas +boto3 +pika +opencv-python +pyserial +/usr/local/share/ids/bindings/python/wheel/ids_peak-1.7.3.0-cp37-abi3-linux_x86_64.whl +/usr/local/share/ids/bindings/python/wheel/ids_peak_afl-1.4.0.0-cp37-abi3-linux_x86_64.whl +/usr/local/share/ids/bindings/python/wheel/ids_peak_ipl-1.11.0.0-cp38-cp38-linux_x86_64.whl diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/publisher.py b/utils/publisher.py new file mode 100644 index 0000000..e489284 --- /dev/null +++ b/utils/publisher.py @@ -0,0 +1,86 @@ +# ============================================================================== +# |                                                                            | +# |    @ 2023. SDT Inc.
all rights reserved | +# | Author : Jinsung Lee | +# | | +# ============================================================================== + +import json +import uuid +import logging.handlers + +from awscrt import mqtt +from awsiot import mqtt_connection_builder + +###################################################################### +# Save Log # +###################################################################### +logger = logging.getLogger() +logger.setLevel(logging.INFO) +formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') +log_max_size = 1024000 +log_file_count = 3 +log_fileHandler = logging.handlers.RotatingFileHandler( + filename=f"./logs/mqtt_publish.log", + maxBytes=log_max_size, + backupCount=log_file_count, + mode='a') + +log_fileHandler.setFormatter(formatter) +logger.addHandler(log_fileHandler) + +###################################################################### +# Config # +###################################################################### +with open('./mqtt_config.json', 'r') as f: + config = json.load(f) +with open('./config.json', encoding='UTF-8') as f: + jsonData = json.load(f) +with open(f'/etc/sdt/device.config/config.json', encoding='UTF-8') as f: + codeData = json.load(f) + +MQTT_ENDPOINT = config['mqtt_endpoint'] +MQTT_PORT = config['mqtt_port'] +MQTT_TOPIC = f"sdtcloud/{codeData['projectcode']}/{codeData['assetcode']}/app/{jsonData['appId']}/data" +MQTT_CERT = f"/etc/sdt/cert/{codeData['projectcode']}-certificate.pem" +MQTT_KEY = f"/etc/sdt/cert/{codeData['projectcode']}-private.pem" +MQTT_ROOT_CA = "/etc/sdt/cert/AmazonRootCA1.pem" +UUID = uuid.uuid4() + +received_count = 0 + + +class mqtt_client(): + def __init__(self): + self.mqtt_connection = mqtt_connection_builder.mtls_from_path( + endpoint=MQTT_ENDPOINT, + cert_filepath=MQTT_CERT, + pri_key_filepath=MQTT_KEY, + ca_filepath=MQTT_ROOT_CA, + client_id=f'blokworks-client-{UUID}') + + self.mqtt_connection.connect() + + def publish(self, message): + # self.mqtt_connection.connect() + pub_future, packet_id = self.mqtt_connection.publish(topic=MQTT_TOPIC, + payload=message, + qos=mqtt.QoS.AT_LEAST_ONCE) + # self.mqtt_connection.disconnect() + + def subscribe(self): + subscribe_future, packet_id = self.mqtt_connection.subscribe(topic=MQTT_TOPIC, + qos=mqtt.QoS.AT_LEAST_ONCE, + callback=on_message_received) + subscribe_result = subscribe_future.result() + + def disconnect(self): + self.mqtt_connection.disconnect() + + +def on_message_received(topic, payload, **kwargs): + global received_count + received_count += 1 + + print(f"{received_count:04d} Received message from topic '{topic}': {payload}") +