diff --git a/docs/contributed/deepsecurity_ja.md b/docs/contributed/deepsecurity_ja.md
new file mode 100644
index 00000000..0c4ea9c5
--- /dev/null
+++ b/docs/contributed/deepsecurity_ja.md
@@ -0,0 +1,137 @@
+# Ingesting DeepSecurity logs with es_loader
+
+DeepSecurity logs are ingested into the SIEM through the following pipeline:
+
+1. The DeepSecurity agent running on the EC2 instance writes its logs to /var/log/dsa.log via syslog
+2. td-agent/fluentd forwards /var/log/dsa.log to an S3 bucket
+3. es_loader, running as a Lambda function, reads the objects from S3 and loads them into Elasticsearch
+
+## Forwarding syslog from DeepSecurity to localhost
+
+Log in to the DeepSecurity SaaS console and, under Administration -> System Settings -> Event Forwarding, configure the SIEM settings so that logs are forwarded directly in Common Event Format to 127.0.0.1, port 514/udp, facility Local1.
+Agents should forward logs: Directly to the Syslog Server
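+
+For reference, a forwarded record looks roughly like the following (the values are illustrative only; the actual extension fields depend on the protection module that raised the event):
+
+```
+CEF:0|Trend Micro|Deep Security Agent|20.0.0.1234|4000000|Eicar_test_file|6|cn1=1 dvc=10.0.0.5 act=Delete fname=/tmp/eicar.com msg=Realtime TrendMicroDsTenant=Primary TrendMicroDsTenantId=0
+```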
+
+## Saving DeepSecurity logs to /var/log/dsa.log with rsyslog
+
+Messages containing CEF: or LEEF: are treated as DeepSecurity logs and written to /var/log/dsa.log.
+
+/etc/rsyslog.d/ds_agent.conf
+```
+$FileCreateMode 0644
+
+:syslogtag, contains, "CEF:" /var/log/dsa.log
+& stop
+
+:syslogtag, contains, "LEEF:" /var/log/dsa.log
+& stop
+```
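+
+After placing this file, restart rsyslog so the rule takes effect (the service name assumed here is the systemd default):
+
+```
+sudo systemctl restart rsyslog
+```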
+
+## Forwarding logs from td-agent/fluentd to S3
+
+td-agent is used to forward the logs to S3.
+
+/etc/td-agent/conf.d/ds_agent.conf
+```
+<source>
+  @type tail
+  format none
+  path /var/log/dsa.log
+  pos_file /var/log/td-agent/.dsa.pos
+  tag ds_agent.*
+</source>
+
+<filter ds_agent.**>
+  @type record_transformer
+  @id ds_agent_record_modifier
+  enable_ruby true
+  <record>
+    hostname "#{Socket.gethostname}"
+    timestamp ${time.strftime('%FT%T%:z')}
+    tag ${tag}
+  </record>
+</filter>
+
+<match ds_agent.**>
+  @type s3
+  @id ds_agent_s3
+  s3_bucket ${BUCKET_NAME}
+  s3_region ${REGION}
+  s3_object_key_format %{path}%{time_slice}_${hostname}_%{index}.%{file_extension}
+  path ds_agent/
+  time_slice_format %Y/%m/%d/%H
+  timezone Asia/Tokyo
+  output_time false
+  output_tag false
+  <buffer time,hostname>
+    @type file
+    path /var/log/td-agent/buffer/s3_ds_agent
+    flush_mode interval
+    flush_interval 1m
+    flush_at_shutdown true
+  </buffer>
+</match>
+```
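+
+After deploying the configuration, restart td-agent and check that objects start appearing under the ds_agent/ prefix (a rough check; replace ${BUCKET_NAME} with the actual bucket name):
+
+```
+sudo systemctl restart td-agent
+aws s3 ls s3://${BUCKET_NAME}/ds_agent/ --recursive | tail
+```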
+
+Write access from the EC2 instance to S3 is best granted through an instance profile.
+https://aws.amazon.com/jp/premiumsupport/knowledge-center/ec2-instance-access-s3-bucket/
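+
+As a rough sketch, the policy attached to the instance profile could look like the following. ${BUCKET_NAME} is a placeholder, and depending on the fluent-plugin-s3 options (e.g. check_object) the required actions may differ slightly:
+
+```
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": ["s3:PutObject", "s3:GetObject"],
+      "Resource": "arn:aws:s3:::${BUCKET_NAME}/ds_agent/*"
+    },
+    {
+      "Effect": "Allow",
+      "Action": ["s3:ListBucket"],
+      "Resource": "arn:aws:s3:::${BUCKET_NAME}"
+    }
+  ]
+}
+```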
+
+## Defining the log-deepsecurity template in Elasticsearch
+
+```
+PUT _template/log-deepsecurity
+{
+ "log-deepsecurity" : {
+ "index_patterns" : [
+ "log-deepsecurity*"
+ ],
+ "mappings" : {
+ "properties" : {
+ "cloud.account" : {
+ "type" : "object"
+ },
+ "event.severity" : {
+ "type" : "integer"
+ },
+ "event.original" : {
+ "type" : "text"
+ },
+ "event.count" : {
+ "type" : "integer"
+ },
+ "timestamp" : {
+ "type" : "date"
+ }
+ }
+ }
+ }
+}
+```
+
+## Configuring es_loader
+
+Define the following in aws.ini/user.ini.
+```
+[deepsecurity]
+index = log-deepsecurity
+s3_key = ds_agent
+format = json
+script_ecs = event.action destination.ip destination.port destination.mac destination.bytes source.ip source.port source.mac source.bytes network.transport server.name file.path event.count rule.category host.id event.original
+event.action = act
+destination.ip = dst
+destination.port = dpt
+destination.mac = dmac
+destination.bytes = out
+source.ip = src
+source.port = spt
+source.mac = smac
+source.bytes = in
+network.transport = proto
+server.name = hostname
+file.path = fname
+event.count = cnt
+rule.category = cs1
+host.id = cn1
+event.original = msg
+```
+
+Make sure that siem/sf_deepsecurity.py, which parses the DeepSecurity logs, is included in the Lambda function.
+With the configuration so far, logs should now be loaded into Elasticsearch.
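+
+To confirm that documents are arriving, a quick check from the Kibana Dev Tools console could look like this (the actual index name may carry a date suffix depending on the es_loader settings, hence the wildcard):
+
+```
+GET log-deepsecurity*/_count
+```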
diff --git a/source/lambda/es_loader/siem/sf_deepsecurity.py b/source/lambda/es_loader/siem/sf_deepsecurity.py
new file mode 100644
index 00000000..fe1086df
--- /dev/null
+++ b/source/lambda/es_loader/siem/sf_deepsecurity.py
@@ -0,0 +1,104 @@
+import re
+import base64
+import json
+import ipaddress
+from siem import merge, put_value_into_dict, get_value_from_dict
+
+def transform(logdata):
+ # https://cloudone.trendmicro.com/docs/workload-security/event-syslog-message-formats/
+ fields = logdata['message'].split('|')
+ if len(fields) < 8:
+ print("Illegal format")
+        return None
+ logdata.setdefault('agent', {})
+ logdata['agent']['name'] = " ".join([fields[1],fields[2],fields[3]])
+ logdata.setdefault('rule', {})
+ logdata['rule']['name'] = " ".join([fields[4],fields[5]])
+ logdata.setdefault('event', {})
+ logdata['event']['severity'] = fields[6]
+
+    # temporarily replace the escaped '=' (\=) with a placeholder string
+    message = re.sub('\\\\=', '____', fields[7])
+    # walk through the extension, treating '=' as the delimiter
+ attributes = message.split('=')
+ next_ptr = attributes.pop(0)
+ for ptr in attributes:
+ values = ptr.split()
+        if not values:
+ break
+ curr_ptr = next_ptr
+ next_ptr = values.pop()
+ value = ' '.join(values)
+ if value:
+ logdata[curr_ptr] = re.sub('____', '=', value)
+    # handle the trailing key/value pair
+ logdata[curr_ptr] = re.sub('____', '=', value + next_ptr)
+
+ if 'act' in logdata:
+        # IDS:Reset only raises an alert and does not drop the packet,
+        # so rewrite it to avoid giving a misleading impression
+ logdata['act'] = re.sub("IDS:Reset","DetectOnly:NotReset",logdata['act'])
+
+    # map the parsed fields to ECS below
+ deepsecurity_ecs_keys = {
+ 'destination.ip': 'dst',
+ 'destination.port': 'dpt',
+ 'destination.mac': 'dmac',
+ 'destination.bytes': 'out',
+ 'source.ip': 'src',
+ 'source.port': 'spt',
+ 'source.mac': 'smac',
+ 'source.bytes': 'in',
+ 'network.transport': 'proto',
+ 'event.action': 'act',
+ 'server.name': 'fluent_hostname',
+ 'file.path': 'fname',
+ 'event.count': 'cnt',
+ 'rule.category': 'cs1',
+ 'host.id': 'cn1',
+ 'event.original': 'msg',
+ }
+
+ for ecs_key in deepsecurity_ecs_keys:
+ original_keys = deepsecurity_ecs_keys[ecs_key]
+ v = get_value_from_dict(logdata, original_keys)
+ if v:
+ new_ecs_dict = put_value_into_dict(ecs_key, v)
+ if ".ip" in ecs_key:
+ try:
+ ipaddress.ip_address(v)
+ except ValueError:
+ continue
+ merge(logdata, new_ecs_dict)
+ del logdata[original_keys]
+
+    # if source.ip has not been set, fall back to dvc
+    if "dvc" in logdata:
+        if "source" not in logdata:
+            logdata['source'] = {'ip': logdata['dvc']}
+        elif "ip" not in logdata['source']:
+            logdata['source']['ip'] = logdata['dvc']
+
+    # decode the base64-encoded packet capture
+ if 'TrendMicroDsPacketData' in logdata:
+ saved = logdata['TrendMicroDsPacketData']
+ try:
+ logdata['TrendMicroDsPacketData'] = base64.b64decode(logdata['TrendMicroDsPacketData']).decode("utf-8", errors="backslashreplace")
+ except Exception as e:
+ print(e)
+ logdata['TrendMicroDsPacketData'] = saved
+ # filter out 'cookie'
+ filtered = []
+ for line in logdata['TrendMicroDsPacketData'].split("\n"):
+ if re.findall(r'^cookie',line.lower()):
+ continue
+ filtered.append(line)
+ logdata['TrendMicroDsPacketData'] = "\n".join(filtered)
+        # extract the client address from X-Forwarded-For (e.g. X-Forwarded-For: 123.123.123.234)
+        m = re.search(r'X-Forwarded-For: ([0-9.]+)', logdata['TrendMicroDsPacketData'])
+        if m:
+            logdata.setdefault('source', {})['ip'] = m.group(1)
+
+    # the tenant fields are not mapped to ECS; drop them if present
+    logdata.pop('TrendMicroDsTenant', None)
+    logdata.pop('TrendMicroDsTenantId', None)
+
+ return logdata
diff --git a/source/lambda/es_loader/user.ini.sample b/source/lambda/es_loader/user.ini.sample
index 32d70741..cf314aec 100644
--- a/source/lambda/es_loader/user.ini.sample
+++ b/source/lambda/es_loader/user.ini.sample
@@ -23,4 +23,30 @@ url.path = request_path
static_ecs = @log_type
@log_type = apache
-geoip = source
\ No newline at end of file
+geoip = source
+
+
+[deepsecurity]
+# https://cloudone.trendmicro.com/docs/workload-security/event-syslog-message-formats/
+# See README for more details
+# https://github.com/aws-samples/siem-on-amazon-elasticsearch/blob/main/docs/contributed/deepsecurity_ja.md
+index = log-deepsecurity
+s3_key = ds_agent
+format = json
+script_ecs = event.action destination.ip destination.port destination.mac destination.bytes source.ip source.port source.mac source.bytes network.transport server.name file.path event.count rule.category host.id event.original
+event.action = act
+destination.ip = dst
+destination.port = dpt
+destination.mac = dmac
+destination.bytes = out
+source.ip = src
+source.port = spt
+source.mac = smac
+source.bytes = in
+network.transport = proto
+server.name = hostname
+file.path = fname
+event.count = cnt
+rule.category = cs1
+host.id = cn1
+event.original = msg