diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index d70a1f1d4a..e0cc45f54e 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -45,6 +45,10 @@ jobs:
git config user.name intelmq-bot
git config user.email intelmq-bot
+ - name: "Fix images path"
+ run: |
+ sed -i 's|docs/static/images/|static/images/|' docs/index.md
+
- name: "Build docs without publishing"
if: github.event_name == 'pull_request'
run: |
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f5f33ab615..1351c9122a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -34,6 +34,8 @@
(PR#2408 and PR#2414 by Jan Kaliszewski).
- `intelmq.lib.upgrades`: Replace deprecated instances of `url2fqdn` experts by the new `url` expert in runtime configuration (PR#2432 by Sebastian Wagner).
- `intelmq.lib.bot`: Ensure closing log files on reloading (PR#2435 by Kamil Mankowski).
+- AMQP Pipeline: fix SSL context to create a client-side connection that verifies the server (PR by Kamil Mankowski).
+- Only load the config once when starting intelmqctl (which makes IntelMQ API calls take less time) (PR#2444 by DigitalTrustCenter).
### Development
- Makefile: Add codespell and test commands (PR#2425 by Sebastian Wagner).
@@ -61,8 +63,20 @@
- Remove undocumented and unused attributes of `StompCollectorBot` instances:
`ssl_ca_cert`, `ssl_cl_cert`, `ssl_cl_cert_key`.
- Minor fixes/improvements and some refactoring (see also above: *Core*...).
+- `intelmq.bots.collectors.amqp`: fix SSL context to create a client-side connection that verifies the server (PR by Kamil Mankowski).
+- `intelmq.bots.collectors.shadowserver.collector_reports_api`:
+ - The 'json' option is no longer supported as the 'csv' option provides better performance (PR#2372 by elsif2).
+- `intelmq.bots.collectors.alienvault_otx.collector` (PR#2449 by qux-bbb):
+ - Fix modified_pulses_only is always False.
+
#### Parsers
+- `intelmq.bots.parsers.netlab_360.parser`: Removed as the feed is discontinued. (#2442 by Filip Pokorný)
+- `intelmq.bots.parsers.webinspektor.parser`: Removed as the feed is discontinued. (#2442 by Filip Pokorný)
+- `intelmq.bots.parsers.sucuri.parser`: Removed as the feed is discontinued. (#2442 by Filip Pokorný)
+- `intelmq.bots.parsers.shadowserver._config`:
+ - Switch to dynamic configuration to decouple report schema changes from IntelMQ releases by regularly downloading them from the Shadowserver server (PR#2372 by elsif2).
+- `intelmq.bots.parsers.cymru`: Save current line. (PR by Kamil Mankowski)
#### Experts
- `intelmq.bots.experts.jinja` (PR#2417 by Mikk Margus Möll):
@@ -90,6 +104,7 @@
- Try to reconnect on `NotConnectedException`.
- `intelmq.bots.outputs.smtp_batch.output` (PR #2439 by Edvard Rejthar):
- Fix ability to send with the default `bcc`
+- `intelmq.bots.outputs.amqp`: fix SSL context to create a client-side connection that verifies the server (PR by Kamil Mankowski).
### Documentation
- Add a readthedocs configuration file to fix the build fail (PR#2403 by Sebastian Wagner).
@@ -97,6 +112,8 @@
- Update/fix/improve the stuff related to the STOMP bots and integration with the *n6*'s
Stream API (PR#2408 and PR#2414 by Jan Kaliszewski).
- Complete documentation overhaul. Change to markdown format. Uses the mkdocs-material (PR#2419 by Filip Pokorný).
+- Add a warning banner if not browsing the latest version of the docs (PR#2445 by Filip Pokorný).
+- Fix logo path in index.md when building the docs (PR#2445 by Filip Pokorný).
### Packaging
- Add `pendulum` to suggested packages, as it is required for the sieve bot (PR#2424 by Sebastian Wagner).
@@ -156,13 +173,11 @@
- added support for `Subject NOT LIKE` queries,
- added support for multiple values in ticket subject queries.
- `intelmq.bots.collectors.rsync`: Support for optional private key, relative time parsing for the source path, extra rsync parameters and strict host key checking (PR#2241 by Mateo Durante).
-- `intelmq.bots.collectors.shadowserver.collector_reports_api`:
- - The 'json' option is no longer supported as the 'csv' option provides better performance.
#### Parsers
- `intelmq.bots.parsers.shadowserver._config`:
- Reset detected `feedname` at shutdown to re-detect the feedname on reloads (PR#2361 by @elsif2, fixes #2360).
- - Switch to dynamic configuration to decouple report schema changes from IntelMQ releases.
+ - Switch to dynamic configuration to decouple report schema changes from IntelMQ releases.
- Added 'IPv6-Vulnerable-Exchange' alias and 'Accessible-WS-Discovery-Service' report. (PR#2338)
- Removed unused `p0f_genre` and `p0f_detail` from the 'DNS-Open-Resolvers' report. (PR#2338)
- Added 'Accessible-SIP' report. (PR#2348)
diff --git a/debian/cron.d/intelmq-update-database b/debian/cron.d/intelmq-update-database
index b9223c2f71..b05e04fd48 100644
--- a/debian/cron.d/intelmq-update-database
+++ b/debian/cron.d/intelmq-update-database
@@ -3,14 +3,16 @@
#
# m h dom mon dow command
# Update database for tor_nodes bot, update frequency is unknown:
-11 0 * * * intelmq intelmq.bots.experts.tor_nodes.expert --update-database
+11 0 * * * intelmq intelmq.bots.experts.tor_nodes.expert --update-database
# Update database for maxmind_geoip bot, updated every Tuesday:
-30 23 * * 2,5 intelmq intelmq.bots.experts.maxmind_geoip.expert --update-database
+30 23 * * 2,5 intelmq intelmq.bots.experts.maxmind_geoip.expert --update-database
# Update database for asn_lookup bot, updated every two hours:
-5 */2 * * * intelmq intelmq.bots.experts.asn_lookup.expert --update-database
+5 */2 * * * intelmq intelmq.bots.experts.asn_lookup.expert --update-database
# Update database for domain_suffix bot, updated daily:
-45 1 * * * intelmq intelmq.bots.experts.domain_suffix.expert --update-database
+45 1 * * * intelmq intelmq.bots.experts.domain_suffix.expert --update-database
# Update database for recordedfuture_iprisk bot, update frequency is unknown:
-27 1 * * * intelmq intelmq.bots.experts.recordedfuture_iprisk.expert --update-database
+27 1 * * * intelmq intelmq.bots.experts.recordedfuture_iprisk.expert --update-database
# Update database for domain_valid bot, updated daily:
-50 1 * * * intelmq intelmq.bots.experts.domain_valid.expert --update-database
\ No newline at end of file
+50 1 * * * intelmq intelmq.bots.experts.domain_valid.expert --update-database
+# Update schema for shadowserver parser bot:
+02 1 * * * intelmq intelmq.bots.parsers.shadowserver.parser --update-schema
diff --git a/docs/_overrides/main.html b/docs/_overrides/main.html
new file mode 100644
index 0000000000..1d8f68edf7
--- /dev/null
+++ b/docs/_overrides/main.html
@@ -0,0 +1,11 @@
+{#SPDX-FileCopyrightText: 2023 Filip Pokorný#}
+{#SPDX-License-Identifier: AGPL-3.0-or-later#}
+
+{% extends "base.html" %}
+
+{% block outdated %}
+ You're not viewing the latest version.
+
+ Click here to go to latest.
+
+{% endblock %}
diff --git a/docs/index.md b/docs/index.md
index 79c3101745..cf097897c3 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -7,7 +7,7 @@
[![CII Badge](https://bestpractices.coreinfrastructure.org/projects/4186/badge)](https://bestpractices.coreinfrastructure.org/projects/4186/)
-->
-![IntelMQ](/docs/static/images/Logo_Intel_MQ.svg)
+![IntelMQ](docs/static/images/Logo_Intel_MQ.svg)
# Introduction
diff --git a/docs/user/bots.md b/docs/user/bots.md
index 8c5e491714..791142dc20 100644
--- a/docs/user/bots.md
+++ b/docs/user/bots.md
@@ -930,8 +930,7 @@ oldest files available!).
The resulting reports contain the following special field:
-- `extra.file_name`: The name of the downloaded file, with fixed filename extension. The API returns file names with the
- extension `.csv`, although the files are JSON, not CSV. Therefore, for clarity and better error detection in the parser, the file name in `extra.file_name` uses `.json` as extension.
+- `extra.file_name`: The name of the downloaded file, with fixed filename extension.
**Module:** `intelmq.bots.collectors.shadowserver.collector_reports_api`
@@ -951,7 +950,7 @@ The resulting reports contain the following special field:
**`types`**
-(optional, string/array of strings) An array of strings (or a list of comma-separated values) with the names of report types you want to process. If you leave this empty, all the available reports will be downloaded and processed (i.e. 'scan', 'drones', 'intel', 'sandbox_connection', 'sinkhole_combined'). The possible report types are equivalent to the file names given in the section Supported Reports of the [Shadowserver parser](#intelmq.bots.parsers.shadowserver.parser).
+(optional, string/array of strings) An array of strings (or a list of comma-separated values) with the names of report types you want to process. If you leave this empty, all the available reports will be downloaded and processed (i.e. 'scan', 'drones', 'intel', 'sandbox_connection', 'sinkhole_combined'). The possible report types are equivalent to the file names defined in the schema. Please see the Supported Reports section of the [Shadowserver parser](#intelmq.bots.parsers.shadowserver.parser) for details.
**Sample configuration**
@@ -2128,23 +2127,23 @@ correct mapping of the columns:
1. **Automatic report type detection**
- Since IntelMQ version 2.1 the parser can detect the feed based on metadata provided by the collector.
-
- When processing a report, this bot takes `extra.file_name` from the report and looks in `config.py` how the report
- should be parsed. If this lookup is not possible, and the `feedname` is not given as parameter, the feed cannot be
- parsed.
-
- The field `extra.file_name` has the following structure: `%Y-%m-%d-${report_name}[-suffix].csv` where the optional
- suffix can be something like `country-geo`. For example, some possible filenames
- are `2019-01-01-scan_http-country-geo.csv` or `2019-01-01-scan_tftp.csv`. The important part is the `report_name`,
- between the date and the suffix. Since version 2.1.2 the date in the filename is optional, so filenames
- like `scan_tftp.csv` are also detected.
+ Since IntelMQ version 2.1 the parser can detect the feed based on metadata provided by the collector.
+
+ When processing a report, this bot takes `extra.file_name` from the report and looks in `config.py` how the report
+ should be parsed. If this lookup is not possible, and the `feedname` is not given as parameter, the feed cannot be
+ parsed.
+
+ The field `extra.file_name` has the following structure: `%Y-%m-%d-${report_name}[-suffix].csv` where the optional
+ suffix can be something like `country-geo`. For example, some possible filenames
+ are `2019-01-01-scan_http-country-geo.csv` or `2019-01-01-scan_tftp.csv`. The important part is the `report_name`,
+ between the date and the suffix. Since version 2.1.2 the date in the filename is optional, so filenames
+ like `scan_tftp.csv` are also detected.
2. **Fixed report type**
- If the method above is not possible and for upgraded instances, the report type can be set with the `feedname`
- parameter. Report type is derived from the subject of Shadowserver e-mails. A list of possible values of
- the `feedname` parameter can be found in the table below in the column "Report Type".
+ If the method above is not possible and for upgraded instances, the report type can be set with the `feedname`
+ parameter. Report type is derived from the subject of Shadowserver e-mails. A list of possible values of
+ the `feedname` parameter can be found in the table below in the column "Report Type".
**Module:**
@@ -2154,7 +2153,9 @@ correct mapping of the columns:
**`feedname`**
-(optional, string) Name of the Shadowserver report, see list below for possible values.
+(optional, string) Name of the Shadowserver report. The value for each report type can be found in the schema `feed_name` field.
+
+For example, the possible values can be listed using `curl -s https://interchange.shadowserver.org/intelmq/v1/schema | jq .[].feed_name`.
**`overwrite`**
@@ -2200,6 +2201,7 @@ The schema revision history is maintained at https://github.com/The-Shadowserver
auto_update: true
run_mode: continuous
```
+
---
### Shodan
diff --git a/docs/user/feeds.md b/docs/user/feeds.md
index 8224336d8b..06c67fb468 100644
--- a/docs/user/feeds.md
+++ b/docs/user/feeds.md
@@ -1899,101 +1899,6 @@ module: intelmq.bots.parsers.microsoft.parser_ctip
---
-## Netlab 360
-
-### DGA
-
-This feed lists DGA family, Domain, Start and end of valid time(UTC) of a number of DGA families.
-
-**Public:** yes
-
-**Revision:** 2018-01-20
-
-**Documentation:**
-
-
-**Collector configuration**
-
-```yaml
-module: intelmq.bots.collectors.http.collector_http
-parameters:
- http_url: http://data.netlab.360.com/feeds/dga/dga.txt
- name: DGA
- provider: Netlab 360
- rate_limit: 3600
-```
-
-**Parser configuration**
-
-```yaml
-module: intelmq.bots.parsers.netlab_360.parser
-```
-
----
-
-
-### Hajime Scanner
-
-This feed lists IP address for know Hajime bots network. These IPs data are obtained by joining the DHT network and interacting with the Hajime node
-
-**Public:** yes
-
-**Revision:** 2019-08-01
-
-**Documentation:**
-
-
-**Collector configuration**
-
-```yaml
-module: intelmq.bots.collectors.http.collector_http
-parameters:
- http_url: https://data.netlab.360.com/feeds/hajime-scanner/bot.list
- name: Hajime Scanner
- provider: Netlab 360
- rate_limit: 3600
-```
-
-**Parser configuration**
-
-```yaml
-module: intelmq.bots.parsers.netlab_360.parser
-```
-
----
-
-
-### Magnitude EK
-
-This feed lists FQDN and possibly the URL used by Magnitude Exploit Kit. Information also includes the IP address used for the domain and last time seen.
-
-**Public:** yes
-
-**Revision:** 2018-01-20
-
-**Documentation:**
-
-
-**Collector configuration**
-
-```yaml
-module: intelmq.bots.collectors.http.collector_http
-parameters:
- http_url: http://data.netlab.360.com/feeds/ek/magnitude.txt
- name: Magnitude EK
- provider: Netlab 360
- rate_limit: 3600
-```
-
-**Parser configuration**
-
-```yaml
-module: intelmq.bots.parsers.netlab_360.parser
-```
-
----
-
-
## OpenPhish
### Premium Feed
@@ -2495,41 +2400,6 @@ module: intelmq.bots.parsers.github_feed
---
-## Sucuri
-
-### Hidden IFrames
-
-Latest hidden iframes identified on compromised web sites.
-
-**Public:** yes
-
-**Revision:** 2018-01-28
-
-**Documentation:**
-
-**Additional Information:** Please note that the parser only extracts the hidden iframes and the conditional redirects, not the encoded javascript.
-
-
-**Collector configuration**
-
-```yaml
-module: intelmq.bots.collectors.http.collector_http
-parameters:
- http_url: http://labs.sucuri.net/?malware
- name: Hidden IFrames
- provider: Sucuri
- rate_limit: 86400
-```
-
-**Parser configuration**
-
-```yaml
-module: intelmq.bots.parsers.sucuri.parser
-```
-
----
-
-
## Surbl
### Malicious Domains
@@ -2917,37 +2787,6 @@ parameters:
---
-## WebInspektor
-
-### Unsafe sites
-
-Latest detected unsafe sites.
-
-**Public:** yes
-
-**Revision:** 2018-03-09
-
-
-**Collector configuration**
-
-```yaml
-module: intelmq.bots.collectors.http.collector_http
-parameters:
- http_url: https://app.webinspector.com/public/recent_detections/
- name: Unsafe sites
- provider: WebInspektor
- rate_limit: 60
-```
-
-**Parser configuration**
-
-```yaml
-module: intelmq.bots.parsers.webinspektor.parser
-```
-
----
-
-
## ZoneH
### Defacements
diff --git a/intelmq/bin/intelmqctl.py b/intelmq/bin/intelmqctl.py
index 51301b1d82..9ad9266d08 100644
--- a/intelmq/bin/intelmqctl.py
+++ b/intelmq/bin/intelmqctl.py
@@ -87,6 +87,11 @@ def __init__(self, interactive: bool = False, returntype: ReturnType = ReturnTyp
self._parameters.logging_handler = 'file'
self._parameters.logging_path = DEFAULT_LOGGING_PATH
+ try:
+ self._runtime_configuration = utils.load_configuration(RUNTIME_CONF_FILE)
+ except ValueError as exc: # pragma: no cover
+ self.abort(f'Error loading {RUNTIME_CONF_FILE!r}: {exc}')
+
# Try to get logging_level from defaults configuration, else use default (defined above)
defaults_loading_exc = None
try:
@@ -203,11 +208,6 @@ def __init__(self, interactive: bool = False, returntype: ReturnType = ReturnTyp
intelmqctl debug --get-environment-variables
'''
- try:
- self._runtime_configuration = utils.load_configuration(RUNTIME_CONF_FILE)
- except ValueError as exc: # pragma: no cover
- self.abort(f'Error loading {RUNTIME_CONF_FILE!r}: {exc}')
-
self._processmanagertype = getattr(self._parameters, 'process_manager', 'intelmq')
if self._processmanagertype not in process_managers():
self.abort('Invalid process manager given: %r, should be one of %r.' '' % (self._processmanagertype, list(process_managers().keys())))
@@ -384,7 +384,8 @@ def __init__(self, interactive: bool = False, returntype: ReturnType = ReturnTyp
)
def load_defaults_configuration(self, silent=False):
- for option, value in utils.get_global_settings().items():
+ global_settings = self._runtime_configuration.get('global', {})
+ for option, value in global_settings.items():
setattr(self._parameters, option, value)
# copied from intelmq.lib.bot, should be refactored to e.g. intelmq.lib.config
@@ -940,6 +941,12 @@ def check(self, no_connections=False, check_executables=True):
check_logger.error('SyntaxError in bot %r: %r', bot_id, exc)
retval = 1
continue
+ except AttributeError:
+ # if module does not exist, utils.get_bot_module_name returns None. import_module then raises
+ # AttributeError: 'NoneType' object has no attribute 'startswith'
+ check_logger.error('Incomplete installation: Bot %r not importable.', bot_id,)
+ retval = 1
+ continue
bot = getattr(bot_module, 'BOT')
bot_parameters = copy.deepcopy(global_settings)
bot_parameters.update(bot_config.get('parameters', {})) # the parameters field may not exist
diff --git a/intelmq/bots/collectors/alienvault_otx/collector.py b/intelmq/bots/collectors/alienvault_otx/collector.py
index 88e7528adc..171c487abc 100644
--- a/intelmq/bots/collectors/alienvault_otx/collector.py
+++ b/intelmq/bots/collectors/alienvault_otx/collector.py
@@ -26,10 +26,6 @@ def init(self):
if OTXv2 is None:
raise MissingDependencyError("OTXv2")
- self.modified_pulses_only = False
- if hasattr(self, 'modified_pulses_only'):
- self.modified_pulses_only = self.modified_pulses_only
-
def process(self):
self.logger.info("Downloading report through API.")
otx = OTXv2(self.api_key, proxy=self.https_proxy)
diff --git a/intelmq/bots/collectors/amqp/collector_amqp.py b/intelmq/bots/collectors/amqp/collector_amqp.py
index 543f4e0bed..291aa1ecfc 100644
--- a/intelmq/bots/collectors/amqp/collector_amqp.py
+++ b/intelmq/bots/collectors/amqp/collector_amqp.py
@@ -55,7 +55,7 @@ def init(self):
self.password)
if self.use_ssl:
- self.kwargs['ssl_options'] = pika.SSLOptions(context=ssl.create_default_context(ssl.Purpose.CLIENT_AUTH))
+ self.kwargs['ssl_options'] = pika.SSLOptions(context=ssl.create_default_context(ssl.Purpose.SERVER_AUTH))
self.connection_parameters = pika.ConnectionParameters(
host=self.connection_host,
diff --git a/intelmq/bots/outputs/amqptopic/output.py b/intelmq/bots/outputs/amqptopic/output.py
index 68d1c366b1..03c0faba5e 100644
--- a/intelmq/bots/outputs/amqptopic/output.py
+++ b/intelmq/bots/outputs/amqptopic/output.py
@@ -64,7 +64,7 @@ def init(self):
self.password)
if self.use_ssl:
- self.kwargs['ssl_options'] = pika.SSLOptions(context=ssl.create_default_context(ssl.Purpose.CLIENT_AUTH))
+ self.kwargs['ssl_options'] = pika.SSLOptions(context=ssl.create_default_context(ssl.Purpose.SERVER_AUTH))
self.connection_parameters = pika.ConnectionParameters(
host=self.connection_host,
diff --git a/intelmq/bots/parsers/cymru/parser_cap_program.py b/intelmq/bots/parsers/cymru/parser_cap_program.py
index 94af6365f8..dc54c4565c 100644
--- a/intelmq/bots/parsers/cymru/parser_cap_program.py
+++ b/intelmq/bots/parsers/cymru/parser_cap_program.py
@@ -64,6 +64,7 @@ def parse(self, report):
elif 'Data file written at' in line:
self.parse_line = self.parse_line_old
else:
+ self._current_line = line
yield line
def parse_bot_old(self, comment_split, report_type, event):
diff --git a/intelmq/bots/parsers/netlab_360/__init__.py b/intelmq/bots/parsers/netlab_360/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/intelmq/bots/parsers/netlab_360/parser.py b/intelmq/bots/parsers/netlab_360/parser.py
deleted file mode 100644
index 04017e942e..0000000000
--- a/intelmq/bots/parsers/netlab_360/parser.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# SPDX-FileCopyrightText: 2016 jgedeon120
-#
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-# -*- coding: utf-8 -*-
-""" IntelMQ parser for Netlab 360 data feeds. """
-
-from intelmq.lib.bot import ParserBot
-from intelmq.lib.harmonization import DateTime
-
-
-class Netlab360ParserBot(ParserBot):
- """Parse the Netlab 360 DGA, Hajime, Magnitude and Mirai feeds"""
- DGA_FEED = {'http://data.netlab.360.com/feeds/dga/dga.txt',
- 'https://data.netlab.360.com/feeds/dga/dga.txt'}
- MAGNITUDE_FEED = {'http://data.netlab.360.com/feeds/ek/magnitude.txt',
- 'https://data.netlab.360.com/feeds/ek/magnitude.txt'}
- MIRAI_SCANNER_FEED = {'http://data.netlab.360.com/feeds/mirai-scanner/scanner.list',
- 'https://data.netlab.360.com/feeds/mirai-scanner/scanner.list'}
- HAJIME_SCANNER_FEED = {'http://data.netlab.360.com/feeds/hajime-scanner/bot.list',
- 'https://data.netlab.360.com/feeds/hajime-scanner/bot.list'}
-
- def parse_line(self, line, report):
- if line.startswith('#') or not line.strip():
- self.tempdata.append(line)
-
- else:
- value = line.split('\t')
- event = self.new_event(report)
- event.add('classification.identifier', value[0].lower())
- event.add('raw', line)
-
- if report['feed.url'] in Netlab360ParserBot.DGA_FEED:
- event.add('source.fqdn', value[1])
- # DGA Feed format is
- # DGA family, Domain, Start and end of valid time(UTC)
-
- event.add('time.source', value[2] + ' UTC')
- if event['time.source'] > event['time.observation']:
- event.change('time.source', event['time.observation'])
- event.add('classification.type', 'c2-server')
- event.add('event_description.url', 'http://data.netlab.360.com/dga')
-
- elif report['feed.url'] in Netlab360ParserBot.MAGNITUDE_FEED:
- event.add('time.source', DateTime.from_timestamp(int(value[1])))
- event.add('source.ip', value[2])
- # ignore ips as fqdns
- event.add('source.fqdn', value[3], raise_failure=False)
- if value[4] != 'N/A':
- event.add('source.url', value[4])
- event.add('classification.type', 'exploit')
- event.add('event_description.url', 'http://data.netlab.360.com/ek')
- elif report['feed.url'] in Netlab360ParserBot.MIRAI_SCANNER_FEED:
- event.add('time.source', value[0] + ' UTC')
- event.add('source.ip', value[1].replace('sip=', ''))
- event.add('destination.port', value[2].replace('dport=', ''))
- event.add('classification.type', 'scanner')
- event.add('classification.identifier', 'mirai', overwrite=True)
- elif report['feed.url'] in Netlab360ParserBot.HAJIME_SCANNER_FEED:
- event.add('time.source', value[0] + 'T00:00:00 UTC')
- event.add('source.ip', value[1].replace('ip=', ''))
- event.add('classification.type', 'scanner')
- event.add('classification.identifier', 'hajime', overwrite=True)
- else:
- raise ValueError('Unknown data feed %s.' % report['feed.url'])
-
- yield event
-
-
-BOT = Netlab360ParserBot
diff --git a/intelmq/bots/parsers/shadowserver/_config.py b/intelmq/bots/parsers/shadowserver/_config.py
index 6931e54109..6f3be4c145 100644
--- a/intelmq/bots/parsers/shadowserver/_config.py
+++ b/intelmq/bots/parsers/shadowserver/_config.py
@@ -126,7 +126,7 @@ def enable_auto_update(enable):
__config.auto_update = enable
-def get_feed_by_feedname(given_feedname: str) -> Optional[Dict[str, Any]]:
+def get_feed_by_feedname(given_feedname: str) -> Optional[Tuple[str, Dict[str, Any]]]:
return __config.feedname_mapping.get(given_feedname, None)
@@ -290,6 +290,20 @@ def category_or_detail(value: str, row: Dict[str, str]) -> str:
return row.get('detail', '')
+def extract_cve_from_tag(tag: str) -> Optional[str]:
+ """ Returns a string with a sorted semicolon-separated list of CVEs or None if no CVE found in tag. """
+ cveset = set()
+ tags = tag.split(";")
+
+ for t in tags:
+ if re.match('^cve-[0-9]+-[0-9]+$', t):
+ cveset.add(t)
+
+ if not (len(cveset)):
+ return None
+ return (';'.join(str(c) for c in sorted(cveset)))
+
+
functions = {
'add_UTC_to_timestamp': add_UTC_to_timestamp,
'convert_bool': convert_bool,
@@ -308,6 +322,7 @@ def category_or_detail(value: str, row: Dict[str, str]) -> str:
'scan_exchange_type': scan_exchange_type,
'scan_exchange_identifier': scan_exchange_identifier,
'category_or_detail': category_or_detail,
+ 'extract_cve_from_tag': extract_cve_from_tag,
}
diff --git a/intelmq/bots/parsers/shadowserver/parser.py b/intelmq/bots/parsers/shadowserver/parser.py
index ec1908269a..e3d0eb2317 100644
--- a/intelmq/bots/parsers/shadowserver/parser.py
+++ b/intelmq/bots/parsers/shadowserver/parser.py
@@ -63,6 +63,7 @@ def init(self):
if self.feedname is not None:
self._sparser_config = config.get_feed_by_feedname(self.feedname)
if self._sparser_config:
+ self._sparser_config = self._sparser_config[1]
self.logger.info('Using fixed feed name %r for parsing reports.' % self.feedname)
self._mode = 'fixed'
else:
diff --git a/intelmq/bots/parsers/sucuri/__init__.py b/intelmq/bots/parsers/sucuri/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/intelmq/bots/parsers/sucuri/parser.py b/intelmq/bots/parsers/sucuri/parser.py
deleted file mode 100644
index 2a2bae9517..0000000000
--- a/intelmq/bots/parsers/sucuri/parser.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# SPDX-FileCopyrightText: 2018 dargen3
-#
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-# -*- coding: utf-8 -*-
-"""
-Only parses hidden iframes and conditional redirections, not Encoded javascript.
-"""
-import re
-from html.parser import HTMLParser
-
-from intelmq.lib import utils
-from intelmq.lib.bot import ParserBot
-
-
-class MyHTMLParser(HTMLParser):
-
- lsData = ""
-
- def handle_data(self, data):
- self.lsData = data
-
-
-parser = MyHTMLParser()
-remove_comments = re.compile(r"", re.DOTALL)
-
-
-class SucuriParserBot(ParserBot):
- """Parse the Sucuri Malware Hidden Iframes and Conditional redirections feeds"""
- def process(self):
- report = self.receive_message()
- raw_report = utils.base64_decode(report["raw"]) # decoding
- report_list = [row.strip() for row in raw_report.splitlines()]
- index = 0
- actual_line = report_list[index]
- while parser.lsData != "Hidden iframes": # displacement to target table
- index += 1
- actual_line = report_list[index]
- parser.feed(actual_line)
- while actual_line[:8] != "": # scrabing table data
- index += 1
- raw_actual_line = report_list[index]
- actual_line = remove_comments.sub("", raw_actual_line).replace('.', '.')
- if actual_line[:2] == "": # scrabing table data
- index += 1
- raw_actual_line = report_list[index]
- actual_line = remove_comments.sub("", raw_actual_line).replace('.', '.')
- if actual_line[:2] == "":
- index += 1
- parser.feed(report_list[index])
- if parser.lsCSP:
- index += 1
- parser.feed(report_list[index])
- event = self.new_event(report)
- raw_url_line = report_list[index]
- event.add("source.url", parser.lsData)
- event.add("classification.type", "blacklist")
- index += 1
- parser.feed(report_list[index])
- event.add("classification.identifier", parser.lsData)
- event.add("classification.taxonomy", "other")
- index += 1
- parser.feed(report_list[index])
- event.add("time.source", parser.lsData)
- event.add("raw", raw_url_line + report_list[index])
- self.send_message(event)
- self.acknowledge_message()
-
-
-BOT = WebinspektorParserBot
diff --git a/intelmq/etc/feeds.yaml b/intelmq/etc/feeds.yaml
index f87c9509f5..258e91c1f7 100644
--- a/intelmq/etc/feeds.yaml
+++ b/intelmq/etc/feeds.yaml
@@ -29,42 +29,6 @@ providers:
revision: 2022-11-15
documentation: https://viriback.com/
public: true
- WebInspektor:
- Unsafe sites:
- description: Latest detected unsafe sites.
- additional_information:
- bots:
- collector:
- module: intelmq.bots.collectors.http.collector_http
- parameters:
- http_url: https://app.webinspector.com/public/recent_detections/
- rate_limit: 60
- name: __FEED__
- provider: __PROVIDER__
- parser:
- module: intelmq.bots.parsers.webinspektor.parser
- parameters:
- revision: 2018-03-09
- documentation:
- public: true
- Sucuri:
- Hidden IFrames:
- description: Latest hidden iframes identified on compromised web sites.
- additional_information: Please note that the parser only extracts the hidden iframes and the conditional redirects, not the encoded javascript.
- bots:
- collector:
- module: intelmq.bots.collectors.http.collector_http
- parameters:
- http_url: http://labs.sucuri.net/?malware
- rate_limit: 86400
- name: __FEED__
- provider: __PROVIDER__
- parser:
- module: intelmq.bots.parsers.sucuri.parser
- parameters:
- revision: 2018-01-28
- documentation: http://labs.sucuri.net/?malware
- public: true
Surbl:
Malicious Domains:
description: Detected malicious domains. Note that you have to opened up Sponsored Datafeed Service (SDS) access to the SURBL data via rsync for your IP address.
@@ -170,61 +134,6 @@ providers:
revision: 2018-02-06
documentation: https://www.openphish.com/phishing_feeds.html
public: false
- Netlab 360:
- Magnitude EK:
- description: 'This feed lists FQDN and possibly the URL used by Magnitude Exploit
- Kit. Information also includes the IP address used for the domain and last
- time seen.'
- additional_information:
- bots:
- collector:
- module: intelmq.bots.collectors.http.collector_http
- parameters:
- http_url: http://data.netlab.360.com/feeds/ek/magnitude.txt
- rate_limit: 3600
- name: __FEED__
- provider: __PROVIDER__
- parser:
- module: intelmq.bots.parsers.netlab_360.parser
- parameters:
- revision: 2018-01-20
- documentation: http://data.netlab.360.com/ek
- public: true
- DGA:
- description: 'This feed lists DGA family, Domain, Start and end of valid time(UTC)
- of a number of DGA families.'
- additional_information:
- bots:
- collector:
- module: intelmq.bots.collectors.http.collector_http
- parameters:
- http_url: http://data.netlab.360.com/feeds/dga/dga.txt
- rate_limit: 3600
- name: __FEED__
- provider: __PROVIDER__
- parser:
- module: intelmq.bots.parsers.netlab_360.parser
- parameters:
- revision: 2018-01-20
- documentation: http://data.netlab.360.com/dga
- public: true
- Hajime Scanner:
- description: 'This feed lists IP address for know Hajime bots network. These IPs data are obtained by joining the DHT network and interacting with the Hajime node'
- additional_information:
- bots:
- collector:
- module: intelmq.bots.collectors.http.collector_http
- parameters:
- http_url: https://data.netlab.360.com/feeds/hajime-scanner/bot.list
- rate_limit: 3600
- name: __FEED__
- provider: __PROVIDER__
- parser:
- module: intelmq.bots.parsers.netlab_360.parser
- parameters:
- revision: 2019-08-01
- documentation: https://data.netlab.360.com/hajime/
- public: true
Abuse.ch:
Feodo Tracker:
description: 'List of botnet Command & Control servers (C&Cs) tracked by Feodo Tracker, associated with Dridex and Emotet (aka Heodo).'
diff --git a/intelmq/lib/pipeline.py b/intelmq/lib/pipeline.py
index 8d307cad1f..2cf36f0238 100644
--- a/intelmq/lib/pipeline.py
+++ b/intelmq/lib/pipeline.py
@@ -530,7 +530,7 @@ def load_configurations(self, queues_type):
if self.username and self.password:
self.kwargs['credentials'] = pika.PlainCredentials(self.username, self.password)
if self.ssl:
- self.kwargs['ssl_options'] = pika.SSLOptions(context=ssl.create_default_context(ssl.Purpose.CLIENT_AUTH))
+ self.kwargs['ssl_options'] = pika.SSLOptions(context=ssl.create_default_context(ssl.Purpose.SERVER_AUTH))
pika_version = tuple(int(x) for x in pika.__version__.split('.'))
if pika_version < (0, 11):
self.kwargs['heartbeat_interval'] = 10
diff --git a/intelmq/lib/upgrades.py b/intelmq/lib/upgrades.py
index 7391e3c18e..70b6a8de31 100644
--- a/intelmq/lib/upgrades.py
+++ b/intelmq/lib/upgrades.py
@@ -40,6 +40,7 @@
'v310_shadowserver_feednames',
'v320_update_turris_greylist_url',
'v322_url_replacement',
+ 'v322_removed_feeds_and_bots'
]
@@ -898,6 +899,59 @@ def v322_url_replacement(configuration, harmonization, dry_run, **kwargs):
return changed, configuration, harmonization
+def v322_removed_feeds_and_bots(configuration, harmonization, dry_run, **kwargs):
+ """
+ Discontinued feeds and bots detection
+ """
+
+ messages = []
+ discontinued_bots = []
+
+ discontinued_bots_modules = [
+ "intelmq.bots.parsers.netlab_360.parser",
+ "intelmq.bots.parsers.webinspektor.parser",
+ "intelmq.bots.parsers.sucuri.parser"
+ ]
+
+ discontinued_feeds = []
+
+ discontinued_feeds_urls = [
+ 'http://data.netlab.360.com/feeds/dga/dga.txt',
+ 'https://data.netlab.360.com/feeds/dga/dga.txt',
+ 'http://data.netlab.360.com/feeds/ek/magnitude.txt',
+ 'https://data.netlab.360.com/feeds/ek/magnitude.txt',
+ 'http://data.netlab.360.com/feeds/hajime-scanner/bot.list',
+ 'https://data.netlab.360.com/feeds/hajime-scanner/bot.list',
+ 'http://labs.sucuri.net/?malware',
+ 'https://app.webinspector.com/public/recent_detections/'
+ ]
+
+ for bot_id, bot in configuration.items():
+
+ if bot_id == 'global':
+ continue
+
+ if bot["module"] in discontinued_bots_modules:
+ discontinued_bots.append(bot_id)
+
+ elif bot["module"] == "intelmq.bots.collectors.http.collector":
+ url: str = bot["parameters"].get("http_url", "")
+
+ if url in discontinued_feeds_urls:
+ discontinued_feeds.append(bot_id)
+
+ if discontinued_bots:
+ messages.append(f"Found discontinued bots: {', '.join(discontinued_bots)}")
+
+ if discontinued_feeds:
+ messages.append(f"Found discontinued feeds collected by bots: {', '.join(discontinued_feeds)}")
+
+ if messages:
+ messages.append("Remove the affected bots from the configuration.")
+
+ return '\n'.join(messages) if messages else None, configuration, harmonization
+
+
UPGRADES = OrderedDict([
((1, 0, 0, 'dev7'), (v100_dev7_modify_syntax,)),
((1, 1, 0), (v110_shadowserver_feednames, v110_deprecations)),
@@ -924,7 +978,7 @@ def v322_url_replacement(configuration, harmonization, dry_run, **kwargs):
((3, 0, 2), ()),
((3, 1, 0), (v310_feed_changes, v310_shadowserver_feednames)),
((3, 2, 0), (v320_update_turris_greylist_url,)),
- ((3, 2, 2), (v322_url_replacement, )),
+ ((3, 2, 2), (v322_url_replacement, v322_removed_feeds_and_bots)),
])
ALWAYS = (harmonization,)
diff --git a/intelmq/lib/utils.py b/intelmq/lib/utils.py
index 5701db1af8..294f4107a0 100644
--- a/intelmq/lib/utils.py
+++ b/intelmq/lib/utils.py
@@ -852,7 +852,10 @@ def _get_console_entry_points():
return entries.get("console_scripts", []) # it's a dict
-def get_bot_module_name(bot_name: str) -> str:
+def get_bot_module_name(bot_name: str) -> Optional[str]:
+ """
+ Returns None if the bot does not exist
+ """
entries = entry_points()
if hasattr(entries, "select"):
entries = tuple(entries.select(name=bot_name, group="console_scripts"))
diff --git a/intelmq/tests/bots/parsers/netlab_360/__init__.py b/intelmq/tests/bots/parsers/netlab_360/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/intelmq/tests/bots/parsers/netlab_360/dga.txt b/intelmq/tests/bots/parsers/netlab_360/dga.txt
deleted file mode 100644
index 77ad87fa1e..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/dga.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# DGA Domain List
-# The list contains four columns:
-# DGA family, Domian, Start and end of valid time(UTC)
-#
-# Feed Provided By: netlab 360
-# netlab@360.cn
-
-suppobox difficultdress.net 2016-11-12 11:58:56 2016-11-13 00:04:15
-foobar example.com 2018-01-01 00:00:00 2030-05-04 00:08:08
diff --git a/intelmq/tests/bots/parsers/netlab_360/dga.txt.license b/intelmq/tests/bots/parsers/netlab_360/dga.txt.license
deleted file mode 100644
index 6dbb123843..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/dga.txt.license
+++ /dev/null
@@ -1,2 +0,0 @@
-SPDX-FileCopyrightText: 2016 jgedeon120
-SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/intelmq/tests/bots/parsers/netlab_360/hajime.txt b/intelmq/tests/bots/parsers/netlab_360/hajime.txt
deleted file mode 100644
index d4391473c7..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/hajime.txt
+++ /dev/null
@@ -1 +0,0 @@
-2017-09-11 ip=192.0.2.45
diff --git a/intelmq/tests/bots/parsers/netlab_360/hajime.txt.license b/intelmq/tests/bots/parsers/netlab_360/hajime.txt.license
deleted file mode 100644
index f8f131c2ce..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/hajime.txt.license
+++ /dev/null
@@ -1,2 +0,0 @@
-SPDX-FileCopyrightText: 2019 Sebastian Wagner
-SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/intelmq/tests/bots/parsers/netlab_360/magnitude.txt b/intelmq/tests/bots/parsers/netlab_360/magnitude.txt
deleted file mode 100644
index 819aef8922..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/magnitude.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# Feed Provided By: netlab 360
-# netlab@360.cn
-
-Magnitude 1478946665 178.32.227.12 3ebo08o4ct0f6n2336.insides.party http://3ebo08o4ct0f6n2336.insides.party/d97cc5cfab47e305536690a9987115ac
diff --git a/intelmq/tests/bots/parsers/netlab_360/magnitude.txt.license b/intelmq/tests/bots/parsers/netlab_360/magnitude.txt.license
deleted file mode 100644
index 6dbb123843..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/magnitude.txt.license
+++ /dev/null
@@ -1,2 +0,0 @@
-SPDX-FileCopyrightText: 2016 jgedeon120
-SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/intelmq/tests/bots/parsers/netlab_360/mirai.txt b/intelmq/tests/bots/parsers/netlab_360/mirai.txt
deleted file mode 100644
index a3300cdf8f..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/mirai.txt
+++ /dev/null
@@ -1 +0,0 @@
-2016-08-01 12:46:01 sip=109.86.182.249 dport=23
diff --git a/intelmq/tests/bots/parsers/netlab_360/mirai.txt.license b/intelmq/tests/bots/parsers/netlab_360/mirai.txt.license
deleted file mode 100644
index 40a7f8c610..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/mirai.txt.license
+++ /dev/null
@@ -1,2 +0,0 @@
-SPDX-FileCopyrightText: 2017 navtej
-SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/intelmq/tests/bots/parsers/netlab_360/test_parser.py b/intelmq/tests/bots/parsers/netlab_360/test_parser.py
deleted file mode 100644
index ad87f56759..0000000000
--- a/intelmq/tests/bots/parsers/netlab_360/test_parser.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# SPDX-FileCopyrightText: 2016 jgedeon120
-#
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-# -*- coding: utf-8 -*-
-
-import os
-import unittest
-
-import intelmq.lib.test as test
-import intelmq.lib.utils as utils
-
-from intelmq.bots.parsers.netlab_360.parser import Netlab360ParserBot
-
-with open(os.path.join(os.path.dirname(__file__), 'dga.txt')) as handle:
- DGA_FILE = handle.read()
-
-with open(os.path.join(os.path.dirname(__file__), 'magnitude.txt')) as handle:
- MAGNITUDE_FILE = handle.read()
-
-with open(os.path.join(os.path.dirname(__file__), 'mirai.txt')) as handle:
- MIRAI_FILE = handle.read()
-
-with open(os.path.join(os.path.dirname(__file__), 'hajime.txt')) as handle:
- HAJIME_FILE = handle.read()
-
-
-DGA_REPORT = {'feed.name': 'Netlab 360 DGA',
- 'feed.url': 'http://data.netlab.360.com/feeds/dga/dga.txt',
- '__type': 'Report',
- 'time.observation': '2018-01-01T00:00:00+00:00',
- 'raw': utils.base64_encode(DGA_FILE),
- }
-
-DGA_EVENT0 = {'feed.name': 'Netlab 360 DGA',
- 'feed.url': 'http://data.netlab.360.com/feeds/dga/dga.txt',
- '__type': 'Event',
- 'time.observation': '2018-01-01T00:00:00+00:00',
- #'time.source': '2016-11-13T00:04:15+00:00',
- 'time.source': '2016-11-12T11:58:56+00:00',
- 'source.fqdn': 'difficultdress.net',
- 'classification.type': 'c2-server',
- 'classification.identifier': 'suppobox',
- 'event_description.url': 'http://data.netlab.360.com/dga',
- 'raw': 'c3VwcG9ib3gJZGlmZmljdWx0ZHJlc3MubmV0CTIwMTYtMTEtMTIgMTE6NTg6NTYJMjAxNi0xMS0xMyAwMDowNDoxNQ==',
- }
-DGA_EVENT1 = {'feed.name': 'Netlab 360 DGA',
- 'feed.url': 'http://data.netlab.360.com/feeds/dga/dga.txt',
- '__type': 'Event',
- 'time.observation': '2018-01-01T00:00:00+00:00',
- 'time.source': '2018-01-01T00:00:00+00:00',
- 'source.fqdn': 'example.com',
- 'classification.type': 'c2-server',
- 'classification.identifier': 'foobar',
- 'event_description.url': 'http://data.netlab.360.com/dga',
- 'raw': 'Zm9vYmFyCWV4YW1wbGUuY29tCTIwMTgtMDEtMDEgMDA6MDA6MDAJMjAzMC0wNS0wNCAwMDowODowOA==',
- }
-
-MAGNITUDE_REPORT = {'feed.name': 'Netlab 360 Magnitude',
- 'feed.url': 'http://data.netlab.360.com/feeds/ek/magnitude.txt',
- '__type': 'Report',
- 'time.observation': '2016-01-01T00:00:00+00:00',
- 'raw': utils.base64_encode(MAGNITUDE_FILE)
- }
-
-MAGNITUDE_EVENTS = {'feed.name': 'Netlab 360 Magnitude',
- 'feed.url': 'http://data.netlab.360.com/feeds/ek/magnitude.txt',
- '__type': 'Event',
- 'time.observation': '2016-01-01T00:00:00+00:00',
- 'time.source': '2016-11-12T10:31:05+00:00',
- 'source.fqdn': '3ebo08o4ct0f6n2336.insides.party',
- 'source.ip': '178.32.227.12',
- 'source.url': 'http://3ebo08o4ct0f6n2336.insides.party/d97cc5cfab47e305536690a9987115ac',
- 'classification.type': 'exploit',
- 'classification.identifier': 'magnitude',
- 'event_description.url': 'http://data.netlab.360.com/ek',
- 'raw': 'TWFnbml0dWRlCTE0Nzg5NDY2NjUJMTc4LjMyLjIyNy4xMgkzZWJvMDhvNGN0MGY2bjIzMzYuaW5zaWRlcy5wYXJ0eQlodHRwOi8vM2VibzA4bzRjdDBmNm4yMzM2Lmluc2lkZXMucGFydHkvZDk3Y2M1Y2ZhYjQ3ZTMwNTUzNjY5MGE5OTg3MTE1YWM='
- }
-
-MIRAI_REPORT = {'feed.name': 'Netlab 360 Mirai Scanner',
- 'feed.url': 'http://data.netlab.360.com/feeds/mirai-scanner/scanner.list',
- '__type': 'Report',
- 'time.observation': '2016-01-01T00:00:00+00:00',
- 'raw': utils.base64_encode(MIRAI_FILE)
- }
-
-MIRAI_EVENTS = {'feed.name': 'Netlab 360 Mirai Scanner',
- 'feed.url': 'http://data.netlab.360.com/feeds/mirai-scanner/scanner.list',
- '__type': 'Event',
- 'destination.port': 23,
- 'time.observation': '2016-01-01T00:00:00+00:00',
- 'time.source': '2016-08-01T12:46:01+00:00',
- 'source.ip': '109.86.182.249',
- 'classification.type': 'scanner',
- 'classification.identifier': 'mirai',
- 'raw': 'MjAxNi0wOC0wMSAxMjo0NjowMQlzaXA9MTA5Ljg2LjE4Mi4yNDkJZHBvcnQ9MjM=',
- }
-HAJIME_REPORT = {'feed.name': 'Netlab 360 Hajime Scanner',
- 'feed.url': 'https://data.netlab.360.com/feeds/hajime-scanner/bot.list',
- '__type': 'Report',
- 'time.observation': '2016-01-01T00:00:00+00:00',
- 'raw': utils.base64_encode(HAJIME_FILE)
- }
-
-HAJIME_EVENTS = {'feed.name': 'Netlab 360 Hajime Scanner',
- 'feed.url': 'https://data.netlab.360.com/feeds/hajime-scanner/bot.list',
- '__type': 'Event',
- 'time.observation': '2016-01-01T00:00:00+00:00',
- 'time.source': '2017-09-11T00:00:00+00:00',
- 'source.ip': '192.0.2.45',
- 'classification.type': 'scanner',
- 'classification.identifier': 'hajime',
- 'raw': 'MjAxNy0wOS0xMQlpcD0xOTIuMC4yLjQ1',
- }
-
-
-class TestNetlab360ParserBot(test.BotTestCase, unittest.TestCase):
- """ A TestCase for Netlab360ParserBot with DGA and Magnitude feeds. """
-
- @classmethod
- def set_bot(cls):
- cls.bot_reference = Netlab360ParserBot
- cls.default_input_message = DGA_REPORT
-
- def test_DGA(self):
- self.run_bot()
- self.assertMessageEqual(0, DGA_EVENT0)
- # the time is in the future here
- self.assertMessageEqual(1, DGA_EVENT1)
-
- def test_magnitude(self):
- self.input_message = MAGNITUDE_REPORT
- self.run_bot()
- self.assertMessageEqual(0, MAGNITUDE_EVENTS)
-
- def test_mirai(self):
- self.input_message = MIRAI_REPORT
- self.run_bot()
- self.assertMessageEqual(0, MIRAI_EVENTS)
-
- def test_hajime(self):
- self.input_message = HAJIME_REPORT
- self.run_bot()
- self.assertMessageEqual(0, HAJIME_EVENTS)
-
-
-if __name__ == '__main__': # pragma: no cover
- unittest.main()
diff --git a/intelmq/tests/bots/parsers/sucuri/__init__.py b/intelmq/tests/bots/parsers/sucuri/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/intelmq/tests/bots/parsers/sucuri/test_sucuri.data b/intelmq/tests/bots/parsers/sucuri/test_sucuri.data
deleted file mode 100644
index eff8ae7f3a..0000000000
--- a/intelmq/tests/bots/parsers/sucuri/test_sucuri.data
+++ /dev/null
@@ -1,764 +0,0 @@
-
-
-
-
-
- Sucuri Research - Website Monitoring, Recovery and Protection
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
We separate the data in three categories: Iframes, redirections and javascript. For each one you can click on the domain for more information, IP addresses and details on the malware.
-
-
Hidden iframes
-
-Latest hidden iframes our scanner have identified on compromised web sites.
-
-
- This list contains some of websites which host malicious, suspicious content and malware files. It is updated every 60 mins.
- These sites were checked in the last 24 hours.
-
-
You may search website reports detected in the last 30 days.