Skip to content

[Bug] CLI Fixes #1073

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 40 commits into main from cli-fixes
Sep 10, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
7eb7046
add support for self-signed certs in es and kibana
brokensound77 Mar 29, 2021
42faa0c
allow Kibana to auth against any providerType
brokensound77 Mar 30, 2021
d0876a9
fix export-rules command
brokensound77 Mar 30, 2021
fd15ac7
fix kibana upload-rule command
brokensound77 Mar 30, 2021
deb4887
fix view-rule command
brokensound77 Mar 30, 2021
3fb19d1
fix validate-rule command
brokensound77 Mar 30, 2021
669f97e
fix search-rules command
brokensound77 Mar 30, 2021
c427065
fix dev kibana-diff command
brokensound77 Mar 30, 2021
a489249
fix dev package-stats command
brokensound77 Mar 30, 2021
f0ff089
fix dev search-rule-prs command
brokensound77 Mar 30, 2021
3577305
fix dev deprecate-rule command
brokensound77 Mar 30, 2021
ca1b1ab
replace toml with pytoml to fix import-rules command
brokensound77 Mar 31, 2021
9e1332a
use no_verify in get_kibana_client
brokensound77 Mar 31, 2021
5a4ef4a
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Mar 31, 2021
5d02bc8
remove unneeded imports after upstream merge
brokensound77 Mar 31, 2021
73f6c71
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Apr 12, 2021
87bd269
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Jun 16, 2021
4c9f088
use rule dicts in rule-search & resolve conflicts
brokensound77 Jun 16, 2021
23601f2
use Path for rule-file type in view-rule
brokensound77 Jun 16, 2021
6a4ef0b
Merge branch 'main' into cli-fixes
brokensound77 Jun 16, 2021
8d2fb24
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Jun 23, 2021
a528f02
small tweaks from feedback
brokensound77 Jun 23, 2021
bc884ea
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Jul 27, 2021
453682f
fix imports from conflicts
brokensound77 Jul 27, 2021
2820a6d
Merge branch 'main' into cli-fixes
brokensound77 Jul 28, 2021
02ef05f
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Aug 14, 2021
a353d8f
Merge remote-tracking branch 'origin/cli-fixes' into cli-fixes
brokensound77 Aug 15, 2021
f09b7a7
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Aug 19, 2021
ba3e95c
update schemas to resolve additionalProperties type bug
brokensound77 Aug 19, 2021
5a7fdee
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Aug 26, 2021
dfa08ee
revert usage of rule.new method
brokensound77 Aug 26, 2021
ad66223
Merge branch 'main' into cli-fixes
brokensound77 Sep 1, 2021
3e67856
fix missing unique_fields in package rule filter
brokensound77 Sep 1, 2021
81a089b
fix github pr loader
brokensound77 Sep 1, 2021
9f2623f
Load gh rules as TOMLRule instead of dict
brokensound77 Sep 1, 2021
1a0df0a
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Sep 1, 2021
8efab85
resolve conflict
brokensound77 Sep 1, 2021
fa0a944
remove unnecessary version insertion
brokensound77 Sep 1, 2021
5ca0c2b
Merge remote-tracking branch 'upstream/main' into cli-fixes
brokensound77 Sep 9, 2021
49298b9
resolve conflicts
brokensound77 Sep 9, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 41 additions & 32 deletions detection_rules/devtools.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
import time
import typing
from pathlib import Path
from typing import Optional, Tuple, List
from typing import Dict, Optional, Tuple, List

import click
import yaml
Expand All @@ -32,6 +32,7 @@
from .version_lock import manage_versions, load_versions
from .rule import AnyRuleData, BaseRuleData, QueryRuleData, TOMLRule
from .rule_loader import RuleCollection, production_filter
from .schemas import definitions
from .semver import Version
from .utils import dict_hash, get_path, load_dump

Expand Down Expand Up @@ -212,8 +213,6 @@ def kibana_diff(rule_id, repo, branch, threads):
else:
rules = rules.filter(production_filter).id_map

# add versions to the rules
manage_versions(list(rules.values()), verbose=False)
repo_hashes = {r.id: r.contents.sha256(include_version=True) for r in rules.values()}

kibana_rules = {r['rule_id']: r for r in get_kibana_rules(repo=repo, branch=branch, threads=threads).values()}
Expand Down Expand Up @@ -594,32 +593,39 @@ def search_rule_prs(ctx, no_loop, query, columns, language, token, threads):
from uuid import uuid4
from .main import search_rules

all_rules = {}
all_rules: Dict[Path, TOMLRule] = {}
new, modified, errors = rule_loader.load_github_pr_rules(token=token, threads=threads)

def add_github_meta(this_rule, status, original_rule_id=None):
def add_github_meta(this_rule: TOMLRule, status: str, original_rule_id: Optional[definitions.UUIDString] = None):
pr = this_rule.gh_pr
rule.metadata['status'] = status
rule.metadata['github'] = {
'base': pr.base.label,
'comments': [c.body for c in pr.get_comments()],
'commits': pr.commits,
'created_at': str(pr.created_at),
'head': pr.head.label,
'is_draft': pr.draft,
'labels': [lbl.name for lbl in pr.get_labels()],
'last_modified': str(pr.last_modified),
'title': pr.title,
'url': pr.html_url,
'user': pr.user.login
data = rule.contents.data
extend_meta = {
'status': status,
'github': {
'base': pr.base.label,
'comments': [c.body for c in pr.get_comments()],
'commits': pr.commits,
'created_at': str(pr.created_at),
'head': pr.head.label,
'is_draft': pr.draft,
'labels': [lbl.name for lbl in pr.get_labels()],
'last_modified': str(pr.last_modified),
'title': pr.title,
'url': pr.html_url,
'user': pr.user.login
}
}

if original_rule_id:
rule.metadata['original_rule_id'] = original_rule_id
rule.contents['rule_id'] = str(uuid4())
extend_meta['original_rule_id'] = original_rule_id
data = dataclasses.replace(rule.contents.data, rule_id=str(uuid4()))

rule_path = Path(f'pr-{pr.number}-{rule.path}')
new_meta = dataclasses.replace(rule.contents.metadata, extended=extend_meta)
contents = dataclasses.replace(rule.contents, metadata=new_meta, data=data)
new_rule = TOMLRule(path=rule_path, contents=contents)

rule_path = f'pr-{pr.number}-{rule.path}'
all_rules[rule_path] = rule.rule_format()
all_rules[new_rule.path] = new_rule

for rule_id, rule in new.items():
add_github_meta(rule, 'new')
Expand All @@ -638,32 +644,35 @@ def add_github_meta(this_rule, status, original_rule_id=None):


@dev_group.command('deprecate-rule')
@click.argument('rule-file', type=click.Path(dir_okay=False))
@click.argument('rule-file', type=Path)
@click.pass_context
def deprecate_rule(ctx: click.Context, rule_file: str):
def deprecate_rule(ctx: click.Context, rule_file: Path):
"""Deprecate a rule."""
import pytoml

version_info = load_versions()
rule_file = Path(rule_file)
contents = pytoml.loads(rule_file.read_text())
rule_collection = RuleCollection()
contents = rule_collection.load_file(rule_file).contents
rule = TOMLRule(path=rule_file, contents=contents)

if rule.id not in version_info:
if rule.contents.id not in version_info:
click.echo('Rule has not been version locked and so does not need to be deprecated. '
'Delete the file or update the maturity to `development` instead')
ctx.exit()

today = time.strftime('%Y/%m/%d')

new_meta = {
'updated_date': today,
'deprecation_date': today,
'maturity': 'deprecated'
}
deprecated_path = get_path('rules', '_deprecated', rule_file.name)

# create the new rule and save it
new_meta = dataclasses.replace(rule.contents.metadata,
updated_date=today,
deprecation_date=today,
maturity='deprecated')
contents = dataclasses.replace(rule.contents, metadata=new_meta)
deprecated_path = get_path('rules', '_deprecated', rule_file.name)

# create the new rule and save it
new_rule = TOMLRule(contents=contents, path=Path(deprecated_path))
new_rule.save_toml()

Expand Down
19 changes: 16 additions & 3 deletions detection_rules/kbwrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@ def kibana_group(ctx: click.Context, **kibana_kwargs):
@click.pass_context
def upload_rule(ctx, rules, replace_id):
"""Upload a list of rule .toml files to Kibana."""

kibana = ctx.obj['kibana']
api_payloads = []

Expand All @@ -60,8 +59,22 @@ def upload_rule(ctx, rules, replace_id):
api_payloads.append(rule)

with kibana:
rules = RuleResource.bulk_create(api_payloads)
click.echo(f"Successfully uploaded {len(rules)} rules")
results = RuleResource.bulk_create(api_payloads)

success = []
errors = []
for result in results:
if 'error' in result:
errors.append(f'{result["rule_id"]} - {result["error"]["message"]}')
else:
success.append(result['rule_id'])

if success:
click.echo('Successful uploads:\n - ' + '\n - '.join(success))
if errors:
click.echo('Failed uploads:\n - ' + '\n - '.join(errors))

return results


@kibana_group.command('search-alerts')
Expand Down
74 changes: 56 additions & 18 deletions detection_rules/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import time
from datetime import datetime
from pathlib import Path
from typing import Dict
from typing import Dict, Optional
from uuid import uuid4

import click
Expand All @@ -22,7 +22,7 @@
from .rule import TOMLRule, TOMLRuleContents
from .rule_formatter import toml_write
from .rule_loader import RuleCollection
from .schemas import all_versions
from .schemas import all_versions, definitions
from .utils import get_path, get_etc_path, clear_caches, load_dump, load_rule_contents

RULES_DIR = get_path('rules')
Expand All @@ -41,7 +41,7 @@ def root(ctx, debug):


@root.command('create-rule')
@click.argument('path', type=click.Path(dir_okay=False))
@click.argument('path', type=Path)
@click.option('--config', '-c', type=click.Path(exists=True, dir_okay=False), help='Rule or config file')
@click.option('--required-only', is_flag=True, help='Only prompt for required fields')
@click.option('--rule-type', '-t', type=click.Choice(sorted(TOMLRuleContents.all_rule_types())),
Expand Down Expand Up @@ -95,7 +95,7 @@ def import_rules(input_file, directory):

rule_contents = []
for rule_file in rule_files:
rule_contents.extend(load_rule_contents(rule_file))
rule_contents.extend(load_rule_contents(Path(rule_file)))

if not rule_contents:
click.echo('Must specify at least one file!')
Expand Down Expand Up @@ -156,7 +156,7 @@ def mass_update(ctx, query, metadata, language, field):


@root.command('view-rule')
@click.argument('rule-file')
@click.argument('rule-file', type=Path)
@click.option('--api-format/--rule-format', default=True, help='Print the rule in final api or rule format')
@click.pass_context
def view_rule(ctx, rule_file, api_format):
Expand All @@ -168,21 +168,57 @@ def view_rule(ctx, rule_file, api_format):
else:
click.echo(toml_write(rule.contents.to_dict()))

return rule


def _export_rules(rules: RuleCollection, outfile: Path, downgrade_version: Optional[definitions.SemVer] = None,
verbose=True, skip_unsupported=False):
"""Export rules into a consolidated ndjson file."""
from .rule import downgrade_contents_from_rule

outfile = outfile.with_suffix('.ndjson')
unsupported = []

if downgrade_version:
if skip_unsupported:
output_lines = []

for rule in rules:
try:
output_lines.append(json.dumps(downgrade_contents_from_rule(rule, downgrade_version),
sort_keys=True))
except ValueError as e:
unsupported.append(f'{e}: {rule.id} - {rule.name}')
continue

else:
output_lines = [json.dumps(downgrade_contents_from_rule(r, downgrade_version), sort_keys=True)
for r in rules]
else:
output_lines = [json.dumps(r.contents.to_api_format(), sort_keys=True) for r in rules]

outfile.write_text('\n'.join(output_lines) + '\n')

if verbose:
click.echo(f'Exported {len(rules) - len(unsupported)} rules into {outfile}')

if skip_unsupported and unsupported:
unsupported_str = '\n- '.join(unsupported)
click.echo(f'Skipped {len(unsupported)} unsupported rules: \n- {unsupported_str}')


@root.command('export-rules')
@multi_collection
@click.option('--outfile', '-o', default=get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson'),
type=click.Path(dir_okay=False), help='Name of file for exported rules')
@click.option('--outfile', '-o', default=Path(get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson')),
type=Path, help='Name of file for exported rules')
@click.option('--replace-id', '-r', is_flag=True, help='Replace rule IDs with new IDs before export')
@click.option('--stack-version', type=click.Choice(all_versions()),
help='Downgrade a rule version to be compatible with older instances of Kibana')
@click.option('--skip-unsupported', '-s', is_flag=True,
help='If `--stack-version` is passed, skip rule types which are unsupported '
'(an error will be raised otherwise)')
def export_rules(rules, outfile, replace_id, stack_version, skip_unsupported) -> RuleCollection:
def export_rules(rules, outfile: Path, replace_id, stack_version, skip_unsupported) -> RuleCollection:
"""Export rule(s) into an importable ndjson file."""
from .packaging import Package

assert len(rules) > 0, "No rules found"

if replace_id:
Expand All @@ -196,10 +232,11 @@ def export_rules(rules, outfile, replace_id, stack_version, skip_unsupported) ->
new_contents = dataclasses.replace(rule.contents, data=new_data)
rules.add_rule(TOMLRule(contents=new_contents))

Path(outfile).parent.mkdir(exist_ok=True)
package = Package(rules, '_', verbose=False)
package.export(outfile, downgrade_version=stack_version, skip_unsupported=skip_unsupported)
return package.rules
outfile.parent.mkdir(exist_ok=True)
_export_rules(rules=rules, outfile=outfile, downgrade_version=stack_version,
skip_unsupported=skip_unsupported)

return rules


@root.command('validate-rule')
Expand Down Expand Up @@ -231,13 +268,14 @@ def search_rules(query, columns, language, count, verbose=True, rules: Dict[str,
from eql.build import get_engine
from eql import parse_query
from eql.pipes import CountPipe
from .rule import get_unique_query_fields

flattened_rules = []
rules = rules or {str(rule.path): rule for rule in RuleCollection.default()}

for file_name, rule_doc in rules.items():
for file_name, rule in rules.items():
flat: dict = {"file": os.path.relpath(file_name)}
flat.update(rule_doc.contents.to_dict())
flat.update(rule.contents.to_dict())
flat.update(flat["metadata"])
flat.update(flat["rule"])

Expand All @@ -254,8 +292,8 @@ def search_rules(query, columns, language, count, verbose=True, rules: Dict[str,
technique_ids.extend([t['id'] for t in techniques])
subtechnique_ids.extend([st['id'] for t in techniques for st in t.get('subtechnique', [])])

flat.update(techniques=technique_ids, tactics=tactic_names, subtechniques=subtechnique_ids)
# unique_fields=TOMLRule.get_unique_query_fields(rule_doc['rule']))
flat.update(techniques=technique_ids, tactics=tactic_names, subtechniques=subtechnique_ids,
unique_fields=get_unique_query_fields(rule))
flattened_rules.append(flat)

flattened_rules.sort(key=lambda dct: dct["name"])
Expand Down
30 changes: 24 additions & 6 deletions detection_rules/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,7 @@ def get_elasticsearch_client(cloud_id=None, elasticsearch_url=None, es_user=None
es_password = es_password or click.prompt("es_password", hide_input=True)
hosts = [elasticsearch_url] if elasticsearch_url else None
timeout = kwargs.pop('timeout', 60)
kwargs['verify_certs'] = not kwargs.pop('ignore_ssl_errors', False)

try:
client = Elasticsearch(hosts=hosts, cloud_id=cloud_id, http_auth=(es_user, es_password), timeout=timeout,
Expand All @@ -295,8 +296,10 @@ def get_elasticsearch_client(cloud_id=None, elasticsearch_url=None, es_user=None
client_error(error_msg, e, ctx=ctx, err=True)


def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana_cookie, **kwargs):
def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana_cookie, space, ignore_ssl_errors,
provider_type, provider_name, **kwargs):
"""Get an authenticated Kibana client."""
from requests import HTTPError
from kibana import Kibana

if not (cloud_id or kibana_url):
Expand All @@ -307,11 +310,22 @@ def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana
kibana_user = kibana_user or click.prompt("kibana_user")
kibana_password = kibana_password or click.prompt("kibana_password", hide_input=True)

with Kibana(cloud_id=cloud_id, kibana_url=kibana_url, **kwargs) as kibana:
verify = not ignore_ssl_errors

with Kibana(cloud_id=cloud_id, kibana_url=kibana_url, space=space, verify=verify, **kwargs) as kibana:
if kibana_cookie:
kibana.add_cookie(kibana_cookie)
else:
kibana.login(kibana_user, kibana_password)
return kibana

try:
kibana.login(kibana_user, kibana_password, provider_type=provider_type, provider_name=provider_name)
except HTTPError as exc:
if exc.response.status_code == 401:
err_msg = f'Authentication failed for {kibana_url}. If credentials are valid, check --provider-name'
client_error(err_msg, exc, err=True)
else:
raise

return kibana


Expand All @@ -323,14 +337,18 @@ def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana
'kibana_password': click.Option(['--kibana-password', '-kp'], default=getdefault('kibana_password')),
'kibana_url': click.Option(['--kibana-url'], default=getdefault('kibana_url')),
'kibana_user': click.Option(['--kibana-user', '-ku'], default=getdefault('kibana_user')),
'space': click.Option(['--space'], default=None, help='Kibana space')
'provider_type': click.Option(['--provider-type'], default=getdefault('provider_type')),
'provider_name': click.Option(['--provider-name'], default=getdefault('provider_name')),
'space': click.Option(['--space'], default=None, help='Kibana space'),
'ignore_ssl_errors': click.Option(['--ignore-ssl-errors'], default=getdefault('ignore_ssl_errors'))
},
'elasticsearch': {
'cloud_id': click.Option(['--cloud-id'], default=getdefault("cloud_id")),
'elasticsearch_url': click.Option(['--elasticsearch-url'], default=getdefault("elasticsearch_url")),
'es_user': click.Option(['--es-user', '-eu'], default=getdefault("es_user")),
'es_password': click.Option(['--es-password', '-ep'], default=getdefault("es_password")),
'timeout': click.Option(['--timeout', '-et'], default=60, help='Timeout for elasticsearch client')
'timeout': click.Option(['--timeout', '-et'], default=60, help='Timeout for elasticsearch client'),
'ignore_ssl_errors': click.Option(['--ignore-ssl-errors'], default=getdefault('ignore_ssl_errors'))
}
}
kibana_options = list(client_options['kibana'].values())
Expand Down
Loading